diff --git a/.env.template b/.env.template index a1462638..4f617c5f 100644 --- a/.env.template +++ b/.env.template @@ -152,6 +152,10 @@ ## Cron schedule of the job that cleans old auth requests from the auth request. ## Defaults to every minute. Set blank to disable this job. # AUTH_REQUEST_PURGE_SCHEDULE="30 * * * * *" +## +## Cron schedule of the job that cleans sso nonce from incomplete flow +## Defaults to daily (20 minutes after midnight). Set blank to disable this job. +# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *" ######################## ### General settings ### @@ -417,11 +421,32 @@ # SSO_ENABLED=true ## Prevent users from logging in directly without going through SSO # SSO_ONLY=false +## On SSO Signup if a user with a matching email already exists make the association +# SSO_SIGNUPS_MATCH_EMAIL=true ## Base URL of the OIDC server (auto-discovery is used) +## - Should not include the `/.well-known/openid-configuration` part and no trailing `/` +## - ${SSO_AUTHORITY}/.well-known/openid-configuration should return a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse # SSO_AUTHORITY=https://auth.example.com +## Optional SSO scopes, override if email and profile are not enough +#SSO_SCOPES="email profile" +## Additionnal authorization url parameters (ex: to obtain a `refresh_token` with Google Auth). +# SSO_AUTHORIZE_EXTRA_PARAMS=" +# access_type=offline +# prompt=consent +# " +## Activate PKCE for the Auth Code flow. Recommended but disabled for now waiting for feedback on support. +# SSO_PKCE=false +## Regex to add additionnal trusted audience to Id Token (by default only the client_id is trusted). +# SSO_AUDIENCE_TRUSTED='^$' ## Set your Client ID and Client Key # SSO_CLIENT_ID=11111 # SSO_CLIENT_SECRET=AAAAAAAAAAAAAAAAAAAAAAAA +## Optional Master password policy (minComplexity=[0-4]) +# SSO_MASTER_PASSWORD_POLICY='{"enforceOnLogin":false,"minComplexity":3,"minLength":12,"requireLower":false,"requireNumbers":false,"requireSpecial":false,"requireUpper":false}' +## Use sso only for authentication not the session lifecycle +# SSO_AUTH_ONLY_NOT_SESSION=false +## Log all the tokens, LOG_LEVEL=debug is required +# SSO_DEBUG_TOKENS=false ######################## ### MFA/2FA settings ### diff --git a/Cargo.lock b/Cargo.lock index 4be52b4f..3eb76e6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -353,6 +353,12 @@ dependencies = [ "rustc-demangle", ] +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base64" version = "0.13.1" @@ -404,9 +410,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "blake2" @@ -466,6 +472,12 @@ version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +[[package]] +name = "bytecount" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" + [[package]] name = "bytemuck" version = "1.16.1" @@ -495,7 +507,7 @@ 
dependencies = [ "cached_proc_macro", "cached_proc_macro_types", "futures", - "hashbrown", + "hashbrown 0.14.5", "instant", "once_cell", "thiserror", @@ -521,10 +533,41 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" [[package]] -name = "cc" -version = "1.0.99" +name = "camino" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", +] + +[[package]] +name = "cc" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c891175c3fb232128f48de6590095e59198bbeb8620c310be349bfc3afd12c7b" [[package]] name = "cfg-if" @@ -540,8 +583,10 @@ checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-targets 0.52.5", ] @@ -573,7 +618,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ - "hashbrown", + "hashbrown 0.14.5", "stacker", ] @@ -586,6 +631,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + [[package]] name = "cookie" version = "0.18.1" @@ -659,12 +710,33 @@ dependencies = [ "once_cell", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core", + "subtle", + "zeroize", +] + [[package]] name = "crypto-common" version = "0.1.6" @@ -675,6 +747,33 @@ dependencies = [ "typenum", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "darling" version = "0.20.9" @@ -717,7 +816,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -725,13 +824,13 @@ dependencies = [ [[package]] name = "dashmap" -version = "6.0.0" +version = "6.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23fadfd577acfd4485fb258011b0fd080882ea83359b6fd41304900b94ccf487" +checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28" dependencies = [ "cfg-if", "crossbeam-utils", - "hashbrown", + "hashbrown 0.14.5", "lock_api", "once_cell", "parking_lot_core", @@ -749,6 +848,17 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a" +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + [[package]] name = "deranged" version = "0.3.11" @@ -756,6 +866,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] [[package]] @@ -784,7 +895,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35b50dba0afdca80b187392b24f2499a88c336d5a8493e4b4ccfb608708be56a" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "proc-macro2", "proc-macro2-diagnostics", "quote", @@ -798,7 +909,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62d6dcd069e7b5fe49a302411f759d4cf1cf2c27fe798ef46fb8baefc053dd2b" dependencies = [ "bigdecimal", - "bitflags 2.5.0", + "bitflags 2.6.0", "byteorder", "chrono", "diesel_derives", @@ -865,6 +976,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -889,12 +1001,77 @@ dependencies = [ "syn", ] +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest", + "elliptic-curve", + "rfc6979", + "signature", + "spki", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "serde", + "sha2", + 
"subtle", + "zeroize", +] + [[package]] name = "either" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest", + "ff", + "generic-array", + "group", + "hkdf", + "pem-rfc7468", + "pkcs8", + "rand_core", + "sec1", + "subtle", + "zeroize", +] + [[package]] name = "email-encoding" version = "0.3.0" @@ -1025,6 +1202,22 @@ dependencies = [ "syslog", ] +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + [[package]] name = "figment" version = "0.10.19" @@ -1223,6 +1416,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -1282,6 +1476,17 @@ dependencies = [ "spinning_top", ] +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + [[package]] name = "h2" version = "0.3.26" @@ -1294,7 +1499,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1313,7 +1518,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.1.0", - "indexmap", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1341,6 +1546,12 @@ dependencies = [ "walkdir", ] +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + [[package]] name = "hashbrown" version = "0.14.5" @@ -1375,6 +1586,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + [[package]] name = "hickory-proto" version = "0.24.1" @@ -1420,6 +1637,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -1581,6 +1807,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.29", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.2" @@ -1706,6 +1946,17 
@@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + [[package]] name = "indexmap" version = "2.2.6" @@ -1713,7 +1964,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.14.5", "serde", ] @@ -1772,6 +2023,15 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -1830,9 +2090,12 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "lettre" @@ -2014,6 +2277,21 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "mini-moka" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c325dfab65f261f386debee8b0969da215b3fa0037e74c8a1234db7ba986d803" +dependencies = [ + "crossbeam-channel", + "crossbeam-utils", + "dashmap 5.5.3", + "skeptic", + "smallvec", + "tagptr", + "triomphe", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -2128,6 +2406,23 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2154,6 +2449,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2161,6 +2467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2182,6 +2489,26 @@ dependencies = [ "libc", ] +[[package]] +name = "oauth2" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c38841cdd844847e3e7c8d29cef9dcfed8877f8f56f9071f77843ecf3baf937f" +dependencies = [ + "base64 0.13.1", + "chrono", + "getrandom", + "http 0.2.12", + "rand", + "reqwest 0.11.27", + "serde", + "serde_json", + "serde_path_to_error", + "sha2", + "thiserror", + "url", +] + [[package]] name = "object" version = "0.36.0" @@ -2197,13 +2524,45 @@ version = "1.19.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +[[package]] +name = "openidconnect" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f47e80a9cfae4462dd29c41e987edd228971d6565553fbc14b8a11e666d91590" +dependencies = [ + "base64 0.13.1", + "chrono", + "dyn-clone", + "ed25519-dalek", + "hmac", + "http 0.2.12", + "itertools", + "log", + "oauth2", + "p256", + "p384", + "rand", + "rsa", + "serde", + "serde-value", + "serde_derive", + "serde_json", + "serde_path_to_error", + "serde_plain", + "serde_with", + "sha2", + "subtle", + "thiserror", + "url", +] + [[package]] name = "openssl" version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if", "foreign-types", "libc", @@ -2251,12 +2610,45 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "p384" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70786f51bcc69f6a4c0360e063a4cac5419ef7c5cd5b3c99ad70f3be5ba79209" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + [[package]] name = "parking" version = "2.2.0" @@ -2345,6 +2737,15 @@ dependencies = [ "serde", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -2483,6 +2884,27 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + [[package]] name = "pkg-config" version = "0.3.30" @@ -2548,10 +2970,19 @@ dependencies = [ ] [[package]] -name = "proc-macro2" -version = "1.0.85" +name = "primeorder" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -2594,6 +3025,17 @@ dependencies = [ "psl-types", ] +[[package]] +name = "pulldown-cmark" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +dependencies = [ + "bitflags 2.6.0", + "memchr", + "unicase", +] + [[package]] name = "quanta" version = "0.12.3" @@ -2677,7 +3119,7 @@ version = "11.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e29830cbb1290e404f24c73af91c5d8d631ce7e128691e9477556b540cd01ecd" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -2686,7 +3128,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -2779,6 +3221,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.29", + "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -2788,6 +3231,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -2796,11 +3240,13 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", + "webpki-roots", "winreg 0.50.0", ] @@ -2823,7 +3269,7 @@ dependencies = [ "http-body 1.0.0", "http-body-util", "hyper 1.3.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -2863,6 +3309,16 @@ dependencies = [ "quick-error", ] +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + [[package]] name = "ring" version = "0.17.8" @@ -2913,7 +3369,7 @@ dependencies = [ "either", "figment", "futures", - "indexmap", + "indexmap 2.2.6", "log", "memchr", "multer", @@ -2945,7 +3401,7 @@ checksum = "575d32d7ec1a9770108c879fc7c47815a80073f96ca07ff9525a94fcede1dd46" dependencies = [ "devise", "glob", - "indexmap", + "indexmap 2.2.6", "proc-macro2", "quote", "rocket_http", @@ -2965,7 +3421,7 @@ dependencies = [ "futures", "http 0.2.12", "hyper 0.14.29", - "indexmap", + "indexmap 2.2.6", "log", "memchr", "pear", @@ -3005,6 +3461,26 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rsa" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core", + "signature", + "spki", + "subtle", + "zeroize", +] + [[package]] name = "rtoolbox" version = "0.0.2" @@ -3021,6 +3497,15 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.37.27" @@ -3041,7 +3526,7 @@ version = "0.38.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys 0.4.14", @@ -3180,13 +3665,27 @@ dependencies = [ "untrusted", ] +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + [[package]] name = "security-framework" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -3208,6 +3707,9 @@ name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +dependencies = [ + "serde", +] [[package]] name = "serde" @@ -3218,6 +3720,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_cbor" version = "0.11.2" @@ -3241,15 +3753,34 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4" dependencies = [ "itoa", "ryu", "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_plain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1fc6db65a611022b23a0dec6975d63fb80a302cb3388835ff02c097258d50" +dependencies = [ + "serde", +] + [[package]] name = "serde_spanned" version = "0.6.6" @@ -3271,6 +3802,36 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ad483d2ab0149d5a5ebcd9972a3852711e0153d863bf5a5d0391d28883c4a20" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.2.6", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65569b702f41443e8bc8bbb1c5779bd0450bbe723b56198980e80ec45780bce2" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sha1" version = "0.10.6" @@ -3321,6 +3882,16 @@ dependencies = [ "libc", ] +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest", + "rand_core", +] + [[package]] name = "simple_asn1" version = "0.6.2" @@ -3339,6 +3910,21 @@ 
version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "skeptic" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" +dependencies = [ + "bytecount", + "cargo_metadata", + "error-chain", + "glob", + "pulldown-cmark", + "tempfile", + "walkdir", +] + [[package]] name = "slab" version = "0.4.9" @@ -3389,6 +3975,16 @@ dependencies = [ "lock_api", ] +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + [[package]] name = "stable-pattern" version = "0.1.0" @@ -3428,15 +4024,15 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.66" +version = "2.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" dependencies = [ "proc-macro2", "quote", @@ -3489,6 +4085,12 @@ dependencies = [ "libc", ] +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tempfile" version = "3.10.1" @@ -3575,9 +4177,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" dependencies = [ "tinyvec_macros", ] @@ -3724,7 +4326,7 @@ version = "0.22.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" dependencies = [ - "indexmap", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", @@ -3832,6 +4434,12 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "triomphe" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" + [[package]] name = "try-lock" version = "0.2.5" @@ -3888,6 +4496,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.15" @@ -3941,9 +4558,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "uuid" -version = "1.8.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" +checksum = 
"5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" dependencies = [ "getrandom", ] @@ -3972,7 +4589,7 @@ dependencies = [ "chrono-tz", "cookie", "cookie_store", - "dashmap 6.0.0", + "dashmap 6.0.1", "data-encoding", "data-url", "diesel", @@ -3992,9 +4609,11 @@ dependencies = [ "libsqlite3-sys", "log", "mimalloc", + "mini-moka", "num-derive", "num-traits", "once_cell", + "openidconnect", "openssl", "paste", "percent-encoding", @@ -4173,6 +4792,12 @@ dependencies = [ "url", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "which" version = "6.0.1" diff --git a/Cargo.toml b/Cargo.toml index 10f96bb7..6342a4e7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -150,7 +150,8 @@ paste = "1.0.15" governor = "0.6.3" # OIDC for SSO -openidconnect = "3.4.0" +openidconnect = "3.5.0" +mini-moka = "0.10.2" # Check client versions for specific features. semver = "1.0.23" diff --git a/SSO.md b/SSO.md new file mode 100644 index 00000000..8d622ad4 --- /dev/null +++ b/SSO.md @@ -0,0 +1,260 @@ +# SSO using OpenId Connect + +To use an external source of authentication your SSO will need to support OpenID Connect : + + - And OpenID Connect Discovery endpoint should be available + - Client authentication will be done using Id and Secret. + +A master password will still required and not controlled by the SSO (depending of your point of view this might be a feature ;). +This introduce another way to control who can use the vault without having to use invitation or using an LDAP. + +## Configuration + +The following configurations are available + + - `SSO_ENABLED` : Activate the SSO + - `SSO_ONLY` : disable email+Master password authentication + - `SSO_SIGNUPS_MATCH_EMAIL`: On SSO Signup if a user with a matching email already exists make the association (default `true`) + - `SSO_AUTHORITY` : the OpendID Connect Discovery endpoint of your SSO + - Should not include the `/.well-known/openid-configuration` part and no trailing `/` + - $SSO_AUTHORITY/.well-known/openid-configuration should return the a json document: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfigurationResponse + - `SSO_SCOPES` : Optional, allow to override scopes if needed (default `"email profile"`) + - `SSO_AUTHORIZE_EXTRA_PARAMS` : Optional, allow to add extra parameter to the authorize redirection (default `""`) + - `SSO_PKCE`: Activate PKCE for the Auth Code flow. Recommended but disabled for now waiting for feedback on support (default `false`). + - `SSO_AUDIENCE_TRUSTED`: Optional, Regex to trust additionnal audience for the IdToken (`client_id` is always trusted). Use single quote when writing the regex: `'^$'`. + - `SSO_CLIENT_ID` : Client Id + - `SSO_CLIENT_SECRET` : Client Secret + - `SSO_MASTER_PASSWORD_POLICY`: Optional Master password policy + - `SSO_AUTH_ONLY_NOT_SESSION`: Enable to use SSO only for authentication not session lifecycle + - `SSO_DEBUG_TOKENS`: Log all tokens (default `false`, `LOG_LEVEL=debug` is required) + +The callback url is : `https://your.domain/identity/connect/oidc-signin` + +## Account and Email handling + +When logging with SSO an identifier (`{iss}/{sub}` claims from the IdToken) is saved in a separate table (`sso_users`). +This is used to link to the SSO provider identifier without changing the default Vaultwarden user `uuid`. 
This is needed because:
+
+ - Storing the SSO identifier is important to prevent account takeover due to email change.
+ - We can't use the identifier as the User uuid since it's way longer (max 255 chars for the `sub` part, cf [spec](https://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken)).
+ - We want to be able to associate an existing account based on `email`, but only when the user logs in for the first time (controlled by `SSO_SIGNUPS_MATCH_EMAIL`).
+ - We need to be able to associate with an existing stub account, such as the one created when inviting a user to an org (association is possible only if the user does not have a private key).
+
+Additionally:
+
+ - Signup to Vaultwarden will be blocked if the Provider reports the email as `unverified`.
+ - Changing the email needs to be done by the user since it requires updating the `key`.
+   On login, if the email returned by the provider is not the one saved in Vaultwarden an email will be sent to the user asking them to update it.
+ - If set, `SIGNUPS_DOMAINS_WHITELIST` is applied on SSO signup and when attempting to change the email.
+
+This means that if you ever need to change the provider URL or the provider itself, you'll have to first delete the association
+then ensure that `SSO_SIGNUPS_MATCH_EMAIL` is activated to allow a new association.
+
+To delete the association (this has no impact on the `Vaultwarden` user):
+
+```sql
+TRUNCATE TABLE sso_users;
+```
+
+## Client Cache
+
+By default the client cache is disabled since it can cause issues with the signing keys.
+\
+This means that the discovery endpoint will be called again each time we need to interact with the provider (generating the authorize_url, exchanging the authorize code, refreshing tokens).
+This is suboptimal, so `SSO_CLIENT_CACHE_EXPIRATION` allows you to configure an expiration that should work for your provider.
+
+As a protection against a misconfigured expiration, if the validation of the `IdToken` fails then the client cache is invalidated (but you'll periodically have an unlucky user ^^).
+
+### Google example (Rolling keys)
+
+If we take Google as an example, checking the discovery [endpoint](https://accounts.google.com/.well-known/openid-configuration) response headers we can see that the `max-age` of the cache control is set to `3600` seconds, and the [jwk_uri](https://www.googleapis.com/oauth2/v3/certs) response headers usually contain a `max-age` with an even bigger value.
+\
+Combined with user [feedback](https://github.com/ramosbugs/openidconnect-rs/issues/152) we can conclude that Google will roll the signing keys each week.
+
+Setting the cache expiration too high has diminishing returns, but using something like `600` (10 min) should already provide plenty of benefit.
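+
+If you want to see what your own provider allows, the cache headers can be inspected directly; a quick sketch using plain `curl` (any HTTP client works, the Google URLs are just the example above):
+
+```bash
+# Print how long the discovery document and the JWKS may be cached
+curl -s -D - -o /dev/null https://accounts.google.com/.well-known/openid-configuration | grep -i '^cache-control'
+curl -s -D - -o /dev/null https://www.googleapis.com/oauth2/v3/certs | grep -i '^cache-control'
+```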
+
+### Rolling keys manually
+
+If you want to roll the used key, first add a new one but do not immediately start signing with it.
+Wait for the delay you configured in `SSO_CLIENT_CACHE_EXPIRATION`, then you can start signing with it.
+
+As mentioned in the Google example, setting too high a value has diminishing returns even if you do not plan to roll the keys.
+
+## Keycloak
+
+The default access token lifetime might be only `5min`; set a longer value otherwise it will collide with the `VaultWarden` front-end expiration detection, which is also set at `5min`.
+\
+At the realm level:
+- `Realm settings / Tokens / Access Token Lifespan` to at least `10min` (`accessTokenLifespan` setting when using `kcadm.sh`).
+- `Realm settings / Sessions / SSO Session Idle/Max` for the Refresh token lifetime
+
+Or for a specific client in `Clients / Client details / Advanced / Advanced settings` you can find `Access Token Lifespan` and `Client Session Idle/Max`.
+
+Server configuration, nothing specific, just set:
+
+- `SSO_AUTHORITY=https://${domain}/realms/${realm_name}`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
+
+### Testing
+
+If you want to run a test instance of Keycloak, a [docker-compose](docker/keycloak/docker-compose.yml) is available.
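+
+For reference, the realm-level lifespan mentioned above can also be changed with Keycloak's admin CLI; a minimal sketch, assuming a realm named `test` as created by the bundled compose setup and a `kcadm.sh` already logged in via `kcadm.sh config credentials`:
+
+```bash
+# Raise the access token lifespan to 10 minutes, then read it back
+kcadm.sh update realms/test -s accessTokenLifespan=600
+kcadm.sh get realms/test --fields accessTokenLifespan
+```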
+
+## Authelia
+
+To obtain a `refresh_token` and be able to extend the session you'll need to add the `offline_access` scope.
+
+Config will look like:
+
+ - `SSO_SCOPES="email profile offline_access"`
+
+## Authentik
+
+The default access token lifetime might be only `5min`; set a longer value otherwise it will collide with the `VaultWarden` front-end expiration detection, which is also set at `5min`.
+\
+To change the token expiration go to `Applications / Providers / Edit / Advanced protocol settings`.
+
+Starting with version `2024.2` you will need to add the `offline_access` scope and ensure it's selected in `Applications / Providers / Edit / Advanced protocol settings / Scopes` ([Doc](https://docs.goauthentik.io/docs/providers/oauth2/#authorization_code)).
+
+Server configuration should look like:
+
+- `SSO_AUTHORITY=https://${domain}/application/o/${application_name}/` : trailing `/` is important
+- `SSO_SCOPES="email profile offline_access"`
+- `SSO_CLIENT_ID`
+- `SSO_CLIENT_SECRET`
+- `SSO_PKCE=true`
+
+## GitLab
+
+Create an application in your GitLab settings with:
+
+- `redirectURI`: https://your.domain/identity/connect/oidc-signin
+- `Confidential`: `true`
+- `scopes`: `openid`, `profile`, `email`
+
+Then configure your server with:
+
+ - `SSO_AUTHORITY=https://gitlab.com`
+ - `SSO_CLIENT_ID`
+ - `SSO_CLIENT_SECRET`
+ - `SSO_PKCE=true`
+
+## Google Auth
+
+Google [Documentation](https://developers.google.com/identity/openid-connect/openid-connect).
+\
+By default, without extra [configuration](https://developers.google.com/identity/protocols/oauth2/web-server#creatingclient) you won't have a `refresh_token` and the session will be limited to 1h.
+
+Configure your server with:
+
+ - `SSO_AUTHORITY=https://accounts.google.com`
+ - ```conf
+   SSO_AUTHORIZE_EXTRA_PARAMS="
+   access_type=offline
+   prompt=consent
+   "
+   ```
+ - `SSO_PKCE=true`
+ - `SSO_CLIENT_ID`
+ - `SSO_CLIENT_SECRET`
+
+## Kanidm
+
+Kanidm recommends always running with PKCE.
+
+Config will look like:
+
+ - `SSO_PKCE=true`
+
+Otherwise you can disable the PKCE requirement with: `kanidm system oauth2 warning-insecure-client-disable-pkce CLIENT_NAME --name admin`.
+
+## Microsoft Entra ID
+
+1. Create an "App registration" in [Entra ID](https://entra.microsoft.com/) following [Identity | Applications | App registrations](https://entra.microsoft.com/#blade/Microsoft_AAD_RegisteredApps/ApplicationsListBlade/quickStartType//sourceType/Microsoft_AAD_IAM).
+2. From the "Overview" of your "App registration", you'll need the "Directory (tenant) ID" for the `SSO_AUTHORITY` variable and the "Application (client) ID" as the `SSO_CLIENT_ID` value.
+3. In "Certificates & Secrets" create an "App secret", you'll need the "Secret Value" for the `SSO_CLIENT_SECRET` variable.
+4. In "Authentication" add https://vaultwarden.example.org/identity/connect/oidc-signin as "Web Redirect URI".
+5. In "API Permissions" make sure you have `profile`, `email` and `offline_access` listed under "API / Permission name" (`offline_access` is required, otherwise no refresh_token is returned, see https://github.com/MicrosoftDocs/azure-docs/issues/17134).
+
+Only the v2 endpoint is compliant with the OpenID spec, see https://github.com/MicrosoftDocs/azure-docs/issues/38427 and https://github.com/ramosbugs/openidconnect-rs/issues/122.
+
+Your configuration should look like this:
+
+* `SSO_AUTHORITY=https://login.microsoftonline.com/${Directory (tenant) ID}/v2.0`
+* `SSO_SCOPES="email profile offline_access"`
+* `SSO_CLIENT_ID=${Application (client) ID}`
+* `SSO_CLIENT_SECRET=${Secret Value}`
+
+## Zitadel
+
+To obtain a `refresh_token` and be able to extend the session you'll need to add the `offline_access` scope.
+
+Additionally, Zitadel includes the `Project id` and the `Client Id` in the audience of the Id Token.
+For the validation to work you will need to add the `Resource Id` as a trusted audience (`Client Id` is trusted by default).
+You can control the trusted audience with the config `SSO_AUDIENCE_TRUSTED`.
+
+It appears it's not possible to use PKCE with a confidential client, so it needs to be disabled.
+
+Config will look like:
+
+ - `SSO_AUTHORITY=https://${provider_host}`
+ - `SSO_SCOPES="email profile offline_access"`
+ - `SSO_CLIENT_ID`
+ - `SSO_CLIENT_SECRET`
+ - `SSO_AUDIENCE_TRUSTED='^${Project Id}$'`
+ - `SSO_PKCE=false`
+
+## Session lifetime
+
+Session lifetime depends on the refresh token and access token returned after calling your SSO token endpoint (grant type `authorization_code`).
+If no refresh token is returned then the session will be limited to the access token lifetime.
+
+Tokens are not persisted in VaultWarden but wrapped in JWT tokens and returned to the application (the `refresh_token` and `access_token` values returned by the VW `identity/connect/token` endpoint).
+Note that VaultWarden will always return a `refresh_token` for compatibility reasons with the web front-end, and its presence does not indicate that a refresh token was returned by your SSO (but you can decode its value with https://jwt.io and check whether the `token` field contains anything; see the sketch below).
+
+With a refresh token present, activity in the application will trigger a refresh of the access token when it's close to expiration ([5min](https://github.com/bitwarden/clients/blob/0bcb45ed5caa990abaff735553a5046e85250f24/libs/common/src/auth/services/token.service.ts#L126) in the web client).
+
+Additionally, for certain actions a token check is performed: if we have a refresh token we will perform a refresh, otherwise we'll call the user information endpoint to check the access token validity.
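+
+A minimal sketch of that check from a shell, assuming the value is in `$REFRESH_TOKEN` (illustrative only, no signature verification):
+
+```bash
+# Decode the JWT payload of the refresh_token returned by Vaultwarden.
+# An empty "token" field means your SSO did not return a refresh token.
+payload=$(printf '%s' "$REFRESH_TOKEN" | cut -d '.' -f 2 | tr '_-' '/+')
+while [ $(( ${#payload} % 4 )) -ne 0 ]; do payload="${payload}="; done  # restore base64 padding
+printf '%s\n' "$payload" | base64 -d
+```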
+
+### Disabling SSO session handling
+
+If you are unable to obtain a `refresh_token`, or for any other reason, you can disable SSO session handling and revert to the default handling.
+You'll need to set `SSO_AUTH_ONLY_NOT_SESSION=true`; the access token will then be valid for 2h and the refresh token will allow for an idle time of 7 days (which can be extended indefinitely).
+
+### Debug information
+
+Running with `LOG_LEVEL=debug` you'll be able to see information on token expiration.
+
+## Desktop Client
+
+There are some issues handling the redirection from your browser (used for SSO login) to the application.
+
+### Chrome
+
+Probably not much hope: an [issue](https://github.com/bitwarden/clients/issues/2606) is open on the subject and it appears that neither Linux nor Windows is working.
+
+### Firefox
+
+On Windows you'll be presented with a prompt the first time you log in, to confirm which application should be launched (but there is a bug at the moment: you might end up with an empty vault after login).
+
+On Linux it's a bit more tricky.
+First you'll need to add some config in `about:config`:
+
+```
+network.protocol-handler.expose.bitwarden=false
+network.protocol-handler.external.bitwarden=true
+```
+
+If you have any doubt you can check `mailto` to see how it's configured.
+
+The redirection will still not work since it appears that the association to an application can only be done on a link/click. You can trigger it with a dummy page such as:
+
+```html
+data:text/html,<a href="bitwarden:///">Click me to register Bitwarden</a>
+```
+
+From now on the redirection should work.
+If you need to change the application launched you can now find it in `Settings` by using the search function and entering `application`.
diff --git a/docker/keycloak/.env.template b/docker/keycloak/.env.template
new file mode 100644
index 00000000..74d47f89
--- /dev/null
+++ b/docker/keycloak/.env.template
@@ -0,0 +1,26 @@
+# Keycloak Config
+KEYCLOAK_ADMIN=admin
+KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN}
+KC_HTTP_HOST=127.0.0.1
+KC_HTTP_PORT=8080
+
+# Script parameters (use Keycloak and VaultWarden config too)
+TEST_REALM=test
+
+TEST_USER=test
+TEST_USER_PASSWORD=${TEST_USER}
+TEST_USER_MAIL="${TEST_USER}@yopmail.com"
+
+TEST_USER_2=test2
+TEST_USER_2_PASSWORD=${TEST_USER_2}
+TEST_USER_2_MAIL="${TEST_USER_2}@yopmail.com"
+
+# VaultWarden Config
+ROCKET_PORT=8000
+DOMAIN=http://127.0.0.1:${ROCKET_PORT}
+I_REALLY_WANT_VOLATILE_STORAGE=true
+SSO_ENABLED=true
+SSO_ONLY=false
+SSO_CLIENT_ID=VaultWarden
+SSO_CLIENT_SECRET=VaultWarden
+SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
diff --git a/docker/keycloak/README.md b/docker/keycloak/README.md
new file mode 100644
index 00000000..000dd648
--- /dev/null
+++ b/docker/keycloak/README.md
@@ -0,0 +1,67 @@
+# OpenID Connect test setup
+
+This `docker-compose` template allows you to run a `VaultWarden` and [`Keycloak`](https://www.keycloak.org/) instance locally to test OIDC.
+
+## Usage
+
+You'll need `docker` and `docker-compose` ([cf](https://docs.docker.com/engine/install/)).
+
+First create a copy of `.env.template` as `.env` (this is done to prevent committing your custom settings, e.g. `SMTP_`).
+
+Then start the stack (the `profile` is required to run the `VaultWarden`):
+
+```bash
+> DOCKER_BUILDKIT=1 docker-compose --profile VaultWarden up
+....
+keycloakSetup_1 | Logging into http://127.0.0.1:8080 as user admin of realm master
+keycloakSetup_1 | Created new realm with id 'test'
+keycloakSetup_1 | 74af4933-e386-4e64-ba15-a7b61212c45e
+oidc_keycloakSetup_1 exited with code 0
+```
+
+Wait until `oidc_keycloakSetup_1 exited with code 0`, which indicates the correct setup of the Keycloak realm, client and user (it's normal for this container to stop once the configuration is done).
+
+Then you can access:
+
+ - `VaultWarden` on http://127.0.0.1:8000 with the default user `test@yopmail.com/test`.
+ - `Keycloak` on http://127.0.0.1:8080/admin/master/console/ with the default user `admin/admin`
+
+To proceed with an SSO login, after you enter the email the SSO button should be visible on the screen prompting for the `Master Password`.
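+
+To quickly check that the test realm is up (its discovery endpoint is the `SSO_AUTHORITY` used by this setup), a small sketch; `jq` is only used for readability:
+
+```bash
+curl -s http://127.0.0.1:8080/realms/test/.well-known/openid-configuration | jq -r .issuer
+# expected output: http://127.0.0.1:8080/realms/test
+```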
+
+## Running only Keycloak
+
+Since the `VaultWarden` service is defined with a `profile` you can just use the default `docker-compose` command:
+
+```bash
+> docker-compose up
+```
+
+When running with a local VaultWarden you'll need to make the SSO button visible using:
+
+```bash
+sed -i 's#a\[routerlink="/sso"\],##' /web-vault/app/main.*.css
+```
+
+Otherwise you'll need to reveal the SSO login button using the debug console (F12):
+
+  ```js
+  document.querySelector('a[routerlink="/sso"]').style.setProperty("display", "inline-block", "important");
+  ```
+
+## To force rebuilding the VaultWarden image
+
+Use `DOCKER_BUILDKIT=1 docker-compose --profile VaultWarden up --build VaultWarden`.
+
+If, after building, the `Keycloak` configuration is not run, just interrupt and run again without `--build`.
+
+## Configuration
+
+All configuration for `keycloak` / `VaultWarden` / `keycloak_setup.sh` can be found in [.env](.env.template).
+The content of the file will be loaded as environment variables in all containers.
+
+- `keycloak` [configuration](https://www.keycloak.org/server/all-config) includes `KEYCLOAK_ADMIN` / `KEYCLOAK_ADMIN_PASSWORD` and any variable prefixed `KC_` ([more information](https://www.keycloak.org/server/configuration#_example_configuring_the_db_url_host_parameter)).
+- All `VaultWarden` configuration can be set (e.g. `SMTP_*`)
+
+## Cleanup
+
+Use `docker-compose --profile VaultWarden down`.
diff --git a/docker/keycloak/docker-compose.yml b/docker/keycloak/docker-compose.yml
new file mode 100644
index 00000000..6c0274c6
--- /dev/null
+++ b/docker/keycloak/docker-compose.yml
@@ -0,0 +1,33 @@
+services:
+  keycloak:
+    container_name: keycloak-${ENV:-dev}
+    image: quay.io/keycloak/keycloak
+    network_mode: "host"
+    command:
+      - start-dev
+    env_file: ${ENV}.env
+    volumes:
+      - ./keycloak_setup.sh:/opt/script/keycloak_setup.sh
+  keycloakSetup:
+    container_name: keycloakSetup-${ENV:-dev}
+    image: quay.io/keycloak/keycloak
+    network_mode: "host"
+    depends_on:
+      - keycloak
+    restart: "no"
+    env_file: ${ENV}.env
+    entrypoint: [ "bash", "-c", "/opt/script/keycloak_setup.sh"]
+    volumes:
+      - ${KC_SETUP_PATH:-.}/keycloak_setup.sh:/opt/script/keycloak_setup.sh
+  VaultWarden:
+    image: vaultwarden
+    profiles: ["VaultWarden"]
+    network_mode: "host"
+    build:
+      context: ../../..
+      dockerfile: Dockerfile
+    depends_on:
+      - keycloak
+    env_file: ${ENV}.env
+    volumes:
+      - ./vaultwarden.sh:/etc/vaultwarden.sh
diff --git a/docker/keycloak/keycloak_setup.sh b/docker/keycloak/keycloak_setup.sh
new file mode 100755
index 00000000..7ee5590e
--- /dev/null
+++ b/docker/keycloak/keycloak_setup.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+export PATH=$PATH:/opt/keycloak/bin
+
+CANARY=/tmp/keycloak_setup_done
+
+if [ -f $CANARY ]
+then
+  echo "Setup should already be done. Will not run."
+  exit 0
+fi
+
+while true; do
+  sleep 5
+  kcadm.sh config credentials --server "http://${KC_HTTP_HOST}:${KC_HTTP_PORT}" --realm master --user "$KEYCLOAK_ADMIN" --password "$KEYCLOAK_ADMIN_PASSWORD" --client admin-cli
+  EC=$?
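+  # Keycloak is considered ready once the admin CLI login above succeeds; until then the loop retries every 5 seconds.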
+ if [ $EC -eq 0 ]; then + break + fi + echo "Will retry in 5 seconds" +done + +kcadm.sh create realms -s realm="$TEST_REALM" -s enabled=true -s "accessTokenLifespan=600" +kcadm.sh create clients -r test -s "clientId=$SSO_CLIENT_ID" -s "secret=$SSO_CLIENT_SECRET" -s "redirectUris=[\"$DOMAIN/*\"]" -i + +TEST_USER_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER" -s "email=$TEST_USER_MAIL" -s emailVerified=true -s enabled=true -i) +kcadm.sh update users/$TEST_USER_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_PASSWORD" -n + +TEST_USER_2_ID=$(kcadm.sh create users -r "$TEST_REALM" -s "username=$TEST_USER_2" -s "email=$TEST_USER_2_MAIL" -s emailVerified=true -s enabled=true -i) +kcadm.sh update users/$TEST_USER_2_ID/reset-password -r "$TEST_REALM" -s type=password -s "value=$TEST_USER_2_PASSWORD" -n + +touch $CANARY diff --git a/docker/keycloak/vaultwarden.sh b/docker/keycloak/vaultwarden.sh new file mode 100755 index 00000000..b956d94e --- /dev/null +++ b/docker/keycloak/vaultwarden.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env sh + +# Remove CSS to hide SSO Link +sed -i 's#a\[routerlink="/sso"\],##' /web-vault/app/main.*.css diff --git a/migrations/mysql/2023-02-01-133000_add_sso/up.sql b/migrations/mysql/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index c10ab5cf..00000000 --- a/migrations/mysql/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); diff --git a/migrations/mysql/2023-02-01-133000_add_sso/down.sql b/migrations/mysql/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/mysql/2023-02-01-133000_add_sso/down.sql rename to migrations/mysql/2023-09-10-133000_add_sso/down.sql diff --git a/migrations/mysql/2023-09-10-133000_add_sso/up.sql b/migrations/mysql/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..518664df --- /dev/null +++ b/migrations/mysql/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/mysql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..bce31222 --- /dev/null +++ b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql 
b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..f73aeea9 --- /dev/null +++ b/migrations/mysql/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..c033f7cb --- /dev/null +++ b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..42fb0efa --- /dev/null +++ b/migrations/mysql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql b/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/mysql/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git a/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql b/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..7809d43e --- /dev/null +++ b/migrations/mysql/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier VARCHAR(768) NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT now(), + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql b/migrations/mysql/2024-03-13-170000_sso_users_cascade/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql new file mode 100644 index 00000000..4e06fe58 --- /dev/null +++ b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql @@ -0,0 +1,2 @@ +ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; +ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; diff --git a/migrations/postgresql/2023-02-01-133000_add_sso/up.sql b/migrations/postgresql/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index 57f976c1..00000000 --- a/migrations/postgresql/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); \ No newline at end of file diff --git a/migrations/postgresql/2023-02-01-133000_add_sso/down.sql b/migrations/postgresql/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/postgresql/2023-02-01-133000_add_sso/down.sql rename to migrations/postgresql/2023-09-10-133000_add_sso/down.sql diff --git 
a/migrations/postgresql/2023-09-10-133000_add_sso/up.sql b/migrations/postgresql/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..1321e246 --- /dev/null +++ b/migrations/postgresql/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/postgresql/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..7cf4d9d6 --- /dev/null +++ b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..f7402460 --- /dev/null +++ b/migrations/postgresql/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..ef209a45 --- /dev/null +++ b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..f2dedfc9 --- /dev/null +++ b/migrations/postgresql/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql b/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/postgresql/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git 
a/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql b/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..b74b5728 --- /dev/null +++ b/migrations/postgresql/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT now(), + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/postgresql/2024-03-13-170000_sso_users_cascade/down.sql b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql new file mode 100644 index 00000000..38f97b4d --- /dev/null +++ b/migrations/postgresql/2024-03-13-170000_sso_users_cascade/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE sso_users + DROP CONSTRAINT "sso_users_user_uuid_fkey", + ADD CONSTRAINT "sso_users_user_uuid_fkey" FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; diff --git a/migrations/sqlite/2023-02-01-133000_add_sso/up.sql b/migrations/sqlite/2023-02-01-133000_add_sso/up.sql deleted file mode 100644 index c10ab5cf..00000000 --- a/migrations/sqlite/2023-02-01-133000_add_sso/up.sql +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE sso_nonce ( - nonce CHAR(36) NOT NULL PRIMARY KEY -); diff --git a/migrations/sqlite/2023-02-01-133000_add_sso/down.sql b/migrations/sqlite/2023-09-10-133000_add_sso/down.sql similarity index 100% rename from migrations/sqlite/2023-02-01-133000_add_sso/down.sql rename to migrations/sqlite/2023-09-10-133000_add_sso/down.sql diff --git a/migrations/sqlite/2023-09-10-133000_add_sso/up.sql b/migrations/sqlite/2023-09-10-133000_add_sso/up.sql new file mode 100644 index 00000000..518664df --- /dev/null +++ b/migrations/sqlite/2023-09-10-133000_add_sso/up.sql @@ -0,0 +1,4 @@ +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql new file mode 100644 index 00000000..3a708927 --- /dev/null +++ b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/down.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations DROP COLUMN invited_by_email; diff --git a/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql new file mode 100644 index 00000000..c94e1131 --- /dev/null +++ b/migrations/sqlite/2023-09-14-133000_add_users_organizations_invited_by_email/up.sql @@ -0,0 +1 @@ +ALTER TABLE users_organizations ADD COLUMN invited_by_email TEXT DEFAULT NULL; diff --git a/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql new file mode 100644 index 00000000..3cbd4602 --- /dev/null +++ b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/down.sql @@ -0,0 +1,6 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + nonce CHAR(36) NOT NULL PRIMARY KEY, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql 
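The final `sso_nonce` shape is the same for MySQL, PostgreSQL, and SQLite (the SQLite migrations follow below): `state` becomes the primary key, next to the OIDC `nonce`, an optional PKCE `verifier`, the `redirect_uri`, and a `created_at` timestamp. A minimal Rust sketch of such a row, using hypothetical struct and helper names (and an arbitrary example redirect URI) rather than the models added elsewhere in this patch:

```rust
// Illustrative only: mirrors the columns of the final `sso_nonce` table
// (after the 2024-02-26 migration). Names here are hypothetical, not this patch's models.
use std::time::{SystemTime, UNIX_EPOCH};

#[derive(Debug)]
struct SsoNonceRow {
    state: String,            // PRIMARY KEY, echoed back by the provider on the callback
    nonce: String,            // OIDC nonce used to validate the id_token
    verifier: Option<String>, // PKCE code_verifier, only set when PKCE is enabled
    redirect_uri: String,     // where the client is redirected after the provider round-trip
    created_at: u64,          // unix seconds; the DB default is CURRENT_TIMESTAMP / now()
}

fn new_nonce_row(state: &str, nonce: &str, verifier: Option<&str>, redirect_uri: &str) -> SsoNonceRow {
    SsoNonceRow {
        state: state.to_owned(),
        nonce: nonce.to_owned(),
        verifier: verifier.map(str::to_owned),
        redirect_uri: redirect_uri.to_owned(),
        created_at: SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(),
    }
}

fn main() {
    let row = new_nonce_row("opaque-state", "random-nonce", Some("pkce-verifier"), "https://vault.example.com/callback");
    println!("{row:?}");
}
```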
b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql new file mode 100644 index 00000000..13e95fd8 --- /dev/null +++ b/migrations/sqlite/2024-02-14-170000_add_state_to_sso_nonce/up.sql @@ -0,0 +1,8 @@ +DROP TABLE sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql new file mode 100644 index 00000000..e7a55bd8 --- /dev/null +++ b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/down.sql @@ -0,0 +1,8 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql new file mode 100644 index 00000000..6b55e95d --- /dev/null +++ b/migrations/sqlite/2024-02-26-170000_add_pkce_to_sso_nonce/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql b/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql new file mode 100644 index 00000000..f2f92f68 --- /dev/null +++ b/migrations/sqlite/2024-03-06-170000_add_sso_users/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS sso_users; diff --git a/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql b/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql new file mode 100644 index 00000000..6d015f04 --- /dev/null +++ b/migrations/sqlite/2024-03-06-170000_add_sso_users/up.sql @@ -0,0 +1,7 @@ +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); diff --git a/migrations/sqlite/2024-03-13_170000_sso_userscascade/down.sql b/migrations/sqlite/2024-03-13_170000_sso_userscascade/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql b/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql new file mode 100644 index 00000000..53b09cf4 --- /dev/null +++ b/migrations/sqlite/2024-03-13_170000_sso_userscascade/up.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_users; + +CREATE TABLE sso_users ( + user_uuid CHAR(36) NOT NULL PRIMARY KEY, + identifier TEXT NOT NULL UNIQUE, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + + FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE +); diff --git a/playwright/README.md b/playwright/README.md index 3b742f89..7a72dca3 100644 --- a/playwright/README.md +++ b/playwright/README.md @@ -37,6 +37,20 @@ npx playwright test --project postgres npx playwright test --project mysql ``` +### SSO + +To run the SSO tests: + +```bash +npx playwright test --project sso-sqllite +``` + +Additionally, you can keep the Keycloak `docker-compose` stack running between test runs (its state is not affected by the tests): + +```bash +KC_KEEP_RUNNNING=true npx playwright test --project sso-sqllite +``` + ### Running specific tests
To run a whole file you can : diff --git a/playwright/global-setup.ts b/playwright/global-setup.ts index 7f99a2fd..2bb5f838 100644 --- a/playwright/global-setup.ts +++ b/playwright/global-setup.ts @@ -1,5 +1,5 @@ -import { firefox, type FullConfig } from '@playwright/test'; -import { exec, execSync } from 'node:child_process'; +import { type FullConfig } from '@playwright/test'; +import { execSync } from 'node:child_process'; import fs from 'fs'; import yaml from 'js-yaml'; @@ -49,6 +49,9 @@ function retrieveFrontend(){ execSync(`cd temp && wget -c https://github.com/dani-garcia/bw_web_builds/releases/download/${vv}/bw_web_${vv}.tar.gz -O - | tar xz`, { stdio: "inherit" }); + // Make the SSO button visible + execSync(`bash -c "sed -i 's#a.routerlink=./sso..,##' temp/web-vault/app/main.*.css"`, { stdio: "inherit" }); + console.log(`Retrieved bw_web_builds-${vv}`); } else { console.log(`Using existing bw_web_builds-${vv}`); diff --git a/playwright/global-utils.ts b/playwright/global-utils.ts index 520e8904..93ff4008 100644 --- a/playwright/global-utils.ts +++ b/playwright/global-utils.ts @@ -1,4 +1,4 @@ -import { type Browser, type TestInfo } from '@playwright/test'; +import { test, type Browser, type TestInfo, type Page } from '@playwright/test'; import { execSync } from 'node:child_process'; import dotenv from 'dotenv'; import dotenvExpand from 'dotenv-expand'; @@ -50,12 +50,28 @@ function startMariaDB() { ); } - function stopMariaDB() { console.log("Stopping MariaDB (ensure DB is wiped)"); execSync(`docker stop ${process.env.MARIADB_CONTAINER} || true`); } +function startMysqlDB() { + console.log(`Starting Mysql`); + execSync(`docker run --rm --name ${process.env.MYSQL_CONTAINER} \ + -e MYSQL_ROOT_PASSWORD=${process.env.MYSQL_PWD} \ + -e MYSQL_USER=${process.env.MYSQL_USER} \ + -e MYSQL_PASSWORD=${process.env.MYSQL_PWD} \ + -e MYSQL_DATABASE=${process.env.MYSQL_DB} \ + -p ${process.env.MYSQL_PORT}:3306 \ + -d mysql:8.3.0` + ); +} + +function stopMysqlDB() { + console.log("Stopping Mysql (ensure DB is wiped)"); + execSync(`docker stop ${process.env.MYSQL_CONTAINER} || true`); +} + function startPostgres() { console.log(`Starting Postgres`); execSync(`docker run --rm --name ${process.env.POSTGRES_CONTAINER} \ @@ -73,30 +89,37 @@ function stopPostgres() { } function dbConfig(testInfo: TestInfo){ - switch(testInfo.project.name) { - case "postgres": return { + if( testInfo.project.name.includes("postgres") ){ + return { DATABASE_URL: `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PWD}@127.0.0.1:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}` - } - case "mysql": return { + }; + } else if( testInfo.project.name.includes("mariadb") ){ + return { DATABASE_URL: `mysql://${process.env.MARIADB_USER}:${process.env.MARIADB_PWD}@127.0.0.1:${process.env.MARIADB_PORT}/${process.env.MARIADB_DB}` - } - default: return { I_REALLY_WANT_VOLATILE_STORAGE: true } + }; + } else if( testInfo.project.name.includes("mysql") ){ + return { + DATABASE_URL: `mysql://${process.env.MYSQL_USER}:${process.env.MYSQL_PWD}@127.0.0.1:${process.env.MYSQL_PORT}/${process.env.MYSQL_DB}` + }; + } else { + return { I_REALLY_WANT_VOLATILE_STORAGE: true }; } } async function startVaultwarden(browser: Browser, testInfo: TestInfo, env = {}, resetDB: Boolean = true) { if( resetDB ){ - switch(testInfo.project.name) { - case "postgres": - stopPostgres(); - startPostgres() - break; - case "mysql": - stopMariaDB(); - startMariaDB(); - break; - default: - startStopSqlite(); + test.setTimeout(20000); + if( 
testInfo.project.name.includes("postgres") ){ + stopPostgres(); + startPostgres() + } else if( testInfo.project.name.includes("mariadb") ){ + stopMariaDB(); + startMariaDB(); + } else if( testInfo.project.name.includes("mysql") ){ + stopMysqlDB(); + startMysqlDB(); + } else { + startStopSqlite(); } } @@ -118,15 +141,14 @@ async function stopVaultwarden(proc, testInfo: TestInfo, resetDB: Boolean = true proc.kill(); if( resetDB ){ - switch(testInfo.project.name) { - case "postgres": - stopPostgres(); - break; - case "mysql": - stopMariaDB(); - break; - default: - startStopSqlite(); + if( testInfo.project.name.includes("postgres") ){ + stopPostgres(); + } else if( testInfo.project.name.includes("mariadb") ){ + stopMariaDB(); + } else if( testInfo.project.name.includes("mysql") ){ + stopMysqlDB(); + } else { + startStopSqlite(); } } } diff --git a/playwright/package-lock.json b/playwright/package-lock.json index 0b5fd2e9..85c03be9 100644 --- a/playwright/package-lock.json +++ b/playwright/package-lock.json @@ -89,20 +89,6 @@ "node": ">=6" } }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", diff --git a/playwright/playwright.config.ts b/playwright/playwright.config.ts index 81b510b9..9d5a9495 100644 --- a/playwright/playwright.config.ts +++ b/playwright/playwright.config.ts @@ -9,7 +9,7 @@ utils.loadEnv(); * See https://playwright.dev/docs/test-configuration. 
*/ export default defineConfig({ - testDir: 'tests', + testDir: './.', /* Run tests in files in parallel */ fullyParallel: false, @@ -38,16 +38,61 @@ export default defineConfig({ projects: [ { name: 'sqllite', + testMatch: 'tests/*.spec.ts', + testIgnore: 'tests/sso_*.spec.ts', use: { ...devices['Desktop Firefox'] }, }, { name: 'postgres', + testMatch: 'tests/*.spec.ts', + testIgnore: 'tests/sso_*.spec.ts', + use: { ...devices['Desktop Firefox'] }, + }, + { + name: 'mariadb', + testMatch: 'tests/*.spec.ts', + testIgnore: 'tests/sso_*.spec.ts', use: { ...devices['Desktop Firefox'] }, }, { name: 'mysql', + testMatch: 'tests/*.spec.ts', + testIgnore: 'tests/sso_*.spec.ts', use: { ...devices['Desktop Firefox'] }, }, + { + name: 'sso-setup', + testMatch: 'sso-setup.ts', + teardown: 'sso-teardown', + }, + { + name: 'sso-sqllite', + testMatch: 'tests/sso_*.spec.ts', + dependencies: ['sso-setup'], + teardown: 'sso-teardown', + }, + { + name: 'sso-postgres', + testMatch: 'tests/sso_*.spec.ts', + dependencies: ['sso-setup'], + teardown: 'sso-teardown', + }, + { + name: 'sso-mariadb', + testMatch: 'tests/sso_*.spec.ts', + dependencies: ['sso-setup'], + teardown: 'sso-teardown', + }, + { + name: 'sso-mysql', + testMatch: 'tests/sso_*.spec.ts', + dependencies: ['sso-setup'], + teardown: 'sso-teardown', + }, + { + name: 'sso-teardown', + testMatch: 'sso-teardown.ts', + }, ], globalSetup: require.resolve('./global-setup'), diff --git a/playwright/sso-setup.ts b/playwright/sso-setup.ts new file mode 100644 index 00000000..e0edf92b --- /dev/null +++ b/playwright/sso-setup.ts @@ -0,0 +1,18 @@ +import { test, expect, type TestInfo } from '@playwright/test'; + +const { exec } = require('node:child_process'); +const utils = require('./global-utils'); + +utils.loadEnv(); + +test.beforeAll('Setup', async () => { + var kcPath = process.env.KC_SETUP_PATH; + console.log("Starting Keycloak"); + exec(`ENV=test KC_SETUP_PATH=${kcPath} docker-compose -f ${kcPath}/docker-compose.yml --project-directory . up >> temp/logs/keycloak.log 2>&1`); +}); + +test('Keycloak is up', async ({ page }) => { + test.setTimeout(60000); + await utils.waitFor(process.env.SSO_AUTHORITY, page.context().browser()); + console.log(`Keycloak running on: ${process.env.SSO_AUTHORITY}`); +}); diff --git a/playwright/sso-teardown.ts b/playwright/sso-teardown.ts new file mode 100644 index 00000000..daef5117 --- /dev/null +++ b/playwright/sso-teardown.ts @@ -0,0 +1,17 @@ +import { test, type FullConfig } from '@playwright/test'; + +const { execSync } = require('node:child_process'); +const utils = require('./global-utils'); + +utils.loadEnv(); + +test('Keycloak teardown', async () => { + var kcPath = process.env.KC_SETUP_PATH; + + if( process.env.KC_KEEP_RUNNNING == "true" ) { + console.log("Keep Keycloak running"); + } else { + console.log("Keycloak stopping"); + execSync(`ENV=test KC_SETUP_PATH=${kcPath} docker-compose -f ${kcPath}/docker-compose.yml --project-directory . 
down`); + } +}); diff --git a/playwright/test.env b/playwright/test.env index 950475e5..5dc30c17 100644 --- a/playwright/test.env +++ b/playwright/test.env @@ -2,13 +2,31 @@ ### Shared Playwright conf test file Vaultwarden and Databases ### ################################################################## -############# -# Test user # -############# +######################## +# Docker-compose Config# +######################## +KC_SETUP_PATH=../docker/keycloak +KC_KEEP_RUNNNING=false + +################### +# Keycloak Config # +################### +KEYCLOAK_ADMIN=admin +KEYCLOAK_ADMIN_PASSWORD=${KEYCLOAK_ADMIN} +KC_HTTP_HOST=127.0.0.1 +KC_HTTP_PORT=8081 + +# Script parameters (use Keycloak and VaultWarden config too) +TEST_REALM=test + TEST_USER=test TEST_USER_PASSWORD=${TEST_USER} TEST_USER_MAIL="${TEST_USER}@example.com" +TEST_USER_2=test2 +TEST_USER_2_PASSWORD=${TEST_USER_2} +TEST_USER_2_MAIL="${TEST_USER_2}@example.com" + ###################### # Vaultwarden Config # ###################### @@ -18,6 +36,11 @@ WEB_VAULT_FOLDER=temp/web-vault/ ROCKET_PORT=8001 DOMAIN=http://127.0.0.1:${ROCKET_PORT} +SSO_CLIENT_ID=VaultWarden +SSO_CLIENT_SECRET=VaultWarden +SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} +SSO_PKCE=true + ########################### # Docker MariaDb container# ########################### @@ -27,6 +50,15 @@ MARIADB_USER=vaultwarden MARIADB_PWD=vaultwarden MARIADB_DB=vaultwarden +########################### +# Docker Mysql container# +########################### +MYSQL_CONTAINER=vw-mysql-test +MYSQL_PORT=3309 +MYSQL_USER=vaultwarden +MYSQL_PWD=vaultwarden +MYSQL_DB=vaultwarden + ############################ # Docker Postgres container# ############################ diff --git a/playwright/tests/sso_login.spec.ts b/playwright/tests/sso_login.spec.ts new file mode 100644 index 00000000..f01f5dba --- /dev/null +++ b/playwright/tests/sso_login.spec.ts @@ -0,0 +1,121 @@ +import { test, expect, type TestInfo } from '@playwright/test'; +const utils = require('../global-utils'); + +utils.loadEnv(); + +var proc; + +test.beforeAll('Setup', async ({ browser }, testInfo: TestInfo) => { + proc = await utils.startVaultwarden(browser, testInfo, { + SSO_ENABLED: true, + SSO_ONLY: false + }); +}); + +test.afterAll('Teardown', async ({}, testInfo: TestInfo) => { + utils.stopVaultwarden(proc, testInfo); +}); + +test('Account creation using SSO', async ({ page }) => { + // Landing page + await page.goto('/'); + await page.getByLabel(/Email address/).fill(process.env.TEST_USER_MAIL); + await page.getByRole('button', { name: 'Continue' }).click(); + + // Unlock page + await page.getByRole('link', { name: /Enterprise single sign-on/ }).click(); + + // Keycloak Login page + await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); + await page.getByLabel(/Username/).fill(process.env.TEST_USER); + await page.getByLabel('Password', { exact: true }).fill(process.env.TEST_USER_PASSWORD); + await page.getByRole('button', { name: 'Sign In' }).click(); + + // Back to Vault create account + await expect(page.getByText('Set master password')).toBeVisible(); + await page.getByLabel('Master password', { exact: true }).fill('Master password'); + await page.getByLabel('Re-type master password').fill('Master password'); + await page.getByRole('button', { name: 'Submit' }).click(); + + // We are now in the default vault page + await expect(page).toHaveTitle(/Vaults/); +}); + +test('SSO login', async ({ page }) => { + // Landing page + await page.goto('/'); 
+ await page.getByLabel(/Email address/).fill(process.env.TEST_USER_MAIL); + await page.getByRole('button', { name: 'Continue' }).click(); + + // Unlock page + await page.getByRole('link', { name: /Enterprise single sign-on/ }).click(); + + // Keycloak Login page + await expect(page.getByRole('heading', { name: 'Sign in to your account' })).toBeVisible(); + await page.getByLabel(/Username/).fill(process.env.TEST_USER); + await page.getByLabel('Password', { exact: true }).fill(process.env.TEST_USER_PASSWORD); + await page.getByRole('button', { name: 'Sign In' }).click(); + + // Back to Vault unlock page + await expect(page).toHaveTitle('Vaultwarden Web'); + await page.getByLabel('Master password').fill('Master password'); + await page.getByRole('button', { name: 'Unlock' }).click(); + + // We are now in the default vault page + await expect(page).toHaveTitle(/Vaults/); +}); + +test('Non SSO login', async ({ page }) => { + // Landing page + await page.goto('/'); + await page.getByLabel(/Email address/).fill(process.env.TEST_USER_MAIL); + await page.getByRole('button', { name: 'Continue' }).click(); + + // Unlock page + await page.getByLabel('Master password').fill('Master password'); + await page.getByRole('button', { name: 'Log in with master password' }).click(); + + // We are now in the default vault page + await expect(page).toHaveTitle(/Vaults/); +}); + + +test('Non SSO login Failure', async ({ page, browser }, testInfo: TestInfo) => { + proc = await utils.restartVaultwarden(proc, page, testInfo, { + SSO_ENABLED: true, + SSO_ONLY: true + }, false); + + // Landing page + await page.goto('/'); + await page.getByLabel(/Email address/).fill(process.env.TEST_USER_MAIL); + await page.getByRole('button', { name: 'Continue' }).click(); + + // Unlock page + await page.getByLabel('Master password').fill('Master password'); + await page.getByRole('button', { name: 'Log in with master password' }).click(); + + // An error should appear + await page.getByLabel('SSO sign-in is required') +}); + +test('SSO login Failure', async ({ page }, testInfo: TestInfo) => { + proc = await utils.restartVaultwarden(proc, page, testInfo, { + SSO_ENABLED: false + }, false); + + // Landing page + await page.goto('/'); + await page.getByLabel(/Email address/).fill(process.env.TEST_USER_MAIL); + await page.getByRole('button', { name: 'Continue' }).click(); + + // Unlock page + await page.getByRole('link', { name: /Enterprise single sign-on/ }).click(); + + // SSO identifier page + await page.getByLabel('SSO identifier').fill('Random'); + await page.getByRole('button', { name: 'Log in' }).click(); + + // An error should appear + await page.getByLabel('SSO sign-in is not available') +}); diff --git a/src/api/admin.rs b/src/api/admin.rs index 58a056b6..0350fe2d 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -286,7 +286,7 @@ async fn invite_user(data: Json, _token: AdminToken, mut conn: DbCon err_code!("User already exists", Status::Conflict.code) } - let mut user = User::new(data.email); + let mut user = User::new(data.email, None); async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { if CONFIG.mail_enabled() { diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index b1abb114..437a0ff3 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -6,7 +6,7 @@ use serde_json::Value; use crate::{ api::{ core::{log_user_event, two_factor::email}, - register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify, + register_push_device, 
unregister_push_device, AnonymousNotify, ApiResult, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, }, auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, @@ -103,13 +103,6 @@ struct KeysData { public_key: String, } -#[derive(Debug, Serialize, Deserialize)] -struct TokenPayload { - exp: i64, - email: String, - nonce: String, -} - /// Trims whitespace from password hints, and converts blank password hints to `None`. fn clean_password_hint(password_hint: &Option) -> Option { match password_hint { @@ -182,10 +175,7 @@ pub async fn _register(data: Json, mut conn: DbConn) -> JsonResult err!("Registration email does not match invite email") } } else if Invitation::take(&email, &mut conn).await { - for user_org in UserOrganization::find_invited_by_user(&user.uuid, &mut conn).await.iter_mut() { - user_org.status = UserOrgStatus::Accepted as i32; - user_org.save(&mut conn).await?; - } + UserOrganization::confirm_user_invitations(&user.uuid, &mut conn).await?; user } else if CONFIG.is_signup_allowed(&email) || (CONFIG.emergency_access_allowed() @@ -201,7 +191,7 @@ pub async fn _register(data: Json, mut conn: DbConn) -> JsonResult // because the vaultwarden admin can invite anyone, regardless // of other signup restrictions. if Invitation::take(&email, &mut conn).await || CONFIG.is_signup_allowed(&email) { - User::new(email.clone()) + User::new(email.clone(), None) } else { err!("Registration not allowed or user already exists") } @@ -272,7 +262,7 @@ async fn post_set_password(data: Json, headers: Headers, mut co // Check against the password hint setting here so if it fails, the user // can retry without losing their invitation below. - let password_hint = clean_password_hint(&data.master_password_hash); + let password_hint = clean_password_hint(&data.master_password_hint); enforce_password_hint_setting(&password_hint)?; if let Some(client_kdf_iter) = data.kdf_iterations { @@ -988,15 +978,30 @@ struct SecretVerificationRequest { master_password_hash: String, } +// Change the KDF Iterations if necessary +pub async fn kdf_upgrade(user: &mut User, pwd_hash: &str, conn: &mut DbConn) -> ApiResult<()> { + if user.password_iterations != CONFIG.password_iterations() { + user.password_iterations = CONFIG.password_iterations(); + user.set_password(pwd_hash, None, false, None); + + if let Err(e) = user.save(conn).await { + error!("Error updating user: {:#?}", e); + } + } + Ok(()) +} + #[post("/accounts/verify-password", data = "")] -fn verify_password(data: Json, headers: Headers) -> JsonResult { +async fn verify_password(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { let data: SecretVerificationRequest = data.into_inner(); - let user = headers.user; + let mut user = headers.user; if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } + kdf_upgrade(&mut user, &data.master_password_hash, &mut conn).await?; + Ok(Json(json!({ "MasterPasswordPolicy": {}, // Required for SSO login with mobile apps }))) diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index 8f9e0015..da8b1f40 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -224,7 +224,7 @@ async fn send_invite(data: Json, headers: Headers, mu invitation.save(&mut conn).await?; } - let mut user = User::new(email.clone()); + let mut user = User::new(email.clone(), None); user.save(&mut conn).await?; (user, true) } diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 
0cff0497..ac85ef92 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -58,6 +58,7 @@ pub fn routes() -> Vec { list_policies, list_policies_token, list_policies_invited_user, + get_policy_master_password, get_policy, put_policy, get_organization_tax, @@ -171,7 +172,7 @@ async fn create_organization(headers: Headers, data: Json, mut conn: Db }; let org = Organization::new(data.name, data.billing_email, private_key, public_key); - let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone()); + let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone(), None); let collection = Collection::new(org.uuid.clone(), data.collection_name, None); user_org.akey = data.key; @@ -305,9 +306,13 @@ async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json })) } +// Called during the SSO enrollment +// The `_identifier` should be the hardcoded value returned by `get_org_domain_sso_details` +// The returned `Id` will then be passed to `get_policy_master_password` which will mainly ignore it #[get("/organizations/<_identifier>/auto-enroll-status")] -fn get_auto_enroll_status(_identifier: String) -> JsonResult { +fn get_auto_enroll_status(_identifier: &str) -> JsonResult { Ok(Json(json!({ + "Id": "_", "ResetPasswordEnabled": false, // Not implemented }))) } @@ -779,6 +784,9 @@ async fn _get_org_details(org_id: &str, host: &str, user_uuid: &str, conn: &mut json!(ciphers_json) } +// Endpoint called when the user selects SSO login (body: `{ "email": "" }`). +// Returning a Domain/Organization here allows prefilling it and prevents prompting the user +// VaultWarden SSO login is not linked to an Org so we set a dummy value. #[post("/organizations/domain/sso/details")] fn get_org_domain_sso_details() -> JsonResult { Ok(Json(json!({ @@ -896,7 +904,7 @@ async fn send_invite(org_id: &str, data: Json, headers: AdminHeaders invitation.save(&mut conn).await?; } - let mut user = User::new(email.clone()); + let mut user = User::new(email.clone(), None); user.save(&mut conn).await?; user } @@ -913,7 +921,8 @@ async fn send_invite(org_id: &str, data: Json, headers: AdminHeaders } }; - let mut new_user = UserOrganization::new(user.uuid.clone(), String::from(org_id)); + let mut new_user = + UserOrganization::new(user.uuid.clone(), String::from(org_id), Some(headers.user.email.clone())); let access_all = data.access_all.unwrap_or(false); new_user.access_all = access_all; new_user.atype = new_type; @@ -1682,17 +1691,22 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso }))) } +// Called during the SSO enrollment. +// Since the VW SSO flow is not linked to an organization, it will be called with a dummy or undefined `org_id` #[allow(non_snake_case)] #[get("/organizations//policies/invited-user?")] -async fn list_policies_invited_user(org_id: String, userId: String, mut conn: DbConn) -> JsonResult { - // We should confirm the user is part of the organization, but unique domain_hints must be supported first.
- +async fn list_policies_invited_user(org_id: &str, userId: &str, mut conn: DbConn) -> JsonResult { if userId.is_empty() { err!("userId must not be empty"); } - let policies = OrgPolicy::find_by_org(&org_id, &mut conn).await; - let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); + let user_orgs = UserOrganization::find_invited_by_user(userId, &mut conn).await; + let policies_json: Vec = if user_orgs.into_iter().any(|user_org| user_org.org_uuid == org_id) { + let policies = OrgPolicy::find_by_org(org_id, &mut conn).await; + policies.iter().map(OrgPolicy::to_json).collect() + } else { + Vec::with_capacity(0) + }; Ok(Json(json!({ "Data": policies_json, @@ -1701,7 +1715,26 @@ async fn list_policies_invited_user(org_id: String, userId: String, mut conn: Db }))) } -#[get("/organizations//policies/")] +// Called during the SSO enrollment. +#[get("/organizations//policies/master-password", rank = 1)] +fn get_policy_master_password(org_id: &str, _headers: Headers) -> JsonResult { + let data = match CONFIG.sso_master_password_policy() { + Some(policy) => policy, + None => "null".to_string(), + }; + + let policy = OrgPolicy { + uuid: String::from(org_id), + org_uuid: String::from(org_id), + atype: OrgPolicyType::MasterPassword as i32, + enabled: CONFIG.sso_master_password_policy().is_some(), + data, + }; + + Ok(Json(policy.to_json())) +} + +#[get("/organizations//policies/", rank = 2)] async fn get_policy(org_id: &str, pol_type: i32, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { Some(pt) => pt, @@ -1937,7 +1970,8 @@ async fn import(org_id: &str, data: Json, headers: Headers, mut c UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites }; - let mut new_org_user = UserOrganization::new(user.uuid.clone(), String::from(org_id)); + let mut new_org_user = + UserOrganization::new(user.uuid.clone(), String::from(org_id), Some(headers.user.email.clone())); new_org_user.access_all = false; new_org_user.atype = UserOrgType::User as i32; new_org_user.status = user_org_status; diff --git a/src/api/core/public.rs b/src/api/core/public.rs index 0cdcbb63..d8d2c4d2 100644 --- a/src/api/core/public.rs +++ b/src/api/core/public.rs @@ -93,7 +93,7 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db Some(user) => user, // exists in vaultwarden None => { // User does not exist yet - let mut new_user = User::new(user_data.email.clone()); + let mut new_user = User::new(user_data.email.clone(), None); new_user.save(&mut conn).await?; if !CONFIG.mail_enabled() { @@ -109,7 +109,12 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites }; - let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); + let (org_name, org_email) = match Organization::find_by_uuid(&org_id, &mut conn).await { + Some(org) => (org.name, org.billing_email), + None => err!("Error looking up organization"), + }; + + let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone(), Some(org_email.clone())); new_org_user.set_external_id(Some(user_data.external_id.clone())); new_org_user.access_all = false; new_org_user.atype = UserOrgType::User as i32; @@ -118,11 +123,6 @@ async fn ldap_import(data: Json, token: PublicToken, mut conn: Db new_org_user.save(&mut conn).await?; if CONFIG.mail_enabled() { - let (org_name, org_email) = match Organization::find_by_uuid(&org_id, 
&mut conn).await { - Some(org) => (org.name, org.billing_email), - None => err!("Error looking up organization"), - }; - mail::send_invite( &user_data.email, &user.uuid, diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index c5bfa9e5..6b89659d 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -284,10 +284,6 @@ fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64 } pub async fn validate_duo_login(email: &str, response: &str, conn: &mut DbConn) -> EmptyResult { - // email is as entered by the user, so it needs to be normalized before - // comparison with auth_user below. - let email = &email.to_lowercase(); - let split: Vec<&str> = response.split(':').collect(); if split.len() != 2 { err!( diff --git a/src/api/identity.rs b/src/api/identity.rs index 5e1fb328..d53ac472 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -1,10 +1,10 @@ -use chrono::Utc; -use jsonwebtoken::DecodingKey; +use chrono::{NaiveDateTime, Utc}; use num_traits::FromPrimitive; -use rocket::serde::json::Json; use rocket::{ form::{Form, FromForm}, - http::CookieJar, + http::Status, + response::Redirect, + serde::json::Json, Route, }; use serde_json::Value; @@ -12,23 +12,22 @@ use serde_json::Value; use crate::{ api::{ core::{ - accounts::{PreloginData, RegisterData, _prelogin, _register}, + accounts::{PreloginData, RegisterData, _prelogin, _register, kdf_upgrade}, log_user_event, two_factor::{authenticator, duo, email, enforce_2fa_policy, webauthn, yubikey}, }, push::register_push_device, ApiResult, EmptyResult, JsonResult, }, - auth::{encode_jwt, generate_organization_api_key_login_claims, generate_ssotoken_claims, ClientHeaders, ClientIp}, + auth, + auth::{AuthMethod, AuthMethodScope, ClientHeaders, ClientIp}, db::{models::*, DbConn}, error::MapResult, - mail, util, - util::{CookieManager, CustomRedirect}, - CONFIG, + mail, sso, util, CONFIG, }; pub fn routes() -> Vec { - routes![login, prelogin, identity_register, prevalidate, authorize, oidcsignin] + routes![login, prelogin, identity_register, _prevalidate, prevalidate, authorize, oidcsignin, oidcsignin_error] } #[post("/connect/token", data = "")] @@ -42,6 +41,7 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: _check_is_some(&data.refresh_token, "refresh_token cannot be blank")?; _refresh_login(data, &mut conn).await } + "password" if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO sign-in is required"), "password" => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.password, "password cannot be blank")?; @@ -65,15 +65,17 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: _api_key_login(data, &mut user_uuid, &mut conn, &client_header.ip).await } - "authorization_code" => { + "authorization_code" if CONFIG.sso_enabled() => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.code, "code cannot be blank")?; _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; _check_is_some(&data.device_name, "device_name cannot be blank")?; _check_is_some(&data.device_type, "device_type cannot be blank")?; - _authorization_login(data, &mut user_uuid, &mut conn, &client_header.ip).await + + _sso_login(data, &mut user_uuid, &mut conn, &client_header.ip).await } + "authorization_code" => err!("SSO sign-in is not available"), t => err!("Invalid type", t), }; @@ -107,180 +109,154 @@ async fn login(data: Form, client_header: ClientHeaders, mut conn: 
login_result } +// Return Status::Unauthorized to trigger logout async fn _refresh_login(data: ConnectData, conn: &mut DbConn) -> JsonResult { // Extract token - let token = data.refresh_token.unwrap(); + let refresh_token = match data.refresh_token { + Some(token) => token, + None => err_code!("Missing refresh_token", Status::Unauthorized.code), + }; - // Get device by refresh token - let mut device = Device::find_by_refresh_token(&token, conn).await.map_res("Invalid refresh token")?; - - let scope = "api offline_access"; - let scope_vec = vec!["api".into(), "offline_access".into()]; - - // Common - let user = User::find_by_uuid(&device.user_uuid, conn).await.unwrap(); // --- // Disabled this variable, it was used to generate the JWT // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out // See: https://github.com/dani-garcia/vaultwarden/issues/4156 // --- // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); - device.save(conn).await?; + match auth::refresh_tokens(&refresh_token, conn).await { + Err(err) => err_code!(err.to_string(), Status::Unauthorized.code), + Ok((mut device, user, auth_tokens)) => { + // Save to update `device.updated_at` to track usage + device.save(conn).await?; - let result = json!({ - "access_token": access_token, - "expires_in": expires_in, - "token_type": "Bearer", - "refresh_token": device.refresh_token, - "Key": user.akey, - "PrivateKey": user.private_key, + let result = json!({ + "refresh_token": auth_tokens.refresh_token(), + "access_token": auth_tokens.access_token(), + "expires_in": auth_tokens.expires_in(), + "token_type": "Bearer", + "Key": user.akey, + "PrivateKey": user.private_key, - "Kdf": user.client_kdf_type, - "KdfIterations": user.client_kdf_iter, - "KdfMemory": user.client_kdf_memory, - "KdfParallelism": user.client_kdf_parallelism, - "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing - "scope": scope, - "unofficialServer": true, - }); + "Kdf": user.client_kdf_type, + "KdfIterations": user.client_kdf_iter, + "KdfMemory": user.client_kdf_memory, + "KdfParallelism": user.client_kdf_parallelism, + "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing + "scope": auth_tokens.scope(), + "unofficialServer": true, + }); - Ok(Json(result)) -} - -#[derive(Debug, Serialize, Deserialize)] -struct TokenPayload { - exp: i64, - email: Option, - nonce: String, -} - -async fn _authorization_login( - data: ConnectData, - user_uuid: &mut Option, - conn: &mut DbConn, - ip: &ClientIp, -) -> JsonResult { - let scope = match data.scope.as_ref() { - None => err!("Got no scope in OIDC data"), - Some(scope) => scope, - }; - if scope != "api offline_access" { - err!("Scope not supported") + Ok(Json(result)) + } } +} + +// After exchanging the code we need to check first if 2FA is needed before continuing +async fn _sso_login(data: ConnectData, user_uuid: &mut Option, conn: &mut DbConn, ip: &ClientIp) -> JsonResult { + AuthMethod::Sso.check_scope(data.scope.as_ref())?; + + // Ratelimit the login + crate::ratelimit::check_limit_login(&ip.ip)?; - let scope_vec = vec!["api".into(), "offline_access".into()]; let code = match data.code.as_ref() { None => err!("Got no code in OIDC data"), Some(code) => code, }; - let (refresh_token, 
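+    // The code/token exchange now lives in `sso::exchange_code`: it redeems the
+    // authorization code against the provider and returns the basic user information
+    // (identifier, email, optional user name) used below to match or create the
+    // Vaultwarden user, replacing the inline token exchange and id_token decoding removed here.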
id_token, user_info) = match get_auth_code_access_token(code).await { - Ok((refresh_token, id_token, user_info)) => (refresh_token, id_token, user_info), - Err(_err) => err!("Could not retrieve access token"), + let user_infos = sso::exchange_code(code, conn).await?; + + // Will trigger 2FA flow if needed + let user_data = match SsoUser::find_by_identifier_or_email(&user_infos.identifier, &user_infos.email, conn).await { + None => None, + Some((user, None)) if user.private_key.is_some() && !CONFIG.sso_signups_match_email() => { + error!( + "Login failure ({}), existing non SSO user ({}) with same email ({}) and association is disabled", + user_infos.identifier, user.uuid, user.email + ); + err_silent!("Existing non SSO user with same email") + } + Some((user, Some(sso_user))) if sso_user.identifier != user_infos.identifier => { + error!( + "Login failure ({}), existing SSO user ({}) with same email ({})", + user_infos.identifier, user.uuid, user.email + ); + err_silent!("Existing non SSO user with same email") + } + Some((user, sso_user)) => { + let (mut device, new_device) = get_device(&data, conn, &user).await?; + let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?; + + Some((user, device, new_device, twofactor_token, sso_user)) + } }; - let mut validation = jsonwebtoken::Validation::default(); - validation.insecure_disable_signature_validation(); + // We passed 2FA get full user informations + let auth_user = sso::redeem(&user_infos.state, conn).await?; - let token = - match jsonwebtoken::decode::(id_token.as_str(), &DecodingKey::from_secret(&[]), &validation) { - Err(_err) => err!("Could not decode id token"), - Ok(payload) => payload.claims, - }; - - // let expiry = token.exp; - let nonce = token.nonce; - let mut new_user = false; - - match SsoNonce::find(&nonce, conn).await { - Some(sso_nonce) => { - match sso_nonce.delete(conn).await { - Ok(_) => { - let user_email = match token.email { - Some(email) => email, - None => match user_info.email() { - None => err!("Neither id token nor userinfo contained an email"), - Some(email) => email.to_owned().to_string(), - }, - }; - let now = Utc::now().naive_utc(); - - let mut user = match User::find_by_mail(&user_email, conn).await { - Some(user) => user, - None => { - new_user = true; - User::new(user_email.clone()) - } - }; - - if new_user { - user.verified_at = Some(Utc::now().naive_utc()); - user.save(conn).await?; - } - - // Set the user_uuid here to be passed back used for event logging. - *user_uuid = Some(user.uuid.clone()); - - let (mut device, new_device) = get_device(&data, conn, &user).await; - - let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, true, conn).await?; - - if CONFIG.mail_enabled() && new_device { - if let Err(e) = - mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await - { - error!("Error sending new device email: {:#?}", e); - - if CONFIG.require_device_email() { - err!("Could not send login notification email. 
Please contact your administrator.") - } - } - } - - if CONFIG.sso_acceptall_invites() { - for user_org in UserOrganization::find_invited_by_user(&user.uuid, conn).await.iter_mut() { - user_org.status = UserOrgStatus::Accepted as i32; - user_org.save(conn).await?; - } - } - - device.refresh_token = refresh_token.clone(); - device.save(conn).await?; - - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); - device.save(conn).await?; - - let mut result = json!({ - "access_token": access_token, - "token_type": "Bearer", - "refresh_token": device.refresh_token, - "expires_in": expires_in, - "Key": user.akey, - "PrivateKey": user.private_key, - "Kdf": user.client_kdf_type, - "KdfIterations": user.client_kdf_iter, - "KdfMemory": user.client_kdf_memory, - "KdfParallelism": user.client_kdf_parallelism, - "ResetMasterPassword": user.password_hash.is_empty(), - "scope": scope, - "unofficialServer": true, - }); - - if let Some(token) = twofactor_token { - result["TwoFactorToken"] = Value::String(token); - } - - info!("User {} logged in successfully. IP: {}", user.email, ip.ip); - Ok(Json(result)) - } - Err(_) => err!("Failed to delete nonce"), - } - } + let now = Utc::now().naive_utc(); + let (user, mut device, new_device, twofactor_token, sso_user) = match user_data { None => { - err!("Invalid nonce") + if !CONFIG.is_email_domain_allowed(&user_infos.email) { + err!("Email domain not allowed"); + } + + if !user_infos.email_verified.unwrap_or(true) { + err!("Email needs to be verified before you can use VaultWarden"); + } + + let mut user = User::new(user_infos.email, user_infos.user_name); + user.verified_at = Some(now); + user.save(conn).await?; + + let (device, new_device) = get_device(&data, conn, &user).await?; + + (user, device, new_device, None, None) } + Some((mut user, device, new_device, twofactor_token, sso_user)) if user.private_key.is_none() => { + // User was invited a stub was created + user.verified_at = Some(now); + if let Some(user_name) = user_infos.user_name { + user.name = user_name; + } + + if !CONFIG.mail_enabled() { + UserOrganization::confirm_user_invitations(&user.uuid, conn).await?; + } + + user.save(conn).await?; + (user, device, new_device, twofactor_token, sso_user) + } + Some((user, device, new_device, twofactor_token, sso_user)) => { + if user.email != user_infos.email { + if CONFIG.mail_enabled() { + mail::send_sso_change_email(&user_infos.email).await?; + } + info!("User {} email changed in SSO provider from {} to {}", user.uuid, user.email, user_infos.email); + } + (user, device, new_device, twofactor_token, sso_user) + } + }; + + if sso_user.is_none() { + let user_sso = SsoUser { + user_uuid: user.uuid.clone(), + identifier: user_infos.identifier, + }; + user_sso.save(conn).await?; } + + // Set the user_uuid here to be passed back used for event logging. 
+ *user_uuid = Some(user.uuid.clone()); + + let auth_tokens = sso::create_auth_tokens( + &device, + &user, + auth_user.refresh_token, + &auth_user.access_token, + auth_user.expires_in, + )?; + + authenticated_response(&user, &mut device, new_device, auth_tokens, twofactor_token, &now, conn, ip).await } async fn _password_login( @@ -290,19 +266,11 @@ async fn _password_login( ip: &ClientIp, ) -> JsonResult { // Validate scope - let scope = data.scope.as_ref().unwrap(); - if scope != "api offline_access" { - err!("Scope not supported") - } - let scope_vec = vec!["api".into(), "offline_access".into()]; + AuthMethod::Password.check_scope(data.scope.as_ref())?; // Ratelimit the login crate::ratelimit::check_limit_login(&ip.ip)?; - if CONFIG.sso_enabled() && CONFIG.sso_only() { - err!("SSO sign-in is required"); - } - // Get the user let username = data.username.as_ref().unwrap().trim(); let mut user = match User::find_by_mail(username, conn).await { @@ -345,15 +313,7 @@ async fn _password_login( ) } - // Change the KDF Iterations - if user.password_iterations != CONFIG.password_iterations() { - user.password_iterations = CONFIG.password_iterations(); - user.set_password(password, None, false, None); - - if let Err(e) = user.save(conn).await { - error!("Error updating user: {:#?}", e); - } - } + kdf_upgrade(&mut user, password, conn).await?; // Check if the user is disabled if !user.enabled { @@ -400,12 +360,28 @@ async fn _password_login( ) } - let (mut device, new_device) = get_device(&data, conn, &user).await; + let (mut device, new_device) = get_device(&data, conn, &user).await?; - let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, false, conn).await?; + let twofactor_token = twofactor_auth(&user, &data, &mut device, ip, conn).await?; + let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password); + + authenticated_response(&user, &mut device, new_device, auth_tokens, twofactor_token, &now, conn, ip).await +} + +#[allow(clippy::too_many_arguments)] +async fn authenticated_response( + user: &User, + device: &mut Device, + new_device: bool, + auth_tokens: auth::AuthTokens, + twofactor_token: Option, + now: &NaiveDateTime, + conn: &mut DbConn, + ip: &ClientIp, +) -> JsonResult { if CONFIG.mail_enabled() && new_device { - if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await { + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, &device.name).await { error!("Error sending new device email: {:#?}", e); if CONFIG.require_device_email() { @@ -421,28 +397,19 @@ async fn _password_login( // register push device if !new_device { - register_push_device(&mut device, conn).await?; + register_push_device(device, conn).await?; } - // Common - // --- - // Disabled this variable, it was used to generate the JWT - // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); + // Save to update `device.updated_at` to track usage device.save(conn).await?; let mut result = json!({ - "access_token": access_token, - "expires_in": expires_in, + "access_token": auth_tokens.access_token(), + "expires_in": auth_tokens.expires_in(), "token_type": "Bearer", - "refresh_token": device.refresh_token, + "refresh_token": 
auth_tokens.refresh_token(), "Key": user.akey, "PrivateKey": user.private_key, - //"TwoFactorToken": "11122233333444555666777888999" - "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "KdfMemory": user.client_kdf_memory, @@ -453,7 +420,7 @@ async fn _password_login( "object": "masterPasswordPolicy", }, - "scope": scope, + "scope": auth_tokens.scope(), "unofficialServer": true, "UserDecryptionOptions": { "HasMasterPassword": !user.password_hash.is_empty(), @@ -465,7 +432,7 @@ async fn _password_login( result["TwoFactorToken"] = Value::String(token); } - info!("User {} logged in successfully. IP: {}", username, ip.ip); + info!("User {} logged in successfully. IP: {}", user.email, ip.ip); Ok(Json(result)) } @@ -479,9 +446,9 @@ async fn _api_key_login( crate::ratelimit::check_limit_login(&ip.ip)?; // Validate scope - match data.scope.as_ref().unwrap().as_ref() { - "api" => _user_api_key_login(data, user_uuid, conn, ip).await, - "api.organization" => _organization_api_key_login(data, conn, ip).await, + match data.scope.as_ref() { + Some(scope) if scope == &AuthMethod::UserApiKey.scope() => _user_api_key_login(data, user_uuid, conn, ip).await, + Some(scope) if scope == &AuthMethod::OrgApiKey.scope() => _organization_api_key_login(data, conn, ip).await, _ => err!("Scope not supported"), } } @@ -529,7 +496,7 @@ async fn _user_api_key_login( ) } - let (mut device, new_device) = get_device(&data, conn, &user).await; + let (mut device, new_device) = get_device(&data, conn, &user).await?; if CONFIG.mail_enabled() && new_device { let now = Utc::now().naive_utc(); @@ -547,15 +514,15 @@ async fn _user_api_key_login( } } - // Common - let scope_vec = vec!["api".into()]; // --- // Disabled this variable, it was used to generate the JWT // Because this might get used in the future, and is add by the Bitwarden Server, lets keep it, but then commented out // See: https://github.com/dani-garcia/vaultwarden/issues/4156 // --- // let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, conn).await; - let (access_token, expires_in) = device.refresh_tokens(&user, scope_vec); + let access_claims = auth::LoginJwtClaims::default(&device, &user, &auth::AuthMethod::UserApiKey); + + // Save to update `device.updated_at` to track usage device.save(conn).await?; info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); @@ -563,8 +530,8 @@ async fn _user_api_key_login( // Note: No refresh_token is returned. The CLI just repeats the // client_credentials login flow when the existing token expires. let result = json!({ - "access_token": access_token, - "expires_in": expires_in, + "access_token": access_claims.token(), + "expires_in": access_claims.expires_in(), "token_type": "Bearer", "Key": user.akey, "PrivateKey": user.private_key, @@ -574,7 +541,7 @@ async fn _user_api_key_login( "KdfMemory": user.client_kdf_memory, "KdfParallelism": user.client_kdf_parallelism, "ResetMasterPassword": false, // TODO: Same as above - "scope": "api", + "scope": auth::AuthMethod::UserApiKey.scope(), "unofficialServer": true, }); @@ -599,20 +566,20 @@ async fn _organization_api_key_login(data: ConnectData, conn: &mut DbConn, ip: & err!("Incorrect client_secret", format!("IP: {}. 
Organization: {}.", ip.ip, org_api_key.org_uuid)) } - let claim = generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid); - let access_token = crate::auth::encode_jwt(&claim); + let claim = auth::generate_organization_api_key_login_claims(org_api_key.uuid, org_api_key.org_uuid); + let access_token = auth::encode_jwt(&claim); Ok(Json(json!({ "access_token": access_token, "expires_in": 3600, "token_type": "Bearer", - "scope": "api.organization", + "scope": auth::AuthMethod::OrgApiKey.scope(), "unofficialServer": true, }))) } /// Retrieves an existing device or creates a new device from ConnectData and the User -async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Device, bool) { +async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> ApiResult<(Device, bool)> { // On iOS, device_type sends "iOS", on others it sends a number // When unknown or unable to parse, return 14, which is 'Unknown Browser' let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(14); @@ -624,12 +591,13 @@ async fn get_device(data: &ConnectData, conn: &mut DbConn, user: &User) -> (Devi let device = match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { Some(device) => device, None => { + let device = Device::new(device_id, user.uuid.clone(), device_name, device_type); new_device = true; - Device::new(device_id, user.uuid.clone(), device_name, device_type) + device } }; - (device, new_device) + Ok((device, new_device)) } async fn twofactor_auth( @@ -637,7 +605,6 @@ async fn twofactor_auth( data: &ConnectData, device: &mut Device, ip: &ClientIp, - is_sso: bool, conn: &mut DbConn, ) -> ApiResult> { let twofactors = TwoFactor::find_by_user(&user.uuid, conn).await; @@ -655,17 +622,7 @@ async fn twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => { - if is_sso { - if CONFIG.sso_only() { - err!("2FA not supported with SSO login, contact your administrator"); - } else { - err!("2FA not supported with SSO login, log in directly using email and master password"); - } - } else { - err_json!(_json_err_twofactor(&twofactor_ids, &user.uuid, conn).await?, "2FA token not provided"); - } - } + None => err_json!(_json_err_twofactor(&twofactor_ids, &user.uuid, conn).await?, "2FA token not provided"), }; let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); @@ -681,9 +638,7 @@ async fn twofactor_auth( } Some(TwoFactorType::Webauthn) => webauthn::validate_webauthn_login(&user.uuid, twofactor_code, conn).await?, Some(TwoFactorType::YubiKey) => yubikey::validate_yubikey_login(twofactor_code, &selected_data?).await?, - Some(TwoFactorType::Duo) => { - duo::validate_duo_login(data.username.as_ref().unwrap().trim(), twofactor_code, conn).await? - } + Some(TwoFactorType::Duo) => duo::validate_duo_login(&user.email, twofactor_code, conn).await?, Some(TwoFactorType::Email) => { email::validate_email_code_str(&user.uuid, twofactor_code, &selected_data?, conn).await? 
} @@ -711,12 +666,13 @@ async fn twofactor_auth( TwoFactorIncomplete::mark_complete(&user.uuid, &device.uuid, conn).await?; - if !CONFIG.disable_2fa_remember() && remember == 1 { - Ok(Some(device.refresh_twofactor_remember())) + let two_factor = if !CONFIG.disable_2fa_remember() && remember == 1 { + Some(device.refresh_twofactor_remember()) } else { device.delete_twofactor_remember(); - Ok(None) - } + None + }; + Ok(two_factor) } fn _selected_data(tf: Option) -> ApiResult { @@ -870,176 +826,120 @@ fn _check_is_some(value: &Option, msg: &str) -> EmptyResult { Ok(()) } +// Deprecated but still needed for Mobile apps #[get("/account/prevalidate")] -#[allow(non_snake_case)] +fn _prevalidate() -> JsonResult { + prevalidate() +} + +#[get("/sso/prevalidate")] fn prevalidate() -> JsonResult { - let claims = generate_ssotoken_claims(); - let ssotoken = encode_jwt(&claims); - Ok(Json(json!({ - "token": ssotoken, - }))) + if CONFIG.sso_enabled() { + let sso_token = sso::encode_ssotoken_claims(); + Ok(Json(json!({ + "token": sso_token, + }))) + } else { + err!("SSO sign-in is not available") + } } -use openidconnect::core::{CoreClient, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims}; -use openidconnect::reqwest::async_http_client; -use openidconnect::{ - AuthenticationFlow, AuthorizationCode, ClientId, ClientSecret, CsrfToken, IssuerUrl, Nonce, OAuth2TokenResponse, - RedirectUrl, Scope, -}; - -async fn get_client_from_sso_config() -> ApiResult { - let redirect = CONFIG.sso_callback_path(); - let client_id = ClientId::new(CONFIG.sso_client_id()); - let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); - let issuer_url = match IssuerUrl::new(CONFIG.sso_authority()) { - Ok(issuer) => issuer, - Err(_err) => err!("invalid issuer URL"), - }; - - let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { - Ok(metadata) => metadata, - Err(_err) => { - err!("Failed to discover OpenID provider") - } - }; - - let redirect_uri = match RedirectUrl::new(redirect) { - Ok(uri) => uri, - Err(err) => err!("Invalid redirection url: {}", err.to_string()), - }; - let client = CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) - .set_redirect_uri(redirect_uri); - - Ok(client) +#[get("/connect/oidc-signin?&", rank = 1)] +async fn oidcsignin(code: String, state: String, conn: DbConn) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Ok { + code, + state, + }, + &conn, + ) + .await } -#[get("/connect/oidc-signin?")] -fn oidcsignin(code: String, jar: &CookieJar<'_>, _conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - - let redirect_uri = match cookiemanager.get_cookie("redirect_uri".to_string()) { - None => err!("No redirect_uri in cookie"), - Some(uri) => uri, - }; - let orig_state = match cookiemanager.get_cookie("state".to_string()) { - None => err!("No state in cookie"), - Some(state) => state, - }; - - cookiemanager.delete_cookie("redirect_uri".to_string()); - cookiemanager.delete_cookie("state".to_string()); - - let redirect = CustomRedirect { - url: format!("{redirect_uri}?code={code}&state={orig_state}"), - headers: vec![], - }; - - Ok(redirect) +// Bitwarden client appear to only care for code and state so we pipe it through +// cf: https://github.com/bitwarden/clients/blob/8e46ef1ae5be8b62b0d3d0b9d1b1c62088a04638/libs/angular/src/auth/components/sso.component.ts#L68C11-L68C23) +#[get("/connect/oidc-signin?&&", rank = 2)] +async fn oidcsignin_error( + state: 
String, + error: String, + error_description: Option, + conn: DbConn, +) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Error { + state, + error, + error_description, + }, + &conn, + ) + .await } -#[derive(FromForm)] -#[allow(non_snake_case)] +// iss and scope parameters are needed for redirection to work on IOS. +async fn oidcsignin_redirect(state: String, wrapper: sso::OIDCCodeWrapper, conn: &DbConn) -> ApiResult { + let code = sso::encode_code_claims(wrapper); + + let nonce = match SsoNonce::find(&state, conn).await { + Some(n) => n, + None => err!(format!("Failed to retrive redirect_uri with {state}")), + }; + + let mut url = match url::Url::parse(&nonce.redirect_uri) { + Ok(url) => url, + Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)), + }; + + url.query_pairs_mut() + .append_pair("code", &code) + .append_pair("state", &state) + .append_pair("scope", &AuthMethod::Sso.scope()) + .append_pair("iss", &CONFIG.domain()); + + debug!("Redirection to {url}"); + + Ok(Redirect::temporary(String::from(url))) +} + +#[derive(Debug, Clone, Default, FromForm)] struct AuthorizeData { - #[allow(unused)] #[field(name = uncased("client_id"))] #[field(name = uncased("clientid"))] - client_id: Option, + client_id: String, #[field(name = uncased("redirect_uri"))] #[field(name = uncased("redirecturi"))] - redirect_uri: Option, + redirect_uri: String, #[allow(unused)] - #[field(name = uncased("response_type"))] - #[field(name = uncased("responsetype"))] response_type: Option, #[allow(unused)] - #[field(name = uncased("scope"))] scope: Option, - #[field(name = uncased("state"))] - state: Option, + state: String, #[allow(unused)] - #[field(name = uncased("code_challenge"))] code_challenge: Option, #[allow(unused)] - #[field(name = uncased("code_challenge_method"))] code_challenge_method: Option, #[allow(unused)] - #[field(name = uncased("response_mode"))] response_mode: Option, #[allow(unused)] - #[field(name = uncased("domain_hint"))] domain_hint: Option, #[allow(unused)] #[field(name = uncased("ssoToken"))] - ssoToken: Option, + sso_token: Option, } +// The `redirect_uri` will change depending of the client (web, android, ios ..) #[get("/connect/authorize?")] -async fn authorize(data: AuthorizeData, jar: &CookieJar<'_>, mut conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - match get_client_from_sso_config().await { - Ok(client) => { - let (auth_url, _csrf_state, nonce) = client - .authorize_url( - AuthenticationFlow::::AuthorizationCode, - CsrfToken::new_random, - Nonce::new_random, - ) - .add_scope(Scope::new("email".to_string())) - .add_scope(Scope::new("profile".to_string())) - .url(); +async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { + let AuthorizeData { + client_id, + redirect_uri, + state, + .. 
+ } = data; - let sso_nonce = SsoNonce::new(nonce.secret().to_string()); - sso_nonce.save(&mut conn).await?; + let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; - let redirect_uri = match data.redirect_uri { - None => err!("No redirect_uri in data"), - Some(uri) => uri, - }; - cookiemanager.set_cookie("redirect_uri".to_string(), redirect_uri); - let state = match data.state { - None => err!("No state in data"), - Some(state) => state, - }; - cookiemanager.set_cookie("state".to_string(), state); - - let redirect = CustomRedirect { - url: format!("{}", auth_url), - headers: vec![], - }; - - Ok(redirect) - } - Err(_err) => err!("Unable to find client from identifier"), - } -} - -async fn get_auth_code_access_token(code: &str) -> ApiResult<(String, String, CoreUserInfoClaims)> { - let oidc_code = AuthorizationCode::new(String::from(code)); - match get_client_from_sso_config().await { - Ok(client) => match client.exchange_code(oidc_code).request_async(async_http_client).await { - Ok(token_response) => { - let refresh_token = match token_response.refresh_token() { - Some(token) => token.secret().to_string(), - None => String::new(), - }; - let id_token = match token_response.extra_fields().id_token() { - None => err!("Token response did not contain an id_token"), - Some(token) => token.to_string(), - }; - - let user_info: CoreUserInfoClaims = - match client.user_info(token_response.access_token().to_owned(), None) { - Err(_err) => err!("Token response did not contain user_info"), - Ok(info) => match info.request_async(async_http_client).await { - Err(_err) => err!("Request to user_info endpoint failed"), - Ok(claim) => claim, - }, - }; - - Ok((refresh_token, id_token, user_info)) - } - Err(err) => err!("Failed to contact token endpoint: {}", err.to_string()), - }, - Err(_err) => err!("Unable to find client"), - } + Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/mod.rs b/src/api/mod.rs index d5281bda..753c60e1 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -35,7 +35,7 @@ pub use crate::api::{ use crate::db::{models::User, DbConn}; // Type aliases for API methods results -type ApiResult = Result; +pub type ApiResult = Result; pub type JsonResult = ApiResult>; pub type EmptyResult = ApiResult<()>; diff --git a/src/auth.rs b/src/auth.rs index d684249d..743287a6 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,6 +1,5 @@ // JWT Handling -// -use chrono::{TimeDelta, Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use num_traits::FromPrimitive; use once_cell::sync::{Lazy, OnceCell}; @@ -9,18 +8,29 @@ use openssl::rsa::Rsa; use serde::de::DeserializeOwned; use serde::ser::Serialize; -use crate::{error::Error, CONFIG}; +use crate::{ + api::ApiResult, + db::{ + models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, + DbConn, + }, + error::Error, + sso, CONFIG, +}; const JWT_ALGORITHM: Algorithm = Algorithm::RS256; -pub static DEFAULT_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); +// Limit when BitWarden consider the token as expired +pub static BW_EXPIRATION: Lazy = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); + +pub static DEFAULT_REFRESH_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_days(30).unwrap()); +pub static DEFAULT_ACCESS_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); static JWT_HEADER: Lazy = Lazy::new(|| Header::new(JWT_ALGORITHM)); pub static JWT_LOGIN_ISSUER: Lazy = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); static 
JWT_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin())); static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin())); -static JWT_SSOTOKEN_ISSUER: Lazy = Lazy::new(|| format!("{}|ssotoken", CONFIG.domain_origin())); static JWT_DELETE_ISSUER: Lazy = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin())); static JWT_VERIFYEMAIL_ISSUER: Lazy = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin())); static JWT_ADMIN_ISSUER: Lazy = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin())); @@ -73,7 +83,7 @@ pub fn encode_jwt(claims: &T) -> String { } } -fn decode_jwt(token: &str, issuer: String) -> Result { +pub fn decode_jwt(token: &str, issuer: String) -> Result { let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); validation.leeway = 30; // 30 seconds validation.validate_exp = true; @@ -92,6 +102,10 @@ fn decode_jwt(token: &str, issuer: String) -> Result Result { + decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) +} + pub fn decode_login(token: &str) -> Result { decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) } @@ -165,6 +179,73 @@ pub struct LoginJwtClaims { pub amr: Vec, } +impl LoginJwtClaims { + pub fn new(device: &Device, user: &User, nbf: i64, exp: i64, scope: Vec, now: DateTime) -> Self { + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // --- + // fn arg: orgs: Vec, + // --- + // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); + // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); + // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); + // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + + if exp <= (now + *BW_EXPIRATION).timestamp() { + warn!("Raise access_token lifetime to more than 5min.") + } + + // Create the JWT claims struct, to send to the client + Self { + nbf, + exp, + iss: JWT_LOGIN_ISSUER.to_string(), + sub: user.uuid.clone(), + premium: true, + name: user.name.clone(), + email: user.email.clone(), + email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), + + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // See: https://github.com/dani-garcia/vaultwarden/issues/4156 + // --- + // orgowner, + // orgadmin, + // orguser, + // orgmanager, + sstamp: user.security_stamp.clone(), + device: device.uuid.clone(), + scope, + amr: vec!["Application".into()], + } + } + + pub fn default(device: &Device, user: &User, auth_method: &AuthMethod) -> Self { + let time_now = Utc::now(); + Self::new( + device, + user, + time_now.timestamp(), + (time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(), + auth_method.scope_vec(), + time_now, + ) + } + + pub fn token(&self) -> String { + encode_jwt(&self) + } + + pub fn expires_in(&self) -> i64 { + self.exp - Utc::now().timestamp() + } +} + #[derive(Debug, 
Serialize, Deserialize)] pub struct InviteJwtClaims { // Not before @@ -318,28 +399,6 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims { } } -#[derive(Debug, Serialize, Deserialize)] -pub struct SsoTokenJwtClaims { - // Not before - pub nbf: i64, - // Expiration time - pub exp: i64, - // Issuer - pub iss: String, - // Subject - pub sub: String, -} - -pub fn generate_ssotoken_claims() -> SsoTokenJwtClaims { - let time_now = Utc::now().naive_utc(); - SsoTokenJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(2)).timestamp(), - iss: JWT_SSOTOKEN_ISSUER.to_string(), - sub: "vaultwarden".to_string(), - } -} - pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims { let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); @@ -379,11 +438,6 @@ use rocket::{ request::{FromRequest, Outcome, Request}, }; -use crate::db::{ - models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, - DbConn, -}; - pub struct Host { pub host: String, } @@ -887,3 +941,150 @@ impl<'r> FromRequest<'r> for WsAccessTokenHeader { }) } } + +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AuthMethod { + OrgApiKey, + Password, + Sso, + UserApiKey, +} + +pub trait AuthMethodScope { + fn scope_vec(&self) -> Vec; + fn scope(&self) -> String; + fn check_scope(&self, scope: Option<&String>) -> ApiResult; +} + +impl AuthMethodScope for AuthMethod { + fn scope(&self) -> String { + match self { + AuthMethod::OrgApiKey => "api.organization".to_string(), + AuthMethod::Password => "api offline_access".to_string(), + AuthMethod::Sso => "api offline_access".to_string(), + AuthMethod::UserApiKey => "api".to_string(), + } + } + + fn scope_vec(&self) -> Vec { + self.scope().split_whitespace().map(str::to_string).collect() + } + + fn check_scope(&self, scope: Option<&String>) -> ApiResult { + let method_scope = self.scope(); + match scope { + None => err!("Missing scope"), + Some(scope) if scope == &method_scope => Ok(method_scope), + Some(scope) => err!(format!("Scope ({scope}) not supported")), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum TokenWrapper { + Access(String), + Refresh(String), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct RefreshJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: AuthMethod, + + pub device_token: String, + + pub token: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct AuthTokens { + pub refresh_claims: RefreshJwtClaims, + pub access_claims: LoginJwtClaims, +} + +impl AuthTokens { + pub fn refresh_token(&self) -> String { + encode_jwt(&self.refresh_claims) + } + + pub fn access_token(&self) -> String { + self.access_claims.token() + } + + pub fn expires_in(&self) -> i64 { + self.access_claims.expires_in() + } + + pub fn scope(&self) -> String { + self.refresh_claims.sub.scope() + } + + // Create refresh_token and access_token with default validity + pub fn new(device: &Device, user: &User, sub: AuthMethod) -> Self { + let time_now = Utc::now(); + + let access_claims = LoginJwtClaims::default(device, user, &sub); + + let refresh_claims = RefreshJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp(), + iss: JWT_LOGIN_ISSUER.to_string(), + sub, + device_token: device.refresh_token.clone(), + token: None, + }; + + Self { + 
refresh_claims, + access_claims, + } + } +} + +pub async fn refresh_tokens(refresh_token: &str, conn: &mut DbConn) -> ApiResult<(Device, User, AuthTokens)> { + let time_now = Utc::now(); + + let refresh_claims = match decode_refresh(refresh_token) { + Err(err) => err!(format!("Unable to read refresh_token: {err}")), + Ok(claims) => claims, + }; + + // Get device by refresh token + let mut device = match Device::find_by_refresh_token(&refresh_claims.device_token, conn).await { + None => err!("Invalid refresh token"), + Some(device) => device, + }; + + // Roll the Device.refresh_token; this way it invalidates old JWT refresh_tokens + device.roll_refresh_token(); + device.save(conn).await?; + + let user = match User::find_by_uuid(&device.user_uuid, conn).await { + None => err!("Unable to find user"), + Some(user) => user, + }; + + if refresh_claims.exp < time_now.timestamp() { + err!("Expired refresh token"); + } + + let auth_tokens = match refresh_claims.sub { + AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => { + AuthTokens::new(&device, &user, refresh_claims.sub) + } + AuthMethod::Sso if CONFIG.sso_enabled() => sso::exchange_refresh_token(&device, &user, &refresh_claims).await?, + AuthMethod::Sso => err!("SSO is now disabled, log in again using email and master password"), + AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, log in again"), + AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub), + _ => err!("Invalid auth method, cannot refresh token"), + }; + + Ok((device, user, auth_tokens)) +} diff --git a/src/config.rs b/src/config.rs index 6b55a9b9..ce5d64d1 100644 --- a/src/config.rs +++ b/src/config.rs @@ -409,7 +409,9 @@ make_config! { /// Auth Request cleanup schedule |> Cron schedule of the job that cleans old auth requests from the auth request. /// Defaults to every minute. Set blank to disable this job. auth_request_purge_schedule: String, false, def, "30 * * * * *".to_string(); - + /// Purge incomplete sso nonce. + /// Defaults to daily. Set blank to disable this job. + purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string(); }, /// General settings @@ -609,19 +611,33 @@ make_config!
{ /// OpenID Connect SSO settings sso { /// Enabled - sso_enabled: bool, true, def, false; - /// Force SSO login - sso_only: bool, true, def, false; + sso_enabled: bool, true, def, false; + /// Disable Email+Master Password login + sso_only: bool, true, def, false; + /// Associate existing user based on email + sso_signups_match_email: bool, true, def, true; /// Client ID - sso_client_id: String, true, def, String::new(); + sso_client_id: String, false, def, String::new(); /// Client Key - sso_client_secret: Pass, true, def, String::new(); + sso_client_secret: Pass, false, def, String::new(); /// Authority Server - sso_authority: String, true, def, String::new(); + sso_authority: String, false, def, String::new(); + /// Scopes required for authorize + sso_scopes: String, false, def, "email profile".to_string(); + /// Additional authorization url parameters + sso_authorize_extra_params: String, false, def, String::new(); + /// Use PKCE during Auth Code flow + sso_pkce: bool, false, def, false; + /// Regex for additional trusted Id token audience + sso_audience_trusted: String, false, option; /// CallBack Path - sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); - /// Allow workaround so SSO logins accept all invites - sso_acceptall_invites: bool, true, def, false; + sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); + /// Optional sso master password policy + sso_master_password_policy: String, true, option; + /// Use sso only for auth, not the session lifecycle + sso_auth_only_not_session: bool, true, def, false; + /// Log all tokens, LOG_LEVEL=debug is required + sso_debug_tokens: bool, true, def, false; }, /// Yubikey settings @@ -647,7 +663,7 @@ make_config! { /// Host duo_host: String, true, option; /// Application Key (generated automatically) - _duo_akey: Pass, false, option; + _duo_akey: Pass, true, option; }, /// SMTP Email Settings @@ -833,10 +849,14 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { err!("All Duo options need to be set for global Duo support") } - if cfg.sso_enabled - && (cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty()) - { - err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + if cfg.sso_enabled { + if cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty() { + err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + } + + internal_sso_issuer_url(&cfg.sso_authority)?; + internal_sso_redirect_url(&cfg.sso_callback_path)?; + check_master_password_policy(&cfg.sso_master_password_policy)?; } if cfg._enable_yubico { @@ -1011,6 +1031,28 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { Ok(()) } +fn internal_sso_issuer_url(sso_authority: &String) -> Result<openidconnect::IssuerUrl, Error> { + match openidconnect::IssuerUrl::new(sso_authority.clone()) { + Err(err) => err!(format!("Invalid sso_authority URL ({sso_authority}): {err}")), + Ok(issuer_url) => Ok(issuer_url), + } +} + +fn internal_sso_redirect_url(sso_callback_path: &String) -> Result<openidconnect::RedirectUrl, Error> { + match openidconnect::RedirectUrl::new(sso_callback_path.clone()) { + Err(err) => err!(format!("Invalid sso_callback_path ({sso_callback_path} built using `domain`) URL: {err}")), + Ok(redirect_url) => Ok(redirect_url), + } +} + +fn check_master_password_policy(sso_master_password_policy: &Option<String>) -> Result<(), Error> { + let policy = sso_master_password_policy.as_ref().map(|mpp|
serde_json::from_str::(mpp)); + if let Some(Err(error)) = policy { + err!(format!("Invalid sso_master_password_policy ({error}), Ensure that it's correctly escaped with ''")) + } + Ok(()) +} + /// Extracts an RFC 6454 web origin from a URL. fn extract_url_origin(url: &str) -> String { match Url::parse(url) { @@ -1088,6 +1130,26 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls "starttls".to_string() } +/// Allow to parse a multiline list of Key/Values (`key=value`) +/// Will ignore comment lines (starting with `//`) +fn parse_param_list(config: String) -> Vec<(String, String)> { + config + .lines() + .map(|l| l.trim()) + .filter(|l| !l.is_empty() && !l.starts_with("//")) + .filter_map(|l| { + let split = l.split('=').collect::>(); + match &split[..] { + [key, value] => Some(((*key).to_string(), (*value).to_string())), + _ => { + println!("[WARNING] Failed to parse ({l}). Expected key=value"); + None + } + } + }) + .collect() +} + impl Config { pub fn load() -> Result { // Loading from env and file @@ -1277,6 +1339,22 @@ impl Config { } } } + + pub fn sso_issuer_url(&self) -> Result { + internal_sso_issuer_url(&self.sso_authority()) + } + + pub fn sso_redirect_url(&self) -> Result { + internal_sso_redirect_url(&self.sso_callback_path()) + } + + pub fn sso_scopes_vec(&self) -> Vec { + self.sso_scopes().split_whitespace().map(str::to_string).collect() + } + + pub fn sso_authorize_extra_params_vec(&self) -> Vec<(String, String)> { + parse_param_list(self.sso_authorize_extra_params()) + } } use handlebars::{ @@ -1335,6 +1413,7 @@ where reg!("email/send_single_org_removed_from_org", ".html"); reg!("email/set_password", ".html"); reg!("email/smtp_test", ".html"); + reg!("email/sso_change_email", ".html"); reg!("email/twofactor_email", ".html"); reg!("email/verify_email", ".html"); reg!("email/welcome_must_verify", ".html"); diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 60c63589..e5e165e5 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -1,6 +1,7 @@ use chrono::{NaiveDateTime, Utc}; +use data_encoding::{BASE64, BASE64URL}; -use crate::{crypto, CONFIG}; +use crate::crypto; use core::fmt; db_object! 
{ @@ -42,13 +43,16 @@ impl Device { push_uuid: None, push_token: None, - refresh_token: String::new(), + refresh_token: crypto::encode_random_bytes::<64>(BASE64URL), twofactor_remember: None, } } + pub fn roll_refresh_token(&mut self) { + self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL) + } + pub fn refresh_twofactor_remember(&mut self) -> String { - use data_encoding::BASE64; let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64); self.twofactor_remember = Some(twofactor_remember.clone()); @@ -59,61 +63,6 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, scope: Vec) -> (String, i64) { - // If there is no refresh token, we create one - if self.refresh_token.is_empty() { - use data_encoding::BASE64URL; - self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL); - } - - // Update the expiration of the device and the last update date - let time_now = Utc::now(); - self.updated_at = time_now.naive_utc(); - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // --- - // fn arg: orgs: Vec, - // --- - // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); - - // Create the JWT claims struct, to send to the client - use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; - let claims = LoginJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + *DEFAULT_VALIDITY).timestamp(), - iss: JWT_LOGIN_ISSUER.to_string(), - sub: user.uuid.clone(), - - premium: true, - name: user.name.clone(), - email: user.email.clone(), - email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // orgowner, - // orgadmin, - // orguser, - // orgmanager, - sstamp: user.security_stamp.clone(), - device: self.uuid.clone(), - scope, - amr: vec!["Application".into()], - }; - - (encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds()) - } - pub fn is_push_device(&self) -> bool { matches!(DeviceType::from_i32(self.atype), DeviceType::Android | DeviceType::Ios) } diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index 9a4e7585..465ea5c7 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -32,4 +32,4 @@ pub use self::send::{Send, SendType}; pub use self::sso_nonce::SsoNonce; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor_incomplete::TwoFactorIncomplete; -pub use self::user::{Invitation, User, UserKdfType, UserStampException}; +pub use self::user::{Invitation, SsoUser, User, UserKdfType, UserStampException}; diff --git 
a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index e5a845f6..92f4d999 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -27,7 +27,7 @@ pub enum OrgPolicyType { MasterPassword = 1, PasswordGenerator = 2, SingleOrg = 3, - RequireSso = 4, + // RequireSso = 4, // Not supported PersonalOwnership = 5, DisableSend = 6, SendOptions = 7, @@ -77,12 +77,11 @@ impl OrgPolicy { } pub fn to_json(&self) -> Value { - let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); json!({ "id": self.uuid, "organizationId": self.org_uuid, "type": self.atype, - "data": data_json, + "data": serde_json::from_str(&self.data).unwrap_or(Value::Null), "enabled": self.enabled, "object": "policy", }) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index fd952955..b7e68475 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -25,6 +25,7 @@ db_object! { pub uuid: String, pub user_uuid: String, pub org_uuid: String, + pub invited_by_email: Option, pub access_all: bool, pub akey: String, @@ -167,7 +168,7 @@ impl Organization { "useTotp": true, "usePolicies": true, // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported // "UseKeyConnector": false, // Not supported "selfHost": true, "useApi": true, @@ -197,12 +198,13 @@ impl Organization { static ACTIVATE_REVOKE_DIFF: i32 = 128; impl UserOrganization { - pub fn new(user_uuid: String, org_uuid: String) -> Self { + pub fn new(user_uuid: String, org_uuid: String, invited_by_email: Option) -> Self { Self { uuid: crate::util::get_uuid(), user_uuid, org_uuid, + invited_by_email, access_all: false, akey: String::new(), @@ -385,7 +387,7 @@ impl UserOrganization { "resetPasswordEnrolled": self.reset_password_key.is_some(), "useResetPassword": CONFIG.mail_enabled(), "ssoBound": false, // Not supported - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported "useKeyConnector": false, "useSecretsManager": false, "usePasswordManager": true, @@ -652,6 +654,17 @@ impl UserOrganization { }} } + pub async fn confirm_user_invitations(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { + db_run! { conn: { + diesel::update(users_organizations::table) + .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) + .set(users_organizations::status.eq(UserOrgStatus::Accepted as i32)) + .execute(conn) + .map_res("Error confirming invitations") + }} + } + pub async fn find_any_state_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec { db_run! { conn: { users_organizations::table diff --git a/src/db/models/sso_nonce.rs b/src/db/models/sso_nonce.rs index 0a9533e0..881f075b 100644 --- a/src/db/models/sso_nonce.rs +++ b/src/db/models/sso_nonce.rs @@ -1,21 +1,34 @@ +use chrono::{NaiveDateTime, Utc}; + use crate::api::EmptyResult; -use crate::db::DbConn; +use crate::db::{DbConn, DbPool}; use crate::error::MapResult; +use crate::sso::NONCE_EXPIRATION; db_object! 
{ #[derive(Identifiable, Queryable, Insertable)] #[diesel(table_name = sso_nonce)] - #[diesel(primary_key(nonce))] + #[diesel(primary_key(state))] pub struct SsoNonce { + pub state: String, pub nonce: String, + pub verifier: Option, + pub redirect_uri: String, + pub created_at: NaiveDateTime, } } /// Local methods impl SsoNonce { - pub fn new(nonce: String) -> Self { - Self { + pub fn new(state: String, nonce: String, verifier: Option, redirect_uri: String) -> Self { + let now = Utc::now().naive_utc(); + + SsoNonce { + state, nonce, + verifier, + redirect_uri, + created_at: now, } } } @@ -28,7 +41,7 @@ impl SsoNonce { diesel::replace_into(sso_nonce::table) .values(SsoNonceDb::to_db(self)) .execute(conn) - .map_res("Error saving SSO device") + .map_res("Error saving SSO nonce") } postgresql { let value = SsoNonceDb::to_db(self); @@ -40,21 +53,37 @@ impl SsoNonce { } } - pub async fn delete(self, conn: &mut DbConn) -> EmptyResult { + pub async fn delete(state: &str, conn: &mut DbConn) -> EmptyResult { db_run! { conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::nonce.eq(self.nonce))) + diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state))) .execute(conn) .map_res("Error deleting SSO nonce") }} } - pub async fn find(nonce: &str, conn: &mut DbConn) -> Option { + pub async fn find(state: &str, conn: &DbConn) -> Option { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; db_run! { conn: { sso_nonce::table - .filter(sso_nonce::nonce.eq(nonce)) + .filter(sso_nonce::state.eq(state)) + .filter(sso_nonce::created_at.ge(oldest)) .first::(conn) .ok() .from_db() }} } + + pub async fn delete_expired(pool: DbPool) -> EmptyResult { + debug!("Purging expired sso_nonce"); + if let Ok(conn) = pool.get().await { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; + db_run! { conn: { + diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest))) + .execute(conn) + .map_res("Error deleting expired SSO nonce") + }} + } else { + err!("Failed to get DB connection while purging expired sso_nonce") + } + } } diff --git a/src/db/models/user.rs b/src/db/models/user.rs index a02b694d..1369aa07 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -5,7 +5,7 @@ use crate::crypto; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)] #[diesel(table_name = users)] #[diesel(treat_none_as_null = true)] #[diesel(primary_key(uuid))] @@ -60,6 +60,14 @@ db_object! { pub struct Invitation { pub email: String, } + + #[derive(Identifiable, Queryable, Insertable, Selectable)] + #[diesel(table_name = sso_users)] + #[diesel(primary_key(user_uuid))] + pub struct SsoUser { + pub user_uuid: String, + pub identifier: String, + } } pub enum UserKdfType { @@ -85,7 +93,7 @@ impl User { pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32; pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000; - pub fn new(email: String) -> Self { + pub fn new(email: String, name: Option) -> Self { let now = Utc::now().naive_utc(); let email = email.to_lowercase(); @@ -97,7 +105,7 @@ impl User { verified_at: None, last_verifying_at: None, login_verify_count: 0, - name: email.clone(), + name: name.unwrap_or(email.clone()), email, akey: String::new(), email_new: None, @@ -456,3 +464,51 @@ impl Invitation { } } } + +impl SsoUser { + pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { + db_run! 
{ conn: + sqlite, mysql { + diesel::replace_into(sso_users::table) + .values(SsoUserDb::to_db(self)) + .execute(conn) + .map_res("Error saving SSO user") + } + postgresql { + let value = SsoUserDb::to_db(self); + diesel::insert_into(sso_users::table) + .values(&value) + .execute(conn) + .map_res("Error saving SSO user") + } + } + } + + // Written as an union to make the query more lisible than using an `or_filter`. + // But `first()` does not appear to work with `union()` so we use `load()`. + pub async fn find_by_identifier_or_email( + identifier: &str, + mail: &str, + conn: &DbConn, + ) -> Option<(User, Option)> { + let lower_mail = mail.to_lowercase(); + + db_run! {conn: { + users::table + .inner_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(sso_users::identifier.eq(identifier)) + .union( + users::table + .left_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(users::email.eq(lower_mail)) + ) + .load(conn) + .expect("Error searching user by SSO identifier and email") + .into_iter() + .next() + .map(|(user, sso_user)| { (user.from_db(), sso_user.from_db()) }) + }} + } +} diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 91392524..bfcdf234 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! 
{ + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/mail.rs b/src/mail.rs index 4ff6725a..dc248b28 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -492,6 +492,18 @@ pub async fn send_change_email(address: &str, token: &str) -> EmptyResult { send_email(address, &subject, body_html, body_text).await } +pub async fn send_sso_change_email(address: &str) -> EmptyResult { + let (subject, body_html, body_text) = get_text( + "email/sso_change_email", + json!({ + "url": format!("{}/#/settings/account", CONFIG.domain()), + "img_src": CONFIG._smtp_img_src(), + }), + )?; + + send_email(address, &subject, body_html, body_text).await +} + pub async fn send_set_password(address: &str, user_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/set_password", diff --git a/src/main.rs b/src/main.rs index 73085901..4ceb7756 100644 --- a/src/main.rs +++ b/src/main.rs @@ -49,6 +49,7 @@ mod crypto; mod db; mod mail; mod ratelimit; +mod sso; mod util; use crate::api::purge_auth_requests; @@ -594,6 +595,13 @@ fn schedule_jobs(pool: db::DbPool) { })); } + // Purge sso nonce from incomplete flow (default to daily at 00h20). + if !CONFIG.purge_incomplete_sso_nonce().is_empty() { + sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || { + runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone())); + })); + } + // Periodically check for jobs to run. We probably won't need any // jobs that run more often than once a minute, so a default poll // interval of 30 seconds should be sufficient. 
Users who want to diff --git a/src/sso.rs b/src/sso.rs new file mode 100644 index 00000000..d3ab90d6 --- /dev/null +++ b/src/sso.rs @@ -0,0 +1,536 @@ +use chrono::Utc; +use regex::Regex; +use std::borrow::Cow; +use std::sync::RwLock; +use std::time::Duration; +use url::Url; + +use mini_moka::sync::Cache; +use once_cell::sync::Lazy; +use openidconnect::core::{ + CoreClient, CoreIdTokenVerifier, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims, +}; +use openidconnect::reqwest::async_http_client; +use openidconnect::{ + AccessToken, AuthDisplay, AuthPrompt, AuthenticationFlow, AuthorizationCode, AuthorizationRequest, ClientId, + ClientSecret, CsrfToken, Nonce, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RefreshToken, + ResponseType, Scope, +}; + +use crate::{ + api::ApiResult, + auth, + auth::{AuthMethod, AuthMethodScope, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY}, + db::{ + models::{Device, SsoNonce, User}, + DbConn, + }, + CONFIG, +}; + +static AC_CACHE: Lazy> = + Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); + +static CLIENT_CACHE: RwLock> = RwLock::new(None); + +static SSO_JWT_ISSUER: Lazy = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin())); + +pub static NONCE_EXPIRATION: Lazy = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); + +trait AuthorizationRequestExt<'a> { + fn add_extra_params>, V: Into>>(self, params: Vec<(N, V)>) -> Self; +} + +impl<'a, AD: AuthDisplay, P: AuthPrompt, RT: ResponseType> AuthorizationRequestExt<'a> + for AuthorizationRequest<'a, AD, P, RT> +{ + fn add_extra_params>, V: Into>>(mut self, params: Vec<(N, V)>) -> Self { + for (key, value) in params { + self = self.add_extra_param(key, value); + } + self + } +} + +#[derive(Debug, Serialize, Deserialize)] +struct SsoTokenJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: String, +} + +pub fn encode_ssotoken_claims() -> String { + let time_now = Utc::now(); + let claims = SsoTokenJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + chrono::TimeDelta::try_minutes(2).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + sub: "vaultwarden".to_string(), + }; + + auth::encode_jwt(&claims) +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum OIDCCodeWrapper { + Ok { + code: String, + state: String, + }, + Error { + state: String, + error: String, + error_description: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OIDCCodeClaims { + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + + pub code: OIDCCodeWrapper, +} + +pub fn encode_code_claims(code: OIDCCodeWrapper) -> String { + let time_now = Utc::now(); + let claims = OIDCCodeClaims { + exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + code, + }; + + auth::encode_jwt(&claims) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct BasicTokenClaims { + iat: Option, + nbf: Option, + exp: i64, +} + +impl BasicTokenClaims { + fn nbf(&self) -> i64 { + self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) + } +} + +fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { + let mut validation = jsonwebtoken::Validation::default(); + validation.set_issuer(&[CONFIG.sso_authority()]); + validation.insecure_disable_signature_validation(); + validation.validate_aud = false; + + match jsonwebtoken::decode(token, 
&jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { + Ok(btc) => Ok(btc.claims), + Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), + } +} + +#[rocket::async_trait] +trait CoreClientExt { + async fn _get_client() -> ApiResult; + async fn cached() -> ApiResult; + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult; + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_>; +} + +#[rocket::async_trait] +impl CoreClientExt for CoreClient { + // Call the OpenId discovery endpoint to retrieve configuration + async fn _get_client() -> ApiResult { + let client_id = ClientId::new(CONFIG.sso_client_id()); + let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); + + let issuer_url = CONFIG.sso_issuer_url()?; + + let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { + Err(err) => err!(format!("Failed to discover OpenID provider: {err}")), + Ok(metadata) => metadata, + }; + + Ok(CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) + .set_redirect_uri(CONFIG.sso_redirect_url()?)) + } + + // Simple cache to prevent recalling the discovery endpoint each time + async fn cached() -> ApiResult { + let cc_client = CLIENT_CACHE.read().ok().and_then(|rw_lock| rw_lock.clone()); + match cc_client { + Some(client) => Ok(client), + None => Self::_get_client().await.map(|client| { + let mut cached_client = CLIENT_CACHE.write().unwrap(); + *cached_client = Some(client.clone()); + client + }), + } + } + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult { + let endpoint = match self.user_info(access_token, None) { + Err(err) => err!(format!("No user_info endpoint: {err}")), + Ok(endpoint) => endpoint, + }; + + match endpoint.request_async(async_http_client).await { + Err(err) => err!(format!("Request to user_info endpoint failed: {err}")), + Ok(user_info) => Ok(user_info), + } + } + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_> { + let mut verifier = self.id_token_verifier(); + if let Some(regex_str) = CONFIG.sso_audience_trusted() { + match Regex::new(®ex_str) { + Ok(regex) => { + verifier = verifier.set_other_audience_verifier_fn(move |aud| regex.is_match(aud)); + } + Err(err) => { + error!("Failed to parse SSO_AUDIENCE_TRUSTED={regex_str} regex: {err}"); + } + } + } + verifier + } +} + +// The `nonce` allow to protect against replay attacks +// redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs +pub async fn authorize_url(state: String, client_id: &str, raw_redirect_uri: &str, mut conn: DbConn) -> ApiResult { + let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); + + let redirect_uri = match client_id { + "web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()), + "desktop" | "mobile" => "bitwarden://sso-callback".to_string(), + "cli" => { + let port_regex = Regex::new(r"^http://localhost:([0-9]{4})$").unwrap(); + match port_regex.captures(raw_redirect_uri).and_then(|captures| captures.get(1).map(|c| c.as_str())) { + Some(port) => format!("http://localhost:{}", port), + None => err!("Failed to extract port number"), + } + } + _ => err!(format!("Unsupported client {client_id}")), + }; + + let client = CoreClient::cached().await?; + let mut auth_req = client + .authorize_url( + AuthenticationFlow::::AuthorizationCode, + || CsrfToken::new(state), + Nonce::new_random, + ) + .add_scopes(scopes) + 
.add_extra_params(CONFIG.sso_authorize_extra_params_vec()); + + let verifier = if CONFIG.sso_pkce() { + let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); + auth_req = auth_req.set_pkce_challenge(pkce_challenge); + Some(pkce_verifier.secret().to_string()) + } else { + None + }; + + let (auth_url, csrf_state, nonce) = auth_req.url(); + + let sso_nonce = SsoNonce::new(csrf_state.secret().to_string(), nonce.secret().to_string(), verifier, redirect_uri); + sso_nonce.save(&mut conn).await?; + + Ok(auth_url) +} + +#[derive(Clone, Debug)] +pub struct AuthenticatedUser { + pub refresh_token: Option, + pub access_token: String, + pub expires_in: Option, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +pub struct UserInformation { + pub state: String, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +async fn decode_code_claims(code: &str, conn: &mut DbConn) -> ApiResult<(String, String)> { + match auth::decode_jwt::(code, SSO_JWT_ISSUER.to_string()) { + Ok(code_claims) => match code_claims.code { + OIDCCodeWrapper::Ok { + code, + state, + } => Ok((code, state)), + OIDCCodeWrapper::Error { + state, + error, + error_description, + } => { + if let Err(err) = SsoNonce::delete(&state, conn).await { + error!("Failed to delete database sso_nonce using {state}: {err}") + } + err!(format!( + "SSO authorization failed: {error}, {}", + error_description.as_ref().unwrap_or(&String::new()) + )) + } + }, + Err(err) => err!(format!("Failed to decode code wrapper: {err}")), + } +} + +// During the 2FA flow we will +// - retrieve the user information and then only discover he needs 2FA. +// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged. +// The `nonce` will ensure that the user is authorized only once. +// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`. 
&", rank = 1)] +async fn oidcsignin(code: String, state: String, conn: DbConn) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Ok { + code, + state, + }, + &conn, + ) + .await } -#[get("/connect/oidc-signin?")] -fn oidcsignin(code: String, jar: &CookieJar<'_>, _conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - - let redirect_uri = match cookiemanager.get_cookie("redirect_uri".to_string()) { - None => err!("No redirect_uri in cookie"), - Some(uri) => uri, - }; - let orig_state = match cookiemanager.get_cookie("state".to_string()) { - None => err!("No state in cookie"), - Some(state) => state, - }; - - cookiemanager.delete_cookie("redirect_uri".to_string()); - cookiemanager.delete_cookie("state".to_string()); - - let redirect = CustomRedirect { - url: format!("{redirect_uri}?code={code}&state={orig_state}"), - headers: vec![], - }; - - Ok(redirect) +// Bitwarden client appear to only care for code and state so we pipe it through +// cf: https://github.com/bitwarden/clients/blob/8e46ef1ae5be8b62b0d3d0b9d1b1c62088a04638/libs/angular/src/auth/components/sso.component.ts#L68C11-L68C23) +#[get("/connect/oidc-signin?&&", rank = 2)] +async fn oidcsignin_error( + state: String, + error: String, + error_description: Option, + conn: DbConn, +) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Error { + state, + error, + error_description, + }, + &conn, + ) + .await } -#[derive(FromForm)] -#[allow(non_snake_case)] +// iss and scope parameters are needed for redirection to work on IOS. +async fn oidcsignin_redirect(state: String, wrapper: sso::OIDCCodeWrapper, conn: &DbConn) -> ApiResult { + let code = sso::encode_code_claims(wrapper); + + let nonce = match SsoNonce::find(&state, conn).await { + Some(n) => n, + None => err!(format!("Failed to retrive redirect_uri with {state}")), + }; + + let mut url = match url::Url::parse(&nonce.redirect_uri) { + Ok(url) => url, + Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)), + }; + + url.query_pairs_mut() + .append_pair("code", &code) + .append_pair("state", &state) + .append_pair("scope", &AuthMethod::Sso.scope()) + .append_pair("iss", &CONFIG.domain()); + + debug!("Redirection to {url}"); + + Ok(Redirect::temporary(String::from(url))) +} + +#[derive(Debug, Clone, Default, FromForm)] struct AuthorizeData { - #[allow(unused)] #[field(name = uncased("client_id"))] #[field(name = uncased("clientid"))] - client_id: Option, + client_id: String, #[field(name = uncased("redirect_uri"))] #[field(name = uncased("redirecturi"))] - redirect_uri: Option, + redirect_uri: String, #[allow(unused)] - #[field(name = uncased("response_type"))] - #[field(name = uncased("responsetype"))] response_type: Option, #[allow(unused)] - #[field(name = uncased("scope"))] scope: Option, - #[field(name = uncased("state"))] - state: Option, + state: String, #[allow(unused)] - #[field(name = uncased("code_challenge"))] code_challenge: Option, #[allow(unused)] - #[field(name = uncased("code_challenge_method"))] code_challenge_method: Option, #[allow(unused)] - #[field(name = uncased("response_mode"))] response_mode: Option, #[allow(unused)] - #[field(name = uncased("domain_hint"))] domain_hint: Option, #[allow(unused)] #[field(name = uncased("ssoToken"))] - ssoToken: Option, + sso_token: Option, } +// The `redirect_uri` will change depending of the client (web, android, ios ..) 
#[get("/connect/authorize?")] -async fn authorize(data: AuthorizeData, jar: &CookieJar<'_>, mut conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - match get_client_from_sso_config().await { - Ok(client) => { - let (auth_url, _csrf_state, nonce) = client - .authorize_url( - AuthenticationFlow::::AuthorizationCode, - CsrfToken::new_random, - Nonce::new_random, - ) - .add_scope(Scope::new("email".to_string())) - .add_scope(Scope::new("profile".to_string())) - .url(); +async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { + let AuthorizeData { + client_id, + redirect_uri, + state, + .. + } = data; - let sso_nonce = SsoNonce::new(nonce.secret().to_string()); - sso_nonce.save(&mut conn).await?; + let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; - let redirect_uri = match data.redirect_uri { - None => err!("No redirect_uri in data"), - Some(uri) => uri, - }; - cookiemanager.set_cookie("redirect_uri".to_string(), redirect_uri); - let state = match data.state { - None => err!("No state in data"), - Some(state) => state, - }; - cookiemanager.set_cookie("state".to_string(), state); - - let redirect = CustomRedirect { - url: format!("{}", auth_url), - headers: vec![], - }; - - Ok(redirect) - } - Err(_err) => err!("Unable to find client from identifier"), - } -} - -async fn get_auth_code_access_token(code: &str) -> ApiResult<(String, String, CoreUserInfoClaims)> { - let oidc_code = AuthorizationCode::new(String::from(code)); - match get_client_from_sso_config().await { - Ok(client) => match client.exchange_code(oidc_code).request_async(async_http_client).await { - Ok(token_response) => { - let refresh_token = match token_response.refresh_token() { - Some(token) => token.secret().to_string(), - None => String::new(), - }; - let id_token = match token_response.extra_fields().id_token() { - None => err!("Token response did not contain an id_token"), - Some(token) => token.to_string(), - }; - - let user_info: CoreUserInfoClaims = - match client.user_info(token_response.access_token().to_owned(), None) { - Err(_err) => err!("Token response did not contain user_info"), - Ok(info) => match info.request_async(async_http_client).await { - Err(_err) => err!("Request to user_info endpoint failed"), - Ok(claim) => claim, - }, - }; - - Ok((refresh_token, id_token, user_info)) - } - Err(err) => err!("Failed to contact token endpoint: {}", err.to_string()), - }, - Err(_err) => err!("Unable to find client"), - } + Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/mod.rs b/src/api/mod.rs index d5281bda..753c60e1 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -35,7 +35,7 @@ pub use crate::api::{ use crate::db::{models::User, DbConn}; // Type aliases for API methods results -type ApiResult = Result; +pub type ApiResult = Result; pub type JsonResult = ApiResult>; pub type EmptyResult = ApiResult<()>; diff --git a/src/auth.rs b/src/auth.rs index d684249d..743287a6 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,6 +1,5 @@ // JWT Handling -// -use chrono::{TimeDelta, Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use num_traits::FromPrimitive; use once_cell::sync::{Lazy, OnceCell}; @@ -9,18 +8,29 @@ use openssl::rsa::Rsa; use serde::de::DeserializeOwned; use serde::ser::Serialize; -use crate::{error::Error, CONFIG}; +use crate::{ + api::ApiResult, + db::{ + models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, + DbConn, + }, + error::Error, + sso, CONFIG, +}; const 
JWT_ALGORITHM: Algorithm = Algorithm::RS256; -pub static DEFAULT_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); +// Limit when BitWarden consider the token as expired +pub static BW_EXPIRATION: Lazy = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); + +pub static DEFAULT_REFRESH_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_days(30).unwrap()); +pub static DEFAULT_ACCESS_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); static JWT_HEADER: Lazy = Lazy::new(|| Header::new(JWT_ALGORITHM)); pub static JWT_LOGIN_ISSUER: Lazy = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); static JWT_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin())); static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin())); -static JWT_SSOTOKEN_ISSUER: Lazy = Lazy::new(|| format!("{}|ssotoken", CONFIG.domain_origin())); static JWT_DELETE_ISSUER: Lazy = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin())); static JWT_VERIFYEMAIL_ISSUER: Lazy = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin())); static JWT_ADMIN_ISSUER: Lazy = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin())); @@ -73,7 +83,7 @@ pub fn encode_jwt(claims: &T) -> String { } } -fn decode_jwt(token: &str, issuer: String) -> Result { +pub fn decode_jwt(token: &str, issuer: String) -> Result { let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); validation.leeway = 30; // 30 seconds validation.validate_exp = true; @@ -92,6 +102,10 @@ fn decode_jwt(token: &str, issuer: String) -> Result Result { + decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) +} + pub fn decode_login(token: &str) -> Result { decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) } @@ -165,6 +179,73 @@ pub struct LoginJwtClaims { pub amr: Vec, } +impl LoginJwtClaims { + pub fn new(device: &Device, user: &User, nbf: i64, exp: i64, scope: Vec, now: DateTime) -> Self { + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // --- + // fn arg: orgs: Vec, + // --- + // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); + // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); + // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); + // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + + if exp <= (now + *BW_EXPIRATION).timestamp() { + warn!("Raise access_token lifetime to more than 5min.") + } + + // Create the JWT claims struct, to send to the client + Self { + nbf, + exp, + iss: JWT_LOGIN_ISSUER.to_string(), + sub: user.uuid.clone(), + premium: true, + name: user.name.clone(), + email: user.email.clone(), + email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), + + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // See: https://github.com/dani-garcia/vaultwarden/issues/4156 + 
// --- + // orgowner, + // orgadmin, + // orguser, + // orgmanager, + sstamp: user.security_stamp.clone(), + device: device.uuid.clone(), + scope, + amr: vec!["Application".into()], + } + } + + pub fn default(device: &Device, user: &User, auth_method: &AuthMethod) -> Self { + let time_now = Utc::now(); + Self::new( + device, + user, + time_now.timestamp(), + (time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(), + auth_method.scope_vec(), + time_now, + ) + } + + pub fn token(&self) -> String { + encode_jwt(&self) + } + + pub fn expires_in(&self) -> i64 { + self.exp - Utc::now().timestamp() + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct InviteJwtClaims { // Not before @@ -318,28 +399,6 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims { } } -#[derive(Debug, Serialize, Deserialize)] -pub struct SsoTokenJwtClaims { - // Not before - pub nbf: i64, - // Expiration time - pub exp: i64, - // Issuer - pub iss: String, - // Subject - pub sub: String, -} - -pub fn generate_ssotoken_claims() -> SsoTokenJwtClaims { - let time_now = Utc::now().naive_utc(); - SsoTokenJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(2)).timestamp(), - iss: JWT_SSOTOKEN_ISSUER.to_string(), - sub: "vaultwarden".to_string(), - } -} - pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims { let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); @@ -379,11 +438,6 @@ use rocket::{ request::{FromRequest, Outcome, Request}, }; -use crate::db::{ - models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, - DbConn, -}; - pub struct Host { pub host: String, } @@ -887,3 +941,150 @@ impl<'r> FromRequest<'r> for WsAccessTokenHeader { }) } } + +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AuthMethod { + OrgApiKey, + Password, + Sso, + UserApiKey, +} + +pub trait AuthMethodScope { + fn scope_vec(&self) -> Vec; + fn scope(&self) -> String; + fn check_scope(&self, scope: Option<&String>) -> ApiResult; +} + +impl AuthMethodScope for AuthMethod { + fn scope(&self) -> String { + match self { + AuthMethod::OrgApiKey => "api.organization".to_string(), + AuthMethod::Password => "api offline_access".to_string(), + AuthMethod::Sso => "api offline_access".to_string(), + AuthMethod::UserApiKey => "api".to_string(), + } + } + + fn scope_vec(&self) -> Vec { + self.scope().split_whitespace().map(str::to_string).collect() + } + + fn check_scope(&self, scope: Option<&String>) -> ApiResult { + let method_scope = self.scope(); + match scope { + None => err!("Missing scope"), + Some(scope) if scope == &method_scope => Ok(method_scope), + Some(scope) => err!(format!("Scope ({scope}) not supported")), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum TokenWrapper { + Access(String), + Refresh(String), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct RefreshJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: AuthMethod, + + pub device_token: String, + + pub token: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct AuthTokens { + pub refresh_claims: RefreshJwtClaims, + pub access_claims: LoginJwtClaims, +} + +impl AuthTokens { + pub fn refresh_token(&self) -> String { + encode_jwt(&self.refresh_claims) + } + + pub fn access_token(&self) -> String { + self.access_claims.token() + } + + pub fn expires_in(&self) 
-> i64 { + self.access_claims.expires_in() + } + + pub fn scope(&self) -> String { + self.refresh_claims.sub.scope() + } + + // Create refresh_token and access_token with default validity + pub fn new(device: &Device, user: &User, sub: AuthMethod) -> Self { + let time_now = Utc::now(); + + let access_claims = LoginJwtClaims::default(device, user, &sub); + + let refresh_claims = RefreshJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp(), + iss: JWT_LOGIN_ISSUER.to_string(), + sub, + device_token: device.refresh_token.clone(), + token: None, + }; + + Self { + refresh_claims, + access_claims, + } + } +} + +pub async fn refresh_tokens(refresh_token: &str, conn: &mut DbConn) -> ApiResult<(Device, User, AuthTokens)> { + let time_now = Utc::now(); + + let refresh_claims = match decode_refresh(refresh_token) { + Err(err) => err!(format!("Impossible to read refresh_token: {err}")), + Ok(claims) => claims, + }; + + // Get device by refresh token + let mut device = match Device::find_by_refresh_token(&refresh_claims.device_token, conn).await { + None => err!("Invalid refresh token"), + Some(device) => device, + }; + + // Roll the Device.refresh_token this way it invalides old JWT refresh_token + device.roll_refresh_token(); + device.save(conn).await?; + + let user = match User::find_by_uuid(&device.user_uuid, conn).await { + None => err!("Impossible to find user"), + Some(user) => user, + }; + + if refresh_claims.exp < time_now.timestamp() { + err!("Expired refresh token"); + } + + let auth_tokens = match refresh_claims.sub { + AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => { + AuthTokens::new(&device, &user, refresh_claims.sub) + } + AuthMethod::Sso if CONFIG.sso_enabled() => sso::exchange_refresh_token(&device, &user, &refresh_claims).await?, + AuthMethod::Sso => err!("SSO is now disabled, Login again using email and master password"), + AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, Login again"), + AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub), + _ => err!("Invalid auth method cannot refresh token"), + }; + + Ok((device, user, auth_tokens)) +} diff --git a/src/config.rs b/src/config.rs index 6b55a9b9..ce5d64d1 100644 --- a/src/config.rs +++ b/src/config.rs @@ -409,7 +409,9 @@ make_config! { /// Auth Request cleanup schedule |> Cron schedule of the job that cleans old auth requests from the auth request. /// Defaults to every minute. Set blank to disable this job. auth_request_purge_schedule: String, false, def, "30 * * * * *".to_string(); - + /// Purge incomplete sso nonce. + /// Defaults to daily. Set blank to disable this job. + purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string(); }, /// General settings @@ -609,19 +611,33 @@ make_config! 
{ /// OpenID Connect SSO settings sso { /// Enabled - sso_enabled: bool, true, def, false; - /// Force SSO login - sso_only: bool, true, def, false; + sso_enabled: bool, true, def, false; + /// Disable Email+Master Password login + sso_only: bool, true, def, false; + /// Associate existing user based on email + sso_signups_match_email: bool, true, def, true; /// Client ID - sso_client_id: String, true, def, String::new(); + sso_client_id: String, false, def, String::new(); /// Client Key - sso_client_secret: Pass, true, def, String::new(); + sso_client_secret: Pass, false, def, String::new(); /// Authority Server - sso_authority: String, true, def, String::new(); + sso_authority: String, false, def, String::new(); + /// Scopes required for authorize + sso_scopes: String, false, def, "email profile".to_string(); + /// Additionnal authorization url parameters + sso_authorize_extra_params: String, false, def, String::new(); + /// Use PKCE during Auth Code flow + sso_pkce: bool, false, def, false; + /// Regex for additionnal trusted Id token audience + sso_audience_trusted: String, false, option; /// CallBack Path - sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); - /// Allow workaround so SSO logins accept all invites - sso_acceptall_invites: bool, true, def, false; + sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); + /// Optional sso master password policy + sso_master_password_policy: String, true, option; + /// Use sso only for auth not the session lifecycle + sso_auth_only_not_session: bool, true, def, false; + /// Log all tokens, LOG_LEVEL=debug is required + sso_debug_tokens: bool, true, def, false; }, /// Yubikey settings @@ -647,7 +663,7 @@ make_config! { /// Host duo_host: String, true, option; /// Application Key (generated automatically) - _duo_akey: Pass, false, option; + _duo_akey: Pass, true, option; }, /// SMTP Email Settings @@ -833,10 +849,14 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { err!("All Duo options need to be set for global Duo support") } - if cfg.sso_enabled - && (cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty()) - { - err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + if cfg.sso_enabled { + if cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty() { + err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + } + + internal_sso_issuer_url(&cfg.sso_authority)?; + internal_sso_redirect_url(&cfg.sso_callback_path)?; + check_master_password_policy(&cfg.sso_master_password_policy)?; } if cfg._enable_yubico { @@ -1011,6 +1031,28 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { Ok(()) } +fn internal_sso_issuer_url(sso_authority: &String) -> Result { + match openidconnect::IssuerUrl::new(sso_authority.clone()) { + Err(err) => err!(format!("Invalid sso_authority UR ({sso_authority}): {err}")), + Ok(issuer_url) => Ok(issuer_url), + } +} + +fn internal_sso_redirect_url(sso_callback_path: &String) -> Result { + match openidconnect::RedirectUrl::new(sso_callback_path.clone()) { + Err(err) => err!(format!("Invalid sso_callback_path ({sso_callback_path} built using `domain`) URL: {err}")), + Ok(redirect_url) => Ok(redirect_url), + } +} + +fn check_master_password_policy(sso_master_password_policy: &Option) -> Result<(), Error> { + let policy = sso_master_password_policy.as_ref().map(|mpp| 
serde_json::from_str::(mpp)); + if let Some(Err(error)) = policy { + err!(format!("Invalid sso_master_password_policy ({error}), Ensure that it's correctly escaped with ''")) + } + Ok(()) +} + /// Extracts an RFC 6454 web origin from a URL. fn extract_url_origin(url: &str) -> String { match Url::parse(url) { @@ -1088,6 +1130,26 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls "starttls".to_string() } +/// Allow to parse a multiline list of Key/Values (`key=value`) +/// Will ignore comment lines (starting with `//`) +fn parse_param_list(config: String) -> Vec<(String, String)> { + config + .lines() + .map(|l| l.trim()) + .filter(|l| !l.is_empty() && !l.starts_with("//")) + .filter_map(|l| { + let split = l.split('=').collect::>(); + match &split[..] { + [key, value] => Some(((*key).to_string(), (*value).to_string())), + _ => { + println!("[WARNING] Failed to parse ({l}). Expected key=value"); + None + } + } + }) + .collect() +} + impl Config { pub fn load() -> Result { // Loading from env and file @@ -1277,6 +1339,22 @@ impl Config { } } } + + pub fn sso_issuer_url(&self) -> Result { + internal_sso_issuer_url(&self.sso_authority()) + } + + pub fn sso_redirect_url(&self) -> Result { + internal_sso_redirect_url(&self.sso_callback_path()) + } + + pub fn sso_scopes_vec(&self) -> Vec { + self.sso_scopes().split_whitespace().map(str::to_string).collect() + } + + pub fn sso_authorize_extra_params_vec(&self) -> Vec<(String, String)> { + parse_param_list(self.sso_authorize_extra_params()) + } } use handlebars::{ @@ -1335,6 +1413,7 @@ where reg!("email/send_single_org_removed_from_org", ".html"); reg!("email/set_password", ".html"); reg!("email/smtp_test", ".html"); + reg!("email/sso_change_email", ".html"); reg!("email/twofactor_email", ".html"); reg!("email/verify_email", ".html"); reg!("email/welcome_must_verify", ".html"); diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 60c63589..e5e165e5 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -1,6 +1,7 @@ use chrono::{NaiveDateTime, Utc}; +use data_encoding::{BASE64, BASE64URL}; -use crate::{crypto, CONFIG}; +use crate::crypto; use core::fmt; db_object! 
{ @@ -42,13 +43,16 @@ impl Device { push_uuid: None, push_token: None, - refresh_token: String::new(), + refresh_token: crypto::encode_random_bytes::<64>(BASE64URL), twofactor_remember: None, } } + pub fn roll_refresh_token(&mut self) { + self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL) + } + pub fn refresh_twofactor_remember(&mut self) -> String { - use data_encoding::BASE64; let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64); self.twofactor_remember = Some(twofactor_remember.clone()); @@ -59,61 +63,6 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, scope: Vec) -> (String, i64) { - // If there is no refresh token, we create one - if self.refresh_token.is_empty() { - use data_encoding::BASE64URL; - self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL); - } - - // Update the expiration of the device and the last update date - let time_now = Utc::now(); - self.updated_at = time_now.naive_utc(); - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // --- - // fn arg: orgs: Vec, - // --- - // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); - - // Create the JWT claims struct, to send to the client - use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; - let claims = LoginJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + *DEFAULT_VALIDITY).timestamp(), - iss: JWT_LOGIN_ISSUER.to_string(), - sub: user.uuid.clone(), - - premium: true, - name: user.name.clone(), - email: user.email.clone(), - email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // orgowner, - // orgadmin, - // orguser, - // orgmanager, - sstamp: user.security_stamp.clone(), - device: self.uuid.clone(), - scope, - amr: vec!["Application".into()], - }; - - (encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds()) - } - pub fn is_push_device(&self) -> bool { matches!(DeviceType::from_i32(self.atype), DeviceType::Android | DeviceType::Ios) } diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index 9a4e7585..465ea5c7 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -32,4 +32,4 @@ pub use self::send::{Send, SendType}; pub use self::sso_nonce::SsoNonce; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor_incomplete::TwoFactorIncomplete; -pub use self::user::{Invitation, User, UserKdfType, UserStampException}; +pub use self::user::{Invitation, SsoUser, User, UserKdfType, UserStampException}; diff --git 
a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index e5a845f6..92f4d999 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -27,7 +27,7 @@ pub enum OrgPolicyType { MasterPassword = 1, PasswordGenerator = 2, SingleOrg = 3, - RequireSso = 4, + // RequireSso = 4, // Not supported PersonalOwnership = 5, DisableSend = 6, SendOptions = 7, @@ -77,12 +77,11 @@ impl OrgPolicy { } pub fn to_json(&self) -> Value { - let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); json!({ "id": self.uuid, "organizationId": self.org_uuid, "type": self.atype, - "data": data_json, + "data": serde_json::from_str(&self.data).unwrap_or(Value::Null), "enabled": self.enabled, "object": "policy", }) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index fd952955..b7e68475 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -25,6 +25,7 @@ db_object! { pub uuid: String, pub user_uuid: String, pub org_uuid: String, + pub invited_by_email: Option, pub access_all: bool, pub akey: String, @@ -167,7 +168,7 @@ impl Organization { "useTotp": true, "usePolicies": true, // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported // "UseKeyConnector": false, // Not supported "selfHost": true, "useApi": true, @@ -197,12 +198,13 @@ impl Organization { static ACTIVATE_REVOKE_DIFF: i32 = 128; impl UserOrganization { - pub fn new(user_uuid: String, org_uuid: String) -> Self { + pub fn new(user_uuid: String, org_uuid: String, invited_by_email: Option) -> Self { Self { uuid: crate::util::get_uuid(), user_uuid, org_uuid, + invited_by_email, access_all: false, akey: String::new(), @@ -385,7 +387,7 @@ impl UserOrganization { "resetPasswordEnrolled": self.reset_password_key.is_some(), "useResetPassword": CONFIG.mail_enabled(), "ssoBound": false, // Not supported - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported "useKeyConnector": false, "useSecretsManager": false, "usePasswordManager": true, @@ -652,6 +654,17 @@ impl UserOrganization { }} } + pub async fn confirm_user_invitations(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { + db_run! { conn: { + diesel::update(users_organizations::table) + .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) + .set(users_organizations::status.eq(UserOrgStatus::Accepted as i32)) + .execute(conn) + .map_res("Error confirming invitations") + }} + } + pub async fn find_any_state_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec { db_run! { conn: { users_organizations::table diff --git a/src/db/models/sso_nonce.rs b/src/db/models/sso_nonce.rs index 0a9533e0..881f075b 100644 --- a/src/db/models/sso_nonce.rs +++ b/src/db/models/sso_nonce.rs @@ -1,21 +1,34 @@ +use chrono::{NaiveDateTime, Utc}; + use crate::api::EmptyResult; -use crate::db::DbConn; +use crate::db::{DbConn, DbPool}; use crate::error::MapResult; +use crate::sso::NONCE_EXPIRATION; db_object! 
{ #[derive(Identifiable, Queryable, Insertable)] #[diesel(table_name = sso_nonce)] - #[diesel(primary_key(nonce))] + #[diesel(primary_key(state))] pub struct SsoNonce { + pub state: String, pub nonce: String, + pub verifier: Option, + pub redirect_uri: String, + pub created_at: NaiveDateTime, } } /// Local methods impl SsoNonce { - pub fn new(nonce: String) -> Self { - Self { + pub fn new(state: String, nonce: String, verifier: Option, redirect_uri: String) -> Self { + let now = Utc::now().naive_utc(); + + SsoNonce { + state, nonce, + verifier, + redirect_uri, + created_at: now, } } } @@ -28,7 +41,7 @@ impl SsoNonce { diesel::replace_into(sso_nonce::table) .values(SsoNonceDb::to_db(self)) .execute(conn) - .map_res("Error saving SSO device") + .map_res("Error saving SSO nonce") } postgresql { let value = SsoNonceDb::to_db(self); @@ -40,21 +53,37 @@ impl SsoNonce { } } - pub async fn delete(self, conn: &mut DbConn) -> EmptyResult { + pub async fn delete(state: &str, conn: &mut DbConn) -> EmptyResult { db_run! { conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::nonce.eq(self.nonce))) + diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state))) .execute(conn) .map_res("Error deleting SSO nonce") }} } - pub async fn find(nonce: &str, conn: &mut DbConn) -> Option { + pub async fn find(state: &str, conn: &DbConn) -> Option { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; db_run! { conn: { sso_nonce::table - .filter(sso_nonce::nonce.eq(nonce)) + .filter(sso_nonce::state.eq(state)) + .filter(sso_nonce::created_at.ge(oldest)) .first::(conn) .ok() .from_db() }} } + + pub async fn delete_expired(pool: DbPool) -> EmptyResult { + debug!("Purging expired sso_nonce"); + if let Ok(conn) = pool.get().await { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; + db_run! { conn: { + diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest))) + .execute(conn) + .map_res("Error deleting expired SSO nonce") + }} + } else { + err!("Failed to get DB connection while purging expired sso_nonce") + } + } } diff --git a/src/db/models/user.rs b/src/db/models/user.rs index a02b694d..1369aa07 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -5,7 +5,7 @@ use crate::crypto; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)] #[diesel(table_name = users)] #[diesel(treat_none_as_null = true)] #[diesel(primary_key(uuid))] @@ -60,6 +60,14 @@ db_object! { pub struct Invitation { pub email: String, } + + #[derive(Identifiable, Queryable, Insertable, Selectable)] + #[diesel(table_name = sso_users)] + #[diesel(primary_key(user_uuid))] + pub struct SsoUser { + pub user_uuid: String, + pub identifier: String, + } } pub enum UserKdfType { @@ -85,7 +93,7 @@ impl User { pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32; pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000; - pub fn new(email: String) -> Self { + pub fn new(email: String, name: Option) -> Self { let now = Utc::now().naive_utc(); let email = email.to_lowercase(); @@ -97,7 +105,7 @@ impl User { verified_at: None, last_verifying_at: None, login_verify_count: 0, - name: email.clone(), + name: name.unwrap_or(email.clone()), email, akey: String::new(), email_new: None, @@ -456,3 +464,51 @@ impl Invitation { } } } + +impl SsoUser { + pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { + db_run! 
{ conn: + sqlite, mysql { + diesel::replace_into(sso_users::table) + .values(SsoUserDb::to_db(self)) + .execute(conn) + .map_res("Error saving SSO user") + } + postgresql { + let value = SsoUserDb::to_db(self); + diesel::insert_into(sso_users::table) + .values(&value) + .execute(conn) + .map_res("Error saving SSO user") + } + } + } + + // Written as an union to make the query more lisible than using an `or_filter`. + // But `first()` does not appear to work with `union()` so we use `load()`. + pub async fn find_by_identifier_or_email( + identifier: &str, + mail: &str, + conn: &DbConn, + ) -> Option<(User, Option)> { + let lower_mail = mail.to_lowercase(); + + db_run! {conn: { + users::table + .inner_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(sso_users::identifier.eq(identifier)) + .union( + users::table + .left_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(users::email.eq(lower_mail)) + ) + .load(conn) + .expect("Error searching user by SSO identifier and email") + .into_iter() + .next() + .map(|(user, sso_user)| { (user.from_db(), sso_user.from_db()) }) + }} + } +} diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 91392524..bfcdf234 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! 
{ + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/mail.rs b/src/mail.rs index 4ff6725a..dc248b28 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -492,6 +492,18 @@ pub async fn send_change_email(address: &str, token: &str) -> EmptyResult { send_email(address, &subject, body_html, body_text).await } +pub async fn send_sso_change_email(address: &str) -> EmptyResult { + let (subject, body_html, body_text) = get_text( + "email/sso_change_email", + json!({ + "url": format!("{}/#/settings/account", CONFIG.domain()), + "img_src": CONFIG._smtp_img_src(), + }), + )?; + + send_email(address, &subject, body_html, body_text).await +} + pub async fn send_set_password(address: &str, user_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/set_password", diff --git a/src/main.rs b/src/main.rs index 73085901..4ceb7756 100644 --- a/src/main.rs +++ b/src/main.rs @@ -49,6 +49,7 @@ mod crypto; mod db; mod mail; mod ratelimit; +mod sso; mod util; use crate::api::purge_auth_requests; @@ -594,6 +595,13 @@ fn schedule_jobs(pool: db::DbPool) { })); } + // Purge sso nonce from incomplete flow (default to daily at 00h20). + if !CONFIG.purge_incomplete_sso_nonce().is_empty() { + sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || { + runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone())); + })); + } + // Periodically check for jobs to run. We probably won't need any // jobs that run more often than once a minute, so a default poll // interval of 30 seconds should be sufficient. 
Users who want to diff --git a/src/sso.rs b/src/sso.rs new file mode 100644 index 00000000..d3ab90d6 --- /dev/null +++ b/src/sso.rs @@ -0,0 +1,536 @@ +use chrono::Utc; +use regex::Regex; +use std::borrow::Cow; +use std::sync::RwLock; +use std::time::Duration; +use url::Url; + +use mini_moka::sync::Cache; +use once_cell::sync::Lazy; +use openidconnect::core::{ + CoreClient, CoreIdTokenVerifier, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims, +}; +use openidconnect::reqwest::async_http_client; +use openidconnect::{ + AccessToken, AuthDisplay, AuthPrompt, AuthenticationFlow, AuthorizationCode, AuthorizationRequest, ClientId, + ClientSecret, CsrfToken, Nonce, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RefreshToken, + ResponseType, Scope, +}; + +use crate::{ + api::ApiResult, + auth, + auth::{AuthMethod, AuthMethodScope, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY}, + db::{ + models::{Device, SsoNonce, User}, + DbConn, + }, + CONFIG, +}; + +static AC_CACHE: Lazy> = + Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); + +static CLIENT_CACHE: RwLock> = RwLock::new(None); + +static SSO_JWT_ISSUER: Lazy = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin())); + +pub static NONCE_EXPIRATION: Lazy = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); + +trait AuthorizationRequestExt<'a> { + fn add_extra_params>, V: Into>>(self, params: Vec<(N, V)>) -> Self; +} + +impl<'a, AD: AuthDisplay, P: AuthPrompt, RT: ResponseType> AuthorizationRequestExt<'a> + for AuthorizationRequest<'a, AD, P, RT> +{ + fn add_extra_params>, V: Into>>(mut self, params: Vec<(N, V)>) -> Self { + for (key, value) in params { + self = self.add_extra_param(key, value); + } + self + } +} + +#[derive(Debug, Serialize, Deserialize)] +struct SsoTokenJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: String, +} + +pub fn encode_ssotoken_claims() -> String { + let time_now = Utc::now(); + let claims = SsoTokenJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + chrono::TimeDelta::try_minutes(2).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + sub: "vaultwarden".to_string(), + }; + + auth::encode_jwt(&claims) +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum OIDCCodeWrapper { + Ok { + code: String, + state: String, + }, + Error { + state: String, + error: String, + error_description: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OIDCCodeClaims { + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + + pub code: OIDCCodeWrapper, +} + +pub fn encode_code_claims(code: OIDCCodeWrapper) -> String { + let time_now = Utc::now(); + let claims = OIDCCodeClaims { + exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + code, + }; + + auth::encode_jwt(&claims) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct BasicTokenClaims { + iat: Option, + nbf: Option, + exp: i64, +} + +impl BasicTokenClaims { + fn nbf(&self) -> i64 { + self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) + } +} + +fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { + let mut validation = jsonwebtoken::Validation::default(); + validation.set_issuer(&[CONFIG.sso_authority()]); + validation.insecure_disable_signature_validation(); + validation.validate_aud = false; + + match jsonwebtoken::decode(token, 
&jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { + Ok(btc) => Ok(btc.claims), + Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), + } +} + +#[rocket::async_trait] +trait CoreClientExt { + async fn _get_client() -> ApiResult; + async fn cached() -> ApiResult; + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult; + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_>; +} + +#[rocket::async_trait] +impl CoreClientExt for CoreClient { + // Call the OpenId discovery endpoint to retrieve configuration + async fn _get_client() -> ApiResult { + let client_id = ClientId::new(CONFIG.sso_client_id()); + let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); + + let issuer_url = CONFIG.sso_issuer_url()?; + + let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { + Err(err) => err!(format!("Failed to discover OpenID provider: {err}")), + Ok(metadata) => metadata, + }; + + Ok(CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) + .set_redirect_uri(CONFIG.sso_redirect_url()?)) + } + + // Simple cache to prevent recalling the discovery endpoint each time + async fn cached() -> ApiResult { + let cc_client = CLIENT_CACHE.read().ok().and_then(|rw_lock| rw_lock.clone()); + match cc_client { + Some(client) => Ok(client), + None => Self::_get_client().await.map(|client| { + let mut cached_client = CLIENT_CACHE.write().unwrap(); + *cached_client = Some(client.clone()); + client + }), + } + } + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult { + let endpoint = match self.user_info(access_token, None) { + Err(err) => err!(format!("No user_info endpoint: {err}")), + Ok(endpoint) => endpoint, + }; + + match endpoint.request_async(async_http_client).await { + Err(err) => err!(format!("Request to user_info endpoint failed: {err}")), + Ok(user_info) => Ok(user_info), + } + } + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_> { + let mut verifier = self.id_token_verifier(); + if let Some(regex_str) = CONFIG.sso_audience_trusted() { + match Regex::new(®ex_str) { + Ok(regex) => { + verifier = verifier.set_other_audience_verifier_fn(move |aud| regex.is_match(aud)); + } + Err(err) => { + error!("Failed to parse SSO_AUDIENCE_TRUSTED={regex_str} regex: {err}"); + } + } + } + verifier + } +} + +// The `nonce` allow to protect against replay attacks +// redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs +pub async fn authorize_url(state: String, client_id: &str, raw_redirect_uri: &str, mut conn: DbConn) -> ApiResult { + let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); + + let redirect_uri = match client_id { + "web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()), + "desktop" | "mobile" => "bitwarden://sso-callback".to_string(), + "cli" => { + let port_regex = Regex::new(r"^http://localhost:([0-9]{4})$").unwrap(); + match port_regex.captures(raw_redirect_uri).and_then(|captures| captures.get(1).map(|c| c.as_str())) { + Some(port) => format!("http://localhost:{}", port), + None => err!("Failed to extract port number"), + } + } + _ => err!(format!("Unsupported client {client_id}")), + }; + + let client = CoreClient::cached().await?; + let mut auth_req = client + .authorize_url( + AuthenticationFlow::::AuthorizationCode, + || CsrfToken::new(state), + Nonce::new_random, + ) + .add_scopes(scopes) + 
.add_extra_params(CONFIG.sso_authorize_extra_params_vec()); + + let verifier = if CONFIG.sso_pkce() { + let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); + auth_req = auth_req.set_pkce_challenge(pkce_challenge); + Some(pkce_verifier.secret().to_string()) + } else { + None + }; + + let (auth_url, csrf_state, nonce) = auth_req.url(); + + let sso_nonce = SsoNonce::new(csrf_state.secret().to_string(), nonce.secret().to_string(), verifier, redirect_uri); + sso_nonce.save(&mut conn).await?; + + Ok(auth_url) +} + +#[derive(Clone, Debug)] +pub struct AuthenticatedUser { + pub refresh_token: Option, + pub access_token: String, + pub expires_in: Option, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +pub struct UserInformation { + pub state: String, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +async fn decode_code_claims(code: &str, conn: &mut DbConn) -> ApiResult<(String, String)> { + match auth::decode_jwt::(code, SSO_JWT_ISSUER.to_string()) { + Ok(code_claims) => match code_claims.code { + OIDCCodeWrapper::Ok { + code, + state, + } => Ok((code, state)), + OIDCCodeWrapper::Error { + state, + error, + error_description, + } => { + if let Err(err) = SsoNonce::delete(&state, conn).await { + error!("Failed to delete database sso_nonce using {state}: {err}") + } + err!(format!( + "SSO authorization failed: {error}, {}", + error_description.as_ref().unwrap_or(&String::new()) + )) + } + }, + Err(err) => err!(format!("Failed to decode code wrapper: {err}")), + } +} + +// During the 2FA flow we will +// - retrieve the user information and then only discover he needs 2FA. +// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged. +// The `nonce` will ensure that the user is authorized only once. +// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`. 
")] -fn oidcsignin(code: String, jar: &CookieJar<'_>, _conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - - let redirect_uri = match cookiemanager.get_cookie("redirect_uri".to_string()) { - None => err!("No redirect_uri in cookie"), - Some(uri) => uri, - }; - let orig_state = match cookiemanager.get_cookie("state".to_string()) { - None => err!("No state in cookie"), - Some(state) => state, - }; - - cookiemanager.delete_cookie("redirect_uri".to_string()); - cookiemanager.delete_cookie("state".to_string()); - - let redirect = CustomRedirect { - url: format!("{redirect_uri}?code={code}&state={orig_state}"), - headers: vec![], - }; - - Ok(redirect) +// Bitwarden client appear to only care for code and state so we pipe it through +// cf: https://github.com/bitwarden/clients/blob/8e46ef1ae5be8b62b0d3d0b9d1b1c62088a04638/libs/angular/src/auth/components/sso.component.ts#L68C11-L68C23) +#[get("/connect/oidc-signin?&&", rank = 2)] +async fn oidcsignin_error( + state: String, + error: String, + error_description: Option, + conn: DbConn, +) -> ApiResult { + oidcsignin_redirect( + state.clone(), + sso::OIDCCodeWrapper::Error { + state, + error, + error_description, + }, + &conn, + ) + .await } -#[derive(FromForm)] -#[allow(non_snake_case)] +// iss and scope parameters are needed for redirection to work on IOS. +async fn oidcsignin_redirect(state: String, wrapper: sso::OIDCCodeWrapper, conn: &DbConn) -> ApiResult { + let code = sso::encode_code_claims(wrapper); + + let nonce = match SsoNonce::find(&state, conn).await { + Some(n) => n, + None => err!(format!("Failed to retrive redirect_uri with {state}")), + }; + + let mut url = match url::Url::parse(&nonce.redirect_uri) { + Ok(url) => url, + Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)), + }; + + url.query_pairs_mut() + .append_pair("code", &code) + .append_pair("state", &state) + .append_pair("scope", &AuthMethod::Sso.scope()) + .append_pair("iss", &CONFIG.domain()); + + debug!("Redirection to {url}"); + + Ok(Redirect::temporary(String::from(url))) +} + +#[derive(Debug, Clone, Default, FromForm)] struct AuthorizeData { - #[allow(unused)] #[field(name = uncased("client_id"))] #[field(name = uncased("clientid"))] - client_id: Option, + client_id: String, #[field(name = uncased("redirect_uri"))] #[field(name = uncased("redirecturi"))] - redirect_uri: Option, + redirect_uri: String, #[allow(unused)] - #[field(name = uncased("response_type"))] - #[field(name = uncased("responsetype"))] response_type: Option, #[allow(unused)] - #[field(name = uncased("scope"))] scope: Option, - #[field(name = uncased("state"))] - state: Option, + state: String, #[allow(unused)] - #[field(name = uncased("code_challenge"))] code_challenge: Option, #[allow(unused)] - #[field(name = uncased("code_challenge_method"))] code_challenge_method: Option, #[allow(unused)] - #[field(name = uncased("response_mode"))] response_mode: Option, #[allow(unused)] - #[field(name = uncased("domain_hint"))] domain_hint: Option, #[allow(unused)] #[field(name = uncased("ssoToken"))] - ssoToken: Option, + sso_token: Option, } +// The `redirect_uri` will change depending of the client (web, android, ios ..) 
#[get("/connect/authorize?")] -async fn authorize(data: AuthorizeData, jar: &CookieJar<'_>, mut conn: DbConn) -> ApiResult { - let cookiemanager = CookieManager::new(jar); - match get_client_from_sso_config().await { - Ok(client) => { - let (auth_url, _csrf_state, nonce) = client - .authorize_url( - AuthenticationFlow::::AuthorizationCode, - CsrfToken::new_random, - Nonce::new_random, - ) - .add_scope(Scope::new("email".to_string())) - .add_scope(Scope::new("profile".to_string())) - .url(); +async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { + let AuthorizeData { + client_id, + redirect_uri, + state, + .. + } = data; - let sso_nonce = SsoNonce::new(nonce.secret().to_string()); - sso_nonce.save(&mut conn).await?; + let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; - let redirect_uri = match data.redirect_uri { - None => err!("No redirect_uri in data"), - Some(uri) => uri, - }; - cookiemanager.set_cookie("redirect_uri".to_string(), redirect_uri); - let state = match data.state { - None => err!("No state in data"), - Some(state) => state, - }; - cookiemanager.set_cookie("state".to_string(), state); - - let redirect = CustomRedirect { - url: format!("{}", auth_url), - headers: vec![], - }; - - Ok(redirect) - } - Err(_err) => err!("Unable to find client from identifier"), - } -} - -async fn get_auth_code_access_token(code: &str) -> ApiResult<(String, String, CoreUserInfoClaims)> { - let oidc_code = AuthorizationCode::new(String::from(code)); - match get_client_from_sso_config().await { - Ok(client) => match client.exchange_code(oidc_code).request_async(async_http_client).await { - Ok(token_response) => { - let refresh_token = match token_response.refresh_token() { - Some(token) => token.secret().to_string(), - None => String::new(), - }; - let id_token = match token_response.extra_fields().id_token() { - None => err!("Token response did not contain an id_token"), - Some(token) => token.to_string(), - }; - - let user_info: CoreUserInfoClaims = - match client.user_info(token_response.access_token().to_owned(), None) { - Err(_err) => err!("Token response did not contain user_info"), - Ok(info) => match info.request_async(async_http_client).await { - Err(_err) => err!("Request to user_info endpoint failed"), - Ok(claim) => claim, - }, - }; - - Ok((refresh_token, id_token, user_info)) - } - Err(err) => err!("Failed to contact token endpoint: {}", err.to_string()), - }, - Err(_err) => err!("Unable to find client"), - } + Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/mod.rs b/src/api/mod.rs index d5281bda..753c60e1 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -35,7 +35,7 @@ pub use crate::api::{ use crate::db::{models::User, DbConn}; // Type aliases for API methods results -type ApiResult = Result; +pub type ApiResult = Result; pub type JsonResult = ApiResult>; pub type EmptyResult = ApiResult<()>; diff --git a/src/auth.rs b/src/auth.rs index d684249d..743287a6 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,6 +1,5 @@ // JWT Handling -// -use chrono::{TimeDelta, Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use num_traits::FromPrimitive; use once_cell::sync::{Lazy, OnceCell}; @@ -9,18 +8,29 @@ use openssl::rsa::Rsa; use serde::de::DeserializeOwned; use serde::ser::Serialize; -use crate::{error::Error, CONFIG}; +use crate::{ + api::ApiResult, + db::{ + models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, + DbConn, + }, + error::Error, + sso, CONFIG, +}; const 
JWT_ALGORITHM: Algorithm = Algorithm::RS256; -pub static DEFAULT_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); +// Limit when BitWarden consider the token as expired +pub static BW_EXPIRATION: Lazy = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); + +pub static DEFAULT_REFRESH_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_days(30).unwrap()); +pub static DEFAULT_ACCESS_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); static JWT_HEADER: Lazy = Lazy::new(|| Header::new(JWT_ALGORITHM)); pub static JWT_LOGIN_ISSUER: Lazy = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); static JWT_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin())); static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy = Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin())); -static JWT_SSOTOKEN_ISSUER: Lazy = Lazy::new(|| format!("{}|ssotoken", CONFIG.domain_origin())); static JWT_DELETE_ISSUER: Lazy = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin())); static JWT_VERIFYEMAIL_ISSUER: Lazy = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin())); static JWT_ADMIN_ISSUER: Lazy = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin())); @@ -73,7 +83,7 @@ pub fn encode_jwt(claims: &T) -> String { } } -fn decode_jwt(token: &str, issuer: String) -> Result { +pub fn decode_jwt(token: &str, issuer: String) -> Result { let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); validation.leeway = 30; // 30 seconds validation.validate_exp = true; @@ -92,6 +102,10 @@ fn decode_jwt(token: &str, issuer: String) -> Result Result { + decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) +} + pub fn decode_login(token: &str) -> Result { decode_jwt(token, JWT_LOGIN_ISSUER.to_string()) } @@ -165,6 +179,73 @@ pub struct LoginJwtClaims { pub amr: Vec, } +impl LoginJwtClaims { + pub fn new(device: &Device, user: &User, nbf: i64, exp: i64, scope: Vec, now: DateTime) -> Self { + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // --- + // fn arg: orgs: Vec, + // --- + // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); + // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); + // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); + // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + + if exp <= (now + *BW_EXPIRATION).timestamp() { + warn!("Raise access_token lifetime to more than 5min.") + } + + // Create the JWT claims struct, to send to the client + Self { + nbf, + exp, + iss: JWT_LOGIN_ISSUER.to_string(), + sub: user.uuid.clone(), + premium: true, + name: user.name.clone(), + email: user.email.clone(), + email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), + + // --- + // Disabled these keys to be added to the JWT since they could cause the JWT to get too large + // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients + // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out + // See: https://github.com/dani-garcia/vaultwarden/issues/4156 + 
// --- + // orgowner, + // orgadmin, + // orguser, + // orgmanager, + sstamp: user.security_stamp.clone(), + device: device.uuid.clone(), + scope, + amr: vec!["Application".into()], + } + } + + pub fn default(device: &Device, user: &User, auth_method: &AuthMethod) -> Self { + let time_now = Utc::now(); + Self::new( + device, + user, + time_now.timestamp(), + (time_now + *DEFAULT_ACCESS_VALIDITY).timestamp(), + auth_method.scope_vec(), + time_now, + ) + } + + pub fn token(&self) -> String { + encode_jwt(&self) + } + + pub fn expires_in(&self) -> i64 { + self.exp - Utc::now().timestamp() + } +} + #[derive(Debug, Serialize, Deserialize)] pub struct InviteJwtClaims { // Not before @@ -318,28 +399,6 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims { } } -#[derive(Debug, Serialize, Deserialize)] -pub struct SsoTokenJwtClaims { - // Not before - pub nbf: i64, - // Expiration time - pub exp: i64, - // Issuer - pub iss: String, - // Subject - pub sub: String, -} - -pub fn generate_ssotoken_claims() -> SsoTokenJwtClaims { - let time_now = Utc::now().naive_utc(); - SsoTokenJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(2)).timestamp(), - iss: JWT_SSOTOKEN_ISSUER.to_string(), - sub: "vaultwarden".to_string(), - } -} - pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims { let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); @@ -379,11 +438,6 @@ use rocket::{ request::{FromRequest, Outcome, Request}, }; -use crate::db::{ - models::{Collection, Device, User, UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, - DbConn, -}; - pub struct Host { pub host: String, } @@ -887,3 +941,150 @@ impl<'r> FromRequest<'r> for WsAccessTokenHeader { }) } } + +#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum AuthMethod { + OrgApiKey, + Password, + Sso, + UserApiKey, +} + +pub trait AuthMethodScope { + fn scope_vec(&self) -> Vec; + fn scope(&self) -> String; + fn check_scope(&self, scope: Option<&String>) -> ApiResult; +} + +impl AuthMethodScope for AuthMethod { + fn scope(&self) -> String { + match self { + AuthMethod::OrgApiKey => "api.organization".to_string(), + AuthMethod::Password => "api offline_access".to_string(), + AuthMethod::Sso => "api offline_access".to_string(), + AuthMethod::UserApiKey => "api".to_string(), + } + } + + fn scope_vec(&self) -> Vec { + self.scope().split_whitespace().map(str::to_string).collect() + } + + fn check_scope(&self, scope: Option<&String>) -> ApiResult { + let method_scope = self.scope(); + match scope { + None => err!("Missing scope"), + Some(scope) if scope == &method_scope => Ok(method_scope), + Some(scope) => err!(format!("Scope ({scope}) not supported")), + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum TokenWrapper { + Access(String), + Refresh(String), +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct RefreshJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: AuthMethod, + + pub device_token: String, + + pub token: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct AuthTokens { + pub refresh_claims: RefreshJwtClaims, + pub access_claims: LoginJwtClaims, +} + +impl AuthTokens { + pub fn refresh_token(&self) -> String { + encode_jwt(&self.refresh_claims) + } + + pub fn access_token(&self) -> String { + self.access_claims.token() + } + + pub fn expires_in(&self) 
-> i64 { + self.access_claims.expires_in() + } + + pub fn scope(&self) -> String { + self.refresh_claims.sub.scope() + } + + // Create refresh_token and access_token with default validity + pub fn new(device: &Device, user: &User, sub: AuthMethod) -> Self { + let time_now = Utc::now(); + + let access_claims = LoginJwtClaims::default(device, user, &sub); + + let refresh_claims = RefreshJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp(), + iss: JWT_LOGIN_ISSUER.to_string(), + sub, + device_token: device.refresh_token.clone(), + token: None, + }; + + Self { + refresh_claims, + access_claims, + } + } +} + +pub async fn refresh_tokens(refresh_token: &str, conn: &mut DbConn) -> ApiResult<(Device, User, AuthTokens)> { + let time_now = Utc::now(); + + let refresh_claims = match decode_refresh(refresh_token) { + Err(err) => err!(format!("Impossible to read refresh_token: {err}")), + Ok(claims) => claims, + }; + + // Get device by refresh token + let mut device = match Device::find_by_refresh_token(&refresh_claims.device_token, conn).await { + None => err!("Invalid refresh token"), + Some(device) => device, + }; + + // Roll the Device.refresh_token this way it invalides old JWT refresh_token + device.roll_refresh_token(); + device.save(conn).await?; + + let user = match User::find_by_uuid(&device.user_uuid, conn).await { + None => err!("Impossible to find user"), + Some(user) => user, + }; + + if refresh_claims.exp < time_now.timestamp() { + err!("Expired refresh token"); + } + + let auth_tokens = match refresh_claims.sub { + AuthMethod::Sso if CONFIG.sso_enabled() && CONFIG.sso_auth_only_not_session() => { + AuthTokens::new(&device, &user, refresh_claims.sub) + } + AuthMethod::Sso if CONFIG.sso_enabled() => sso::exchange_refresh_token(&device, &user, &refresh_claims).await?, + AuthMethod::Sso => err!("SSO is now disabled, Login again using email and master password"), + AuthMethod::Password if CONFIG.sso_enabled() && CONFIG.sso_only() => err!("SSO is now required, Login again"), + AuthMethod::Password => AuthTokens::new(&device, &user, refresh_claims.sub), + _ => err!("Invalid auth method cannot refresh token"), + }; + + Ok((device, user, auth_tokens)) +} diff --git a/src/config.rs b/src/config.rs index 6b55a9b9..ce5d64d1 100644 --- a/src/config.rs +++ b/src/config.rs @@ -409,7 +409,9 @@ make_config! { /// Auth Request cleanup schedule |> Cron schedule of the job that cleans old auth requests from the auth request. /// Defaults to every minute. Set blank to disable this job. auth_request_purge_schedule: String, false, def, "30 * * * * *".to_string(); - + /// Purge incomplete sso nonce. + /// Defaults to daily. Set blank to disable this job. + purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string(); }, /// General settings @@ -609,19 +611,33 @@ make_config! 
{ /// OpenID Connect SSO settings sso { /// Enabled - sso_enabled: bool, true, def, false; - /// Force SSO login - sso_only: bool, true, def, false; + sso_enabled: bool, true, def, false; + /// Disable Email+Master Password login + sso_only: bool, true, def, false; + /// Associate existing user based on email + sso_signups_match_email: bool, true, def, true; /// Client ID - sso_client_id: String, true, def, String::new(); + sso_client_id: String, false, def, String::new(); /// Client Key - sso_client_secret: Pass, true, def, String::new(); + sso_client_secret: Pass, false, def, String::new(); /// Authority Server - sso_authority: String, true, def, String::new(); + sso_authority: String, false, def, String::new(); + /// Scopes required for authorize + sso_scopes: String, false, def, "email profile".to_string(); + /// Additionnal authorization url parameters + sso_authorize_extra_params: String, false, def, String::new(); + /// Use PKCE during Auth Code flow + sso_pkce: bool, false, def, false; + /// Regex for additionnal trusted Id token audience + sso_audience_trusted: String, false, option; /// CallBack Path - sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); - /// Allow workaround so SSO logins accept all invites - sso_acceptall_invites: bool, true, def, false; + sso_callback_path: String, false, gen, |c| generate_sso_callback_path(&c.domain); + /// Optional sso master password policy + sso_master_password_policy: String, true, option; + /// Use sso only for auth not the session lifecycle + sso_auth_only_not_session: bool, true, def, false; + /// Log all tokens, LOG_LEVEL=debug is required + sso_debug_tokens: bool, true, def, false; }, /// Yubikey settings @@ -647,7 +663,7 @@ make_config! { /// Host duo_host: String, true, option; /// Application Key (generated automatically) - _duo_akey: Pass, false, option; + _duo_akey: Pass, true, option; }, /// SMTP Email Settings @@ -833,10 +849,14 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { err!("All Duo options need to be set for global Duo support") } - if cfg.sso_enabled - && (cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty()) - { - err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + if cfg.sso_enabled { + if cfg.sso_client_id.is_empty() || cfg.sso_client_secret.is_empty() || cfg.sso_authority.is_empty() { + err!("`SSO_CLIENT_ID`, `SSO_CLIENT_SECRET` and `SSO_AUTHORITY` must be set for SSO support") + } + + internal_sso_issuer_url(&cfg.sso_authority)?; + internal_sso_redirect_url(&cfg.sso_callback_path)?; + check_master_password_policy(&cfg.sso_master_password_policy)?; } if cfg._enable_yubico { @@ -1011,6 +1031,28 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { Ok(()) } +fn internal_sso_issuer_url(sso_authority: &String) -> Result { + match openidconnect::IssuerUrl::new(sso_authority.clone()) { + Err(err) => err!(format!("Invalid sso_authority UR ({sso_authority}): {err}")), + Ok(issuer_url) => Ok(issuer_url), + } +} + +fn internal_sso_redirect_url(sso_callback_path: &String) -> Result { + match openidconnect::RedirectUrl::new(sso_callback_path.clone()) { + Err(err) => err!(format!("Invalid sso_callback_path ({sso_callback_path} built using `domain`) URL: {err}")), + Ok(redirect_url) => Ok(redirect_url), + } +} + +fn check_master_password_policy(sso_master_password_policy: &Option) -> Result<(), Error> { + let policy = sso_master_password_policy.as_ref().map(|mpp| 
serde_json::from_str::(mpp)); + if let Some(Err(error)) = policy { + err!(format!("Invalid sso_master_password_policy ({error}), Ensure that it's correctly escaped with ''")) + } + Ok(()) +} + /// Extracts an RFC 6454 web origin from a URL. fn extract_url_origin(url: &str) -> String { match Url::parse(url) { @@ -1088,6 +1130,26 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls "starttls".to_string() } +/// Allow to parse a multiline list of Key/Values (`key=value`) +/// Will ignore comment lines (starting with `//`) +fn parse_param_list(config: String) -> Vec<(String, String)> { + config + .lines() + .map(|l| l.trim()) + .filter(|l| !l.is_empty() && !l.starts_with("//")) + .filter_map(|l| { + let split = l.split('=').collect::>(); + match &split[..] { + [key, value] => Some(((*key).to_string(), (*value).to_string())), + _ => { + println!("[WARNING] Failed to parse ({l}). Expected key=value"); + None + } + } + }) + .collect() +} + impl Config { pub fn load() -> Result { // Loading from env and file @@ -1277,6 +1339,22 @@ impl Config { } } } + + pub fn sso_issuer_url(&self) -> Result { + internal_sso_issuer_url(&self.sso_authority()) + } + + pub fn sso_redirect_url(&self) -> Result { + internal_sso_redirect_url(&self.sso_callback_path()) + } + + pub fn sso_scopes_vec(&self) -> Vec { + self.sso_scopes().split_whitespace().map(str::to_string).collect() + } + + pub fn sso_authorize_extra_params_vec(&self) -> Vec<(String, String)> { + parse_param_list(self.sso_authorize_extra_params()) + } } use handlebars::{ @@ -1335,6 +1413,7 @@ where reg!("email/send_single_org_removed_from_org", ".html"); reg!("email/set_password", ".html"); reg!("email/smtp_test", ".html"); + reg!("email/sso_change_email", ".html"); reg!("email/twofactor_email", ".html"); reg!("email/verify_email", ".html"); reg!("email/welcome_must_verify", ".html"); diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 60c63589..e5e165e5 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -1,6 +1,7 @@ use chrono::{NaiveDateTime, Utc}; +use data_encoding::{BASE64, BASE64URL}; -use crate::{crypto, CONFIG}; +use crate::crypto; use core::fmt; db_object! 
{ @@ -42,13 +43,16 @@ impl Device { push_uuid: None, push_token: None, - refresh_token: String::new(), + refresh_token: crypto::encode_random_bytes::<64>(BASE64URL), twofactor_remember: None, } } + pub fn roll_refresh_token(&mut self) { + self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL) + } + pub fn refresh_twofactor_remember(&mut self) -> String { - use data_encoding::BASE64; let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64); self.twofactor_remember = Some(twofactor_remember.clone()); @@ -59,61 +63,6 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, scope: Vec) -> (String, i64) { - // If there is no refresh token, we create one - if self.refresh_token.is_empty() { - use data_encoding::BASE64URL; - self.refresh_token = crypto::encode_random_bytes::<64>(BASE64URL); - } - - // Update the expiration of the device and the last update date - let time_now = Utc::now(); - self.updated_at = time_now.naive_utc(); - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // --- - // fn arg: orgs: Vec, - // --- - // let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - // let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - // let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - // let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); - - // Create the JWT claims struct, to send to the client - use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; - let claims = LoginJwtClaims { - nbf: time_now.timestamp(), - exp: (time_now + *DEFAULT_VALIDITY).timestamp(), - iss: JWT_LOGIN_ISSUER.to_string(), - sub: user.uuid.clone(), - - premium: true, - name: user.name.clone(), - email: user.email.clone(), - email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), - - // --- - // Disabled these keys to be added to the JWT since they could cause the JWT to get too large - // Also These key/value pairs are not used anywhere by either Vaultwarden or Bitwarden Clients - // Because these might get used in the future, and they are added by the Bitwarden Server, lets keep it, but then commented out - // See: https://github.com/dani-garcia/vaultwarden/issues/4156 - // --- - // orgowner, - // orgadmin, - // orguser, - // orgmanager, - sstamp: user.security_stamp.clone(), - device: self.uuid.clone(), - scope, - amr: vec!["Application".into()], - }; - - (encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds()) - } - pub fn is_push_device(&self) -> bool { matches!(DeviceType::from_i32(self.atype), DeviceType::Android | DeviceType::Ios) } diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index 9a4e7585..465ea5c7 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -32,4 +32,4 @@ pub use self::send::{Send, SendType}; pub use self::sso_nonce::SsoNonce; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor_incomplete::TwoFactorIncomplete; -pub use self::user::{Invitation, User, UserKdfType, UserStampException}; +pub use self::user::{Invitation, SsoUser, User, UserKdfType, UserStampException}; diff --git 
a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index e5a845f6..92f4d999 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -27,7 +27,7 @@ pub enum OrgPolicyType { MasterPassword = 1, PasswordGenerator = 2, SingleOrg = 3, - RequireSso = 4, + // RequireSso = 4, // Not supported PersonalOwnership = 5, DisableSend = 6, SendOptions = 7, @@ -77,12 +77,11 @@ impl OrgPolicy { } pub fn to_json(&self) -> Value { - let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); json!({ "id": self.uuid, "organizationId": self.org_uuid, "type": self.atype, - "data": data_json, + "data": serde_json::from_str(&self.data).unwrap_or(Value::Null), "enabled": self.enabled, "object": "policy", }) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index fd952955..b7e68475 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -25,6 +25,7 @@ db_object! { pub uuid: String, pub user_uuid: String, pub org_uuid: String, + pub invited_by_email: Option, pub access_all: bool, pub akey: String, @@ -167,7 +168,7 @@ impl Organization { "useTotp": true, "usePolicies": true, // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported // "UseKeyConnector": false, // Not supported "selfHost": true, "useApi": true, @@ -197,12 +198,13 @@ impl Organization { static ACTIVATE_REVOKE_DIFF: i32 = 128; impl UserOrganization { - pub fn new(user_uuid: String, org_uuid: String) -> Self { + pub fn new(user_uuid: String, org_uuid: String, invited_by_email: Option) -> Self { Self { uuid: crate::util::get_uuid(), user_uuid, org_uuid, + invited_by_email, access_all: false, akey: String::new(), @@ -385,7 +387,7 @@ impl UserOrganization { "resetPasswordEnrolled": self.reset_password_key.is_some(), "useResetPassword": CONFIG.mail_enabled(), "ssoBound": false, // Not supported - "useSso": CONFIG.sso_enabled(), + "useSso": false, // Not supported "useKeyConnector": false, "useSecretsManager": false, "usePasswordManager": true, @@ -652,6 +654,17 @@ impl UserOrganization { }} } + pub async fn confirm_user_invitations(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { + db_run! { conn: { + diesel::update(users_organizations::table) + .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) + .set(users_organizations::status.eq(UserOrgStatus::Accepted as i32)) + .execute(conn) + .map_res("Error confirming invitations") + }} + } + pub async fn find_any_state_by_user(user_uuid: &str, conn: &mut DbConn) -> Vec { db_run! { conn: { users_organizations::table diff --git a/src/db/models/sso_nonce.rs b/src/db/models/sso_nonce.rs index 0a9533e0..881f075b 100644 --- a/src/db/models/sso_nonce.rs +++ b/src/db/models/sso_nonce.rs @@ -1,21 +1,34 @@ +use chrono::{NaiveDateTime, Utc}; + use crate::api::EmptyResult; -use crate::db::DbConn; +use crate::db::{DbConn, DbPool}; use crate::error::MapResult; +use crate::sso::NONCE_EXPIRATION; db_object! 
{ #[derive(Identifiable, Queryable, Insertable)] #[diesel(table_name = sso_nonce)] - #[diesel(primary_key(nonce))] + #[diesel(primary_key(state))] pub struct SsoNonce { + pub state: String, pub nonce: String, + pub verifier: Option, + pub redirect_uri: String, + pub created_at: NaiveDateTime, } } /// Local methods impl SsoNonce { - pub fn new(nonce: String) -> Self { - Self { + pub fn new(state: String, nonce: String, verifier: Option, redirect_uri: String) -> Self { + let now = Utc::now().naive_utc(); + + SsoNonce { + state, nonce, + verifier, + redirect_uri, + created_at: now, } } } @@ -28,7 +41,7 @@ impl SsoNonce { diesel::replace_into(sso_nonce::table) .values(SsoNonceDb::to_db(self)) .execute(conn) - .map_res("Error saving SSO device") + .map_res("Error saving SSO nonce") } postgresql { let value = SsoNonceDb::to_db(self); @@ -40,21 +53,37 @@ impl SsoNonce { } } - pub async fn delete(self, conn: &mut DbConn) -> EmptyResult { + pub async fn delete(state: &str, conn: &mut DbConn) -> EmptyResult { db_run! { conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::nonce.eq(self.nonce))) + diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state))) .execute(conn) .map_res("Error deleting SSO nonce") }} } - pub async fn find(nonce: &str, conn: &mut DbConn) -> Option { + pub async fn find(state: &str, conn: &DbConn) -> Option { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; db_run! { conn: { sso_nonce::table - .filter(sso_nonce::nonce.eq(nonce)) + .filter(sso_nonce::state.eq(state)) + .filter(sso_nonce::created_at.ge(oldest)) .first::(conn) .ok() .from_db() }} } + + pub async fn delete_expired(pool: DbPool) -> EmptyResult { + debug!("Purging expired sso_nonce"); + if let Ok(conn) = pool.get().await { + let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; + db_run! { conn: { + diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest))) + .execute(conn) + .map_res("Error deleting expired SSO nonce") + }} + } else { + err!("Failed to get DB connection while purging expired sso_nonce") + } + } } diff --git a/src/db/models/user.rs b/src/db/models/user.rs index a02b694d..1369aa07 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -5,7 +5,7 @@ use crate::crypto; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)] #[diesel(table_name = users)] #[diesel(treat_none_as_null = true)] #[diesel(primary_key(uuid))] @@ -60,6 +60,14 @@ db_object! { pub struct Invitation { pub email: String, } + + #[derive(Identifiable, Queryable, Insertable, Selectable)] + #[diesel(table_name = sso_users)] + #[diesel(primary_key(user_uuid))] + pub struct SsoUser { + pub user_uuid: String, + pub identifier: String, + } } pub enum UserKdfType { @@ -85,7 +93,7 @@ impl User { pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32; pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000; - pub fn new(email: String) -> Self { + pub fn new(email: String, name: Option) -> Self { let now = Utc::now().naive_utc(); let email = email.to_lowercase(); @@ -97,7 +105,7 @@ impl User { verified_at: None, last_verifying_at: None, login_verify_count: 0, - name: email.clone(), + name: name.unwrap_or(email.clone()), email, akey: String::new(), email_new: None, @@ -456,3 +464,51 @@ impl Invitation { } } } + +impl SsoUser { + pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { + db_run! 
{ conn: + sqlite, mysql { + diesel::replace_into(sso_users::table) + .values(SsoUserDb::to_db(self)) + .execute(conn) + .map_res("Error saving SSO user") + } + postgresql { + let value = SsoUserDb::to_db(self); + diesel::insert_into(sso_users::table) + .values(&value) + .execute(conn) + .map_res("Error saving SSO user") + } + } + } + + // Written as an union to make the query more lisible than using an `or_filter`. + // But `first()` does not appear to work with `union()` so we use `load()`. + pub async fn find_by_identifier_or_email( + identifier: &str, + mail: &str, + conn: &DbConn, + ) -> Option<(User, Option)> { + let lower_mail = mail.to_lowercase(); + + db_run! {conn: { + users::table + .inner_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(sso_users::identifier.eq(identifier)) + .union( + users::table + .left_join(sso_users::table) + .select(<(UserDb, Option)>::as_select()) + .filter(users::email.eq(lower_mail)) + ) + .load(conn) + .expect("Error searching user by SSO identifier and email") + .into_iter() + .next() + .map(|(user, sso_user)| { (user.from_db(), sso_user.from_db()) }) + }} + } +} diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 91392524..bfcdf234 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! 
{ + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index fad549d8..7621ad43 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -224,6 +224,7 @@ table! { uuid -> Text, user_uuid -> Text, org_uuid -> Text, + invited_by_email -> Nullable, access_all -> Bool, akey -> Text, status -> Integer, @@ -244,8 +245,19 @@ table! { } table! { - sso_nonce (nonce) { + sso_nonce (state) { + state -> Text, nonce -> Text, + verifier -> Nullable, + redirect_uri -> Text, + created_at -> Timestamp, + } +} + +table! { + sso_users (user_uuid) { + user_uuid -> Text, + identifier -> Text, } } @@ -342,6 +354,7 @@ joinable!(collections_groups -> collections (collections_uuid)); joinable!(collections_groups -> groups (groups_uuid)); joinable!(event -> users_organizations (uuid)); joinable!(auth_requests -> users (user_uuid)); +joinable!(sso_users -> users (user_uuid)); allow_tables_to_appear_in_same_query!( attachments, @@ -355,6 +368,7 @@ allow_tables_to_appear_in_same_query!( org_policies, organizations, sends, + sso_users, twofactor, users, users_collections, diff --git a/src/mail.rs b/src/mail.rs index 4ff6725a..dc248b28 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -492,6 +492,18 @@ pub async fn send_change_email(address: &str, token: &str) -> EmptyResult { send_email(address, &subject, body_html, body_text).await } +pub async fn send_sso_change_email(address: &str) -> EmptyResult { + let (subject, body_html, body_text) = get_text( + "email/sso_change_email", + json!({ + "url": format!("{}/#/settings/account", CONFIG.domain()), + "img_src": CONFIG._smtp_img_src(), + }), + )?; + + send_email(address, &subject, body_html, body_text).await +} + pub async fn send_set_password(address: &str, user_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/set_password", diff --git a/src/main.rs b/src/main.rs index 73085901..4ceb7756 100644 --- a/src/main.rs +++ b/src/main.rs @@ -49,6 +49,7 @@ mod crypto; mod db; mod mail; mod ratelimit; +mod sso; mod util; use crate::api::purge_auth_requests; @@ -594,6 +595,13 @@ fn schedule_jobs(pool: db::DbPool) { })); } + // Purge sso nonce from incomplete flow (default to daily at 00h20). + if !CONFIG.purge_incomplete_sso_nonce().is_empty() { + sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || { + runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone())); + })); + } + // Periodically check for jobs to run. We probably won't need any // jobs that run more often than once a minute, so a default poll // interval of 30 seconds should be sufficient. 
Users who want to diff --git a/src/sso.rs b/src/sso.rs new file mode 100644 index 00000000..d3ab90d6 --- /dev/null +++ b/src/sso.rs @@ -0,0 +1,536 @@ +use chrono::Utc; +use regex::Regex; +use std::borrow::Cow; +use std::sync::RwLock; +use std::time::Duration; +use url::Url; + +use mini_moka::sync::Cache; +use once_cell::sync::Lazy; +use openidconnect::core::{ + CoreClient, CoreIdTokenVerifier, CoreProviderMetadata, CoreResponseType, CoreUserInfoClaims, +}; +use openidconnect::reqwest::async_http_client; +use openidconnect::{ + AccessToken, AuthDisplay, AuthPrompt, AuthenticationFlow, AuthorizationCode, AuthorizationRequest, ClientId, + ClientSecret, CsrfToken, Nonce, OAuth2TokenResponse, PkceCodeChallenge, PkceCodeVerifier, RefreshToken, + ResponseType, Scope, +}; + +use crate::{ + api::ApiResult, + auth, + auth::{AuthMethod, AuthMethodScope, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY}, + db::{ + models::{Device, SsoNonce, User}, + DbConn, + }, + CONFIG, +}; + +static AC_CACHE: Lazy> = + Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); + +static CLIENT_CACHE: RwLock> = RwLock::new(None); + +static SSO_JWT_ISSUER: Lazy = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin())); + +pub static NONCE_EXPIRATION: Lazy = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); + +trait AuthorizationRequestExt<'a> { + fn add_extra_params>, V: Into>>(self, params: Vec<(N, V)>) -> Self; +} + +impl<'a, AD: AuthDisplay, P: AuthPrompt, RT: ResponseType> AuthorizationRequestExt<'a> + for AuthorizationRequest<'a, AD, P, RT> +{ + fn add_extra_params>, V: Into>>(mut self, params: Vec<(N, V)>) -> Self { + for (key, value) in params { + self = self.add_extra_param(key, value); + } + self + } +} + +#[derive(Debug, Serialize, Deserialize)] +struct SsoTokenJwtClaims { + // Not before + pub nbf: i64, + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + // Subject + pub sub: String, +} + +pub fn encode_ssotoken_claims() -> String { + let time_now = Utc::now(); + let claims = SsoTokenJwtClaims { + nbf: time_now.timestamp(), + exp: (time_now + chrono::TimeDelta::try_minutes(2).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + sub: "vaultwarden".to_string(), + }; + + auth::encode_jwt(&claims) +} + +#[derive(Debug, Serialize, Deserialize)] +pub enum OIDCCodeWrapper { + Ok { + code: String, + state: String, + }, + Error { + state: String, + error: String, + error_description: Option, + }, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OIDCCodeClaims { + // Expiration time + pub exp: i64, + // Issuer + pub iss: String, + + pub code: OIDCCodeWrapper, +} + +pub fn encode_code_claims(code: OIDCCodeWrapper) -> String { + let time_now = Utc::now(); + let claims = OIDCCodeClaims { + exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(), + iss: SSO_JWT_ISSUER.to_string(), + code, + }; + + auth::encode_jwt(&claims) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct BasicTokenClaims { + iat: Option, + nbf: Option, + exp: i64, +} + +impl BasicTokenClaims { + fn nbf(&self) -> i64 { + self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) + } +} + +fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { + let mut validation = jsonwebtoken::Validation::default(); + validation.set_issuer(&[CONFIG.sso_authority()]); + validation.insecure_disable_signature_validation(); + validation.validate_aud = false; + + match jsonwebtoken::decode(token, 
&jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { + Ok(btc) => Ok(btc.claims), + Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), + } +} + +#[rocket::async_trait] +trait CoreClientExt { + async fn _get_client() -> ApiResult; + async fn cached() -> ApiResult; + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult; + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_>; +} + +#[rocket::async_trait] +impl CoreClientExt for CoreClient { + // Call the OpenId discovery endpoint to retrieve configuration + async fn _get_client() -> ApiResult { + let client_id = ClientId::new(CONFIG.sso_client_id()); + let client_secret = ClientSecret::new(CONFIG.sso_client_secret()); + + let issuer_url = CONFIG.sso_issuer_url()?; + + let provider_metadata = match CoreProviderMetadata::discover_async(issuer_url, async_http_client).await { + Err(err) => err!(format!("Failed to discover OpenID provider: {err}")), + Ok(metadata) => metadata, + }; + + Ok(CoreClient::from_provider_metadata(provider_metadata, client_id, Some(client_secret)) + .set_redirect_uri(CONFIG.sso_redirect_url()?)) + } + + // Simple cache to prevent recalling the discovery endpoint each time + async fn cached() -> ApiResult { + let cc_client = CLIENT_CACHE.read().ok().and_then(|rw_lock| rw_lock.clone()); + match cc_client { + Some(client) => Ok(client), + None => Self::_get_client().await.map(|client| { + let mut cached_client = CLIENT_CACHE.write().unwrap(); + *cached_client = Some(client.clone()); + client + }), + } + } + + async fn user_info_async(&self, access_token: AccessToken) -> ApiResult { + let endpoint = match self.user_info(access_token, None) { + Err(err) => err!(format!("No user_info endpoint: {err}")), + Ok(endpoint) => endpoint, + }; + + match endpoint.request_async(async_http_client).await { + Err(err) => err!(format!("Request to user_info endpoint failed: {err}")), + Ok(user_info) => Ok(user_info), + } + } + + fn vw_id_token_verifier(&self) -> CoreIdTokenVerifier<'_> { + let mut verifier = self.id_token_verifier(); + if let Some(regex_str) = CONFIG.sso_audience_trusted() { + match Regex::new(®ex_str) { + Ok(regex) => { + verifier = verifier.set_other_audience_verifier_fn(move |aud| regex.is_match(aud)); + } + Err(err) => { + error!("Failed to parse SSO_AUDIENCE_TRUSTED={regex_str} regex: {err}"); + } + } + } + verifier + } +} + +// The `nonce` allow to protect against replay attacks +// redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs +pub async fn authorize_url(state: String, client_id: &str, raw_redirect_uri: &str, mut conn: DbConn) -> ApiResult { + let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); + + let redirect_uri = match client_id { + "web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()), + "desktop" | "mobile" => "bitwarden://sso-callback".to_string(), + "cli" => { + let port_regex = Regex::new(r"^http://localhost:([0-9]{4})$").unwrap(); + match port_regex.captures(raw_redirect_uri).and_then(|captures| captures.get(1).map(|c| c.as_str())) { + Some(port) => format!("http://localhost:{}", port), + None => err!("Failed to extract port number"), + } + } + _ => err!(format!("Unsupported client {client_id}")), + }; + + let client = CoreClient::cached().await?; + let mut auth_req = client + .authorize_url( + AuthenticationFlow::::AuthorizationCode, + || CsrfToken::new(state), + Nonce::new_random, + ) + .add_scopes(scopes) + 
.add_extra_params(CONFIG.sso_authorize_extra_params_vec()); + + let verifier = if CONFIG.sso_pkce() { + let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); + auth_req = auth_req.set_pkce_challenge(pkce_challenge); + Some(pkce_verifier.secret().to_string()) + } else { + None + }; + + let (auth_url, csrf_state, nonce) = auth_req.url(); + + let sso_nonce = SsoNonce::new(csrf_state.secret().to_string(), nonce.secret().to_string(), verifier, redirect_uri); + sso_nonce.save(&mut conn).await?; + + Ok(auth_url) +} + +#[derive(Clone, Debug)] +pub struct AuthenticatedUser { + pub refresh_token: Option, + pub access_token: String, + pub expires_in: Option, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +#[derive(Clone, Debug)] +pub struct UserInformation { + pub state: String, + pub identifier: String, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +async fn decode_code_claims(code: &str, conn: &mut DbConn) -> ApiResult<(String, String)> { + match auth::decode_jwt::(code, SSO_JWT_ISSUER.to_string()) { + Ok(code_claims) => match code_claims.code { + OIDCCodeWrapper::Ok { + code, + state, + } => Ok((code, state)), + OIDCCodeWrapper::Error { + state, + error, + error_description, + } => { + if let Err(err) = SsoNonce::delete(&state, conn).await { + error!("Failed to delete database sso_nonce using {state}: {err}") + } + err!(format!( + "SSO authorization failed: {error}, {}", + error_description.as_ref().unwrap_or(&String::new()) + )) + } + }, + Err(err) => err!(format!("Failed to decode code wrapper: {err}")), + } +} + +// During the 2FA flow we will +// - retrieve the user information and then only discover he needs 2FA. +// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged. +// The `nonce` will ensure that the user is authorized only once. +// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`. 
+pub async fn exchange_code(wrapped_code: &str, conn: &mut DbConn) -> ApiResult { + let (code, state) = decode_code_claims(wrapped_code, conn).await?; + + if let Some(authenticated_user) = AC_CACHE.get(&state) { + return Ok(UserInformation { + state, + identifier: authenticated_user.identifier, + email: authenticated_user.email, + email_verified: authenticated_user.email_verified, + user_name: authenticated_user.user_name, + }); + } + + let oidc_code = AuthorizationCode::new(code.clone()); + let client = CoreClient::cached().await?; + + let nonce = match SsoNonce::find(&state, conn).await { + None => err!(format!("Invalid state cannot retrieve nonce")), + Some(nonce) => nonce, + }; + + let mut exchange = client.exchange_code(oidc_code); + + if CONFIG.sso_pkce() { + match nonce.verifier { + None => err!(format!("Missing verifier in the DB nonce table")), + Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)), + } + } + + match exchange.request_async(async_http_client).await { + Ok(token_response) => { + let user_info = client.user_info_async(token_response.access_token().to_owned()).await?; + let oidc_nonce = Nonce::new(nonce.nonce.clone()); + + let id_token = match token_response.extra_fields().id_token() { + None => err!("Token response did not contain an id_token"), + Some(token) => token, + }; + + if CONFIG.sso_debug_tokens() { + debug!("Id token: {}", id_token.to_string()); + debug!("Access token: {}", token_response.access_token().secret().to_string()); + debug!("Refresh token: {:?}", token_response.refresh_token().map(|t| t.secret().to_string())); + debug!("Expiration time: {:?}", token_response.expires_in()); + } + + let id_claims = match id_token.claims(&client.vw_id_token_verifier(), &oidc_nonce) { + Err(err) => err!(format!("Could not read id_token claims, {err}")), + Ok(claims) => claims, + }; + + let email = match id_claims.email() { + Some(email) => email.to_string(), + None => match user_info.email() { + None => err!("Neither id token nor userinfo contained an email"), + Some(email) => email.to_owned().to_string(), + }, + } + .to_lowercase(); + + let user_name = user_info.preferred_username().map(|un| un.to_string()); + + let refresh_token = token_response.refresh_token().map(|t| t.secret().to_string()); + if refresh_token.is_none() && CONFIG.sso_scopes_vec().contains(&"offline_access".to_string()) { + error!("Scope offline_access is present but response contain no refresh_token"); + } + + let identifier = format!("{}/{}", **id_claims.issuer(), **id_claims.subject()); + + let authenticated_user = AuthenticatedUser { + refresh_token, + access_token: token_response.access_token().secret().to_string(), + expires_in: token_response.expires_in(), + identifier: identifier.clone(), + email: email.clone(), + email_verified: id_claims.email_verified(), + user_name: user_name.clone(), + }; + + AC_CACHE.insert(state.clone(), authenticated_user.clone()); + + Ok(UserInformation { + state, + identifier, + email, + email_verified: id_claims.email_verified(), + user_name, + }) + } + Err(err) => err!(format!("Failed to contact token endpoint: {err}")), + } +} + +// User has passed 2FA flow we can delete `nonce` and clear the cache. 
+pub async fn redeem(state: &String, conn: &mut DbConn) -> ApiResult { + if let Err(err) = SsoNonce::delete(state, conn).await { + error!("Failed to delete database sso_nonce using {state}: {err}") + } + + if let Some(au) = AC_CACHE.get(state) { + AC_CACHE.invalidate(state); + Ok(au) + } else { + err!("Failed to retrieve user info from sso cache") + } +} + +// We always return a refresh_token (with no refresh_token some secrets are not displayed in the web front). +// If there is no SSO refresh_token, we keep the access_token to be able to call user_info to check for validity +pub fn create_auth_tokens( + device: &Device, + user: &User, + refresh_token: Option, + access_token: &str, + expires_in: Option, +) -> ApiResult { + if !CONFIG.sso_auth_only_not_session() { + let now = Utc::now(); + + let (ap_nbf, ap_exp) = match (decode_token_claims("access_token", access_token), expires_in) { + (Ok(ap), _) => (ap.nbf(), ap.exp), + (Err(_), Some(exp)) => (now.timestamp(), (now + exp).timestamp()), + _ => err!("Non jwt access_token and empty expires_in"), + }; + + let access_claims = + auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, auth::AuthMethod::Sso.scope_vec(), now); + + _create_auth_tokens(device, refresh_token, access_claims, access_token) + } else { + Ok(AuthTokens::new(device, user, AuthMethod::Sso)) + } +} + +fn _create_auth_tokens( + device: &Device, + refresh_token: Option, + access_claims: auth::LoginJwtClaims, + access_token: &str, +) -> ApiResult { + let (nbf, exp, token) = if let Some(rt) = refresh_token.as_ref() { + match decode_token_claims("refresh_token", rt) { + Err(_) => { + let time_now = Utc::now(); + let exp = (time_now + *DEFAULT_REFRESH_VALIDITY).timestamp(); + debug!("Non jwt refresh_token (expiration set to {})", exp); + (time_now.timestamp(), exp, TokenWrapper::Refresh(rt.to_string())) + } + Ok(refresh_payload) => { + debug!("Refresh_payload: {:?}", refresh_payload); + (refresh_payload.nbf(), refresh_payload.exp, TokenWrapper::Refresh(rt.to_string())) + } + } + } else { + debug!("No refresh_token present"); + (access_claims.nbf, access_claims.exp, TokenWrapper::Access(access_token.to_string())) + }; + + let refresh_claims = auth::RefreshJwtClaims { + nbf, + exp, + iss: auth::JWT_LOGIN_ISSUER.to_string(), + sub: auth::AuthMethod::Sso, + device_token: device.refresh_token.clone(), + token: Some(token), + }; + + Ok(auth::AuthTokens { + refresh_claims, + access_claims, + }) +} + +// This endpoint is called in two case +// - the session is close to expiration we will try to extend it +// - the user is going to make an action and we check that the session is still valid +pub async fn exchange_refresh_token( + device: &Device, + user: &User, + refresh_claims: &auth::RefreshJwtClaims, +) -> ApiResult { + match &refresh_claims.token { + Some(TokenWrapper::Refresh(refresh_token)) => { + let rt = RefreshToken::new(refresh_token.to_string()); + + let client = CoreClient::cached().await?; + + let token_response = match client.exchange_refresh_token(&rt).request_async(async_http_client).await { + Err(err) => err!(format!("Request to exchange_refresh_token endpoint failed: {:?}", err)), + Ok(token_response) => token_response, + }; + + // Use new refresh_token if returned + let rolled_refresh_token = token_response + .refresh_token() + .map(|token| token.secret().to_string()) + .unwrap_or(refresh_token.to_string()); + + create_auth_tokens( + device, + user, + Some(rolled_refresh_token), + token_response.access_token().secret(), + token_response.expires_in(), + ) + } + 
+        Some(TokenWrapper::Access(access_token)) => {
+            let now = Utc::now();
+            let exp_limit = (now + *BW_EXPIRATION).timestamp();
+
+            if refresh_claims.exp < exp_limit {
+                err_silent!("Access token is close to expiration but we have no refresh token")
+            }
+
+            let client = CoreClient::cached().await?;
+            match client.user_info_async(AccessToken::new(access_token.to_string())).await {
+                Err(err) => {
+                    err_silent!(format!("Failed to retrieve user info, token has probably been invalidated: {err}"))
+                }
+                Ok(_) => {
+                    let access_claims = auth::LoginJwtClaims::new(
+                        device,
+                        user,
+                        now.timestamp(),
+                        refresh_claims.exp,
+                        auth::AuthMethod::Sso.scope_vec(),
+                        now,
+                    );
+                    _create_auth_tokens(device, None, access_claims, access_token)
+                }
+            }
+        }
+        None => err!("No token present while in SSO"),
+    }
+}
diff --git a/src/static/templates/email/sso_change_email.hbs b/src/static/templates/email/sso_change_email.hbs
new file mode 100644
index 00000000..5a512280
--- /dev/null
+++ b/src/static/templates/email/sso_change_email.hbs
@@ -0,0 +1,4 @@
+Your Email Changed
+
+Your email was changed in your SSO Provider. Please update your email in Account Settings ({{url}}).
+{{> email/email_footer_text }}
diff --git a/src/static/templates/email/sso_change_email.html.hbs b/src/static/templates/email/sso_change_email.html.hbs
new file mode 100644
index 00000000..74cd445c
--- /dev/null
+++ b/src/static/templates/email/sso_change_email.html.hbs
@@ -0,0 +1,11 @@
+Your Email Changed
+
+{{> email/email_header }}
+
+
+
+    Your email was changed in your SSO Provider. Please update your email in Account Settings.
+
+
+
+{{> email/email_footer }}
diff --git a/src/util.rs b/src/util.rs
index 01e04adc..38e3c8ef 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -7,7 +7,7 @@ use num_traits::ToPrimitive;
 use once_cell::sync::Lazy;
 use rocket::{
     fairing::{Fairing, Info, Kind},
-    http::{ContentType, Cookie, CookieJar, Header, HeaderMap, Method, SameSite, Status},
+    http::{ContentType, Header, HeaderMap, Method, Status},
     request::FromParam,
     response::{self, Responder},
     Data, Orbit, Request, Response, Rocket,
@@ -130,10 +132,12 @@ impl Cors {
     // If a match exists, return it. Otherwise, return None.
     fn get_allowed_origin(headers: &HeaderMap<'_>) -> Option<String> {
         let origin = Cors::get_header(headers, "Origin");
-        let domain_origin = CONFIG.domain_origin();
-        let sso_origin = CONFIG.sso_authority();
         let safari_extension_origin = "file://";
-        if origin == domain_origin || origin == safari_extension_origin || origin == sso_origin {
+
+        if origin == CONFIG.domain_origin()
+            || origin == safari_extension_origin
+            || (CONFIG.sso_enabled() && origin == CONFIG.sso_authority())
+        {
             Some(origin)
         } else {
             None
@@ -258,33 +260,6 @@ impl<'r> FromParam<'r> for SafeString {
     }
 }
 
-pub struct CustomRedirect {
-    pub url: String,
-    pub headers: Vec<(String, String)>,
-}
-
-impl<'r> rocket::response::Responder<'r, 'static> for CustomRedirect {
-    fn respond_to(self, _: &rocket::request::Request<'_>) -> rocket::response::Result<'static> {
-        let mut response = Response::build()
-            .status(rocket::http::Status {
-                code: 307,
-            })
-            .raw_header("Location", self.url)
-            .header(ContentType::HTML)
-            .finalize();
-
-        // Normal headers
-        response.set_raw_header("Referrer-Policy", "same-origin");
-        response.set_raw_header("X-XSS-Protection", "0");
-
-        for header in &self.headers {
-            response.set_raw_header(header.0.clone(), header.1.clone());
-        }
-
-        Ok(response)
-    }
-}
-
 // Log all the routes from the main paths list, and the attachments endpoint
 // Effectively ignores, any static file route, and the alive endpoint
 const LOGGED_ROUTES: [&str; 7] = ["/api", "/admin", "/identity", "/icons", "/attachments", "/events", "/notifications"];
@@ -1022,29 +997,3 @@ mod tests {
         });
     }
 }
-
-pub struct CookieManager<'a> {
-    jar: &'a CookieJar<'a>,
-}
-
-impl<'a> CookieManager<'a> {
-    pub fn new(jar: &'a CookieJar<'a>) -> Self {
-        Self {
-            jar,
-        }
-    }
-
-    pub fn set_cookie(&self, name: String, value: String) {
-        let cookie = Cookie::build((name, value)).same_site(SameSite::Lax);
-
-        self.jar.add(cookie)
-    }
-
-    pub fn get_cookie(&self, name: String) -> Option<String> {
-        self.jar.get(&name).map(|c| c.value().to_string())
-    }
-
-    pub fn delete_cookie(&self, name: String) {
-        self.jar.remove(Cookie::from(name));
-    }
-}