From 3a3390963c91566e1b60ee4bb3d0f7fea5a950a6 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Mon, 29 Mar 2021 10:27:58 +0200 Subject: [PATCH 01/25] Icon and SMTP Debug fixes. - We need to add some feature to enable smtp debugging again. See: https://github.com/lettre/lettre/pull/584 - Upstream added the fallback icon again, probably because of caching ;). See: https://github.com/bitwarden/server/pull/1149 - Enabled gzip and brotli compression support with reqwest. Some sites seem to force this, or assume that because of the User-Agent string it is supported. This caused some failed icons. Fixes #1540 --- Cargo.lock | 148 ++++++++++++++++++++++------ Cargo.toml | 5 +- src/api/icons.rs | 25 ++--- src/static/images/fallback-icon.png | Bin 0 -> 331 bytes 4 files changed, 134 insertions(+), 44 deletions(-) create mode 100644 src/static/images/fallback-icon.png diff --git a/Cargo.lock b/Cargo.lock index feab7340..e60c8d0d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,6 +26,35 @@ dependencies = [ "memchr", ] +[[package]] +name = "alloc-no-stdlib" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192ec435945d87bc2f70992b4d818154b5feede43c09fb7592146374eac90a6" + +[[package]] +name = "alloc-stdlib" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "697ed7edc0f1711de49ce108c541623a0af97c6c60b2f6e2b65229847ac843c2" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "async-compression" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72c1f1154e234325b50864a349b9c8e56939e266a4c307c0f159812df2f9537" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + [[package]] name = "atty" version = "0.2.14" @@ -159,6 +188,7 @@ dependencies = [ "serde_json", "syslog", "time 0.2.26", + "tracing", "u2f", "uuid", "yubico", @@ -194,6 +224,27 @@ dependencies = [ "byte-tools 0.3.1", ] +[[package]] +name = "brotli" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f29919120f08613aadcd4383764e00526fc9f18b6c0895814faeed0dd78613e" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1052e1c3b8d4d80eb84a8b94f0a1498797b5fb96314c001156a1c761940ef4ec" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + [[package]] name = "buf_redux" version = "0.8.4" @@ -298,9 +349,9 @@ dependencies = [ [[package]] name = "const_fn" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6" +checksum = "076a6803b0dacd6a88cfe64deba628b01533ff5ef265687e6938280c1afd0a28" [[package]] name = "constant_time_eq" @@ -341,6 +392,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" +[[package]] +name = "crc32fast" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "crypto-mac" version = "0.3.0" @@ -401,7 +461,7 @@ dependencies = [ "bitflags", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", 
] [[package]] @@ -429,7 +489,7 @@ checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -525,6 +585,18 @@ dependencies = [ "syslog", ] +[[package]] +name = "flate2" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd3aec53de10fe96d7d8c565eb17f2c687bb5518a2ec453b5b1252964526abe0" +dependencies = [ + "cfg-if 1.0.0", + "crc32fast", + "libc", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -651,7 +723,7 @@ dependencies = [ "proc-macro-hack", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -770,9 +842,9 @@ dependencies = [ [[package]] name = "handlebars" -version = "3.5.3" +version = "3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" +checksum = "580b6f551b29a3a02436318aed09ba1c58eea177dc49e39beac627ad356730a5" dependencies = [ "log 0.4.14", "pest", @@ -841,7 +913,7 @@ dependencies = [ "markup5ever", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -899,9 +971,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.4" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8e946c2b1349055e0b72ae281b238baf1a3ea7307c7e9f9d64673bdd9c26ac7" +checksum = "8bf09f61b52cfcf4c00de50df88ae423d6c02354e385a86341133b5338630ad1" dependencies = [ "bytes 1.0.1", "futures-channel", @@ -940,7 +1012,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.0.1", - "hyper 0.14.4", + "hyper 0.14.5", "native-tls", "tokio", "tokio-native-tls", @@ -1225,7 +1297,7 @@ dependencies = [ "migrations_internals", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -1443,7 +1515,7 @@ checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -1718,7 +1790,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -1793,7 +1865,7 @@ checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -2087,6 +2159,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4" dependencies = [ + "async-compression", "base64 0.13.0", "bytes 1.0.1", "encoding_rs", @@ -2094,7 +2167,7 @@ dependencies = [ "futures-util", "http", "http-body", - "hyper 0.14.4", + "hyper 0.14.5", "hyper-tls", "ipnet", "js-sys", @@ -2109,6 +2182,7 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-native-tls", + "tokio-util", "url 2.2.1", "wasm-bindgen", "wasm-bindgen-futures", @@ -2306,9 +2380,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d493c5f39e02dfb062cd8f33301f90f9b13b650e8c1b1d0fd75c19dd64bff69d" +checksum = "3670b1d2fdf6084d192bc71ead7aabe6c06aa2ea3fbd9cc3ac111fa5c2b1bd84" dependencies = [ "bitflags", "core-foundation", @@ -2319,9 +2393,9 @@ dependencies 
= [ [[package]] name = "security-framework-sys" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee48cdde5ed250b0d3252818f646e174ab414036edb884dde62d80a3ac6082d" +checksum = "3676258fd3cfe2c9a0ec99ce3038798d847ce3e4bb17746373eb9f0f1ac16339" dependencies = [ "core-foundation-sys", "libc", @@ -2359,7 +2433,7 @@ checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -2482,11 +2556,10 @@ checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" [[package]] name = "socket2" -version = "0.3.19" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" +checksum = "9e3dfc207c526015c632472a77be09cf1b6e46866581aecae5cc38fb4235dea2" dependencies = [ - "cfg-if 1.0.0", "libc", "winapi 0.3.9", ] @@ -2542,7 +2615,7 @@ dependencies = [ "quote 1.0.9", "serde", "serde_derive", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -2558,7 +2631,7 @@ dependencies = [ "serde_derive", "serde_json", "sha1", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -2611,9 +2684,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.64" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fd9d1e9976102a03c542daa2eff1b43f9d72306342f3f8b3ed5fb8908195d6f" +checksum = "f3a1d708c221c5a612956ef9f75b37e454e88d1f7b899fbd3a18d4252012d663" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", @@ -2718,7 +2791,7 @@ dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", "standback", - "syn 1.0.64", + "syn 1.0.65", ] [[package]] @@ -2797,10 +2870,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" dependencies = [ "cfg-if 1.0.0", + "log 0.4.14", "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" +dependencies = [ + "proc-macro2 1.0.24", + "quote 1.0.9", + "syn 1.0.65", +] + [[package]] name = "tracing-core" version = "0.1.17" @@ -3032,7 +3118,7 @@ dependencies = [ "log 0.4.14", "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", "wasm-bindgen-shared", ] @@ -3066,7 +3152,7 @@ checksum = "96eb45c1b2ee33545a813a92dbb53856418bf7eb54ab34f7f7ff1448a5b3735d" dependencies = [ "proc-macro2 1.0.24", "quote 1.0.9", - "syn 1.0.64", + "syn 1.0.65", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/Cargo.toml b/Cargo.toml index bb8ad8cb..24c24eba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false } rocket_contrib = "0.5.0-dev" # HTTP client -reqwest = { version = "0.11.2", features = ["blocking", "json"] } +reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli"] } # multipart/form-data support multipart = { version = "0.17.1", features = ["server"], default-features = false } @@ -99,11 +99,12 @@ num-traits = "0.2.14" num-derive = "0.3.3" # Email libraries +tracing = { version = "0.1", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. 
lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } newline-converter = "0.2.0" # Template library -handlebars = { version = "3.5.3", features = ["dir_source"] } +handlebars = { version = "3.5.4", features = ["dir_source"] } # For favicon extraction from main website html5ever = "0.25.1" diff --git a/src/api/icons.rs b/src/api/icons.rs index 747569c6..6da3af0b 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -18,8 +18,6 @@ pub fn routes() -> Vec { routes![icon] } -const ALLOWED_CHARS: &str = "_-."; - static CLIENT: Lazy = Lazy::new(|| { // Generate the default headers let mut default_headers = header::HeaderMap::new(); @@ -45,13 +43,18 @@ static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+ static ICON_BLACKLIST_REGEX: Lazy>> = Lazy::new(|| RwLock::new(HashMap::new())); #[get("//icon.png")] -fn icon(domain: String) -> Option>>> { +fn icon(domain: String) -> Cached>> { + const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); + if !is_valid_domain(&domain) { warn!("Invalid domain: {}", domain); - return None; + return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()); } - get_icon(&domain).map(|icon| Cached::ttl(Content(ContentType::new("image", "x-icon"), icon), CONFIG.icon_cache_ttl())) + match get_icon(&domain) { + Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()), + _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), + } } /// Returns if the domain provided is valid or not. @@ -59,6 +62,8 @@ fn icon(domain: String) -> Option>>> { /// This does some manual checks and makes use of Url to do some basic checking. /// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255. fn is_valid_domain(domain: &str) -> bool { + const ALLOWED_CHARS: &str = "_-."; + // If parsing the domain fails using Url, it will not work with reqwest. 
if let Err(parse_error) = Url::parse(format!("https://{}", domain).as_str()) { debug!("Domain parse error: '{}' - {:?}", domain, parse_error); @@ -486,10 +491,10 @@ fn get_icon_url(domain: &str) -> Result { iconlist.sort_by_key(|x| x.priority); // There always is an icon in the list, so no need to check if it exists, and just return the first one - Ok(IconUrlResult{ + Ok(IconUrlResult { iconlist, cookies: cookie_str, - referer + referer, }) } @@ -510,9 +515,7 @@ fn get_page_with_cookies(url: &str, cookie_str: &str, referer: &str) -> Result Result, Error> { info!("Downloaded icon from {}", icon.href); res.copy_to(&mut buffer)?; break; - }, + } _ => warn!("Download failed for {}", icon.href), }; } diff --git a/src/static/images/fallback-icon.png b/src/static/images/fallback-icon.png new file mode 100644 index 0000000000000000000000000000000000000000..e30b532e65a0304622ad2938bee02fcbc2f89987 GIT binary patch literal 331 zcmV-R0kr;!P)k@@&$eP0S64l=K_sN$BuK6XOfRIOm@a>|#`zV3>$SuCU-E zQMLhNNdZQ}mLg4h7}w;&4I~^WgDEch53KQxu_K8J#uJg;;S%H2>0%`0#3hDNkUZ3| z)RKrpF(U^@SmwIYFTOIS06B_$xJX4jq-Bc*iBq~XFz2#3&YDm1Y*5AUj*7Bt@7bor zt()Ab4=v2u=8Re9(v!>TLqviPy@%-#G5Z?8B+o8VCr^bA$ish5vPevi7gqEa3aqk6 djV-3-_yJsJ=ebCXRj>d6002ovPDHLkV1l Date: Mon, 29 Mar 2021 11:16:20 +0200 Subject: [PATCH 02/25] fix(env.template): IP_HEADER defaults to X-Real-IP This was wrong in commit 88c56de97b48bb5b9b8af350d0d0e0d5f080ff0e. --- .env.template | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.env.template b/.env.template index 34e0e294..a85ce22d 100644 --- a/.env.template +++ b/.env.template @@ -36,9 +36,9 @@ ## Automatically reload the templates for every request, slow, use only for development # RELOAD_TEMPLATES=false -## Client IP Header, used to identify the IP of the client, defaults to "X-Client-IP" +## Client IP Header, used to identify the IP of the client, defaults to "X-Real-IP" ## Set to the string "none" (without quotes), to disable any headers and just use the remote IP -# IP_HEADER=X-Client-IP +# IP_HEADER=X-Real-IP ## Cache time-to-live for successfully obtained icons, in seconds (0 is "forever") # ICON_CACHE_TTL=2592000 From 9caf4bf38381f4ec0680a4b29ba26f9c1921fba0 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Tue, 30 Mar 2021 21:45:10 +0200 Subject: [PATCH 03/25] Misc changes. Some small changes in general: - Moved the SQL Version check struct into the function. - Updated hadolint to 2.0.0 - Fixed hadolint 2.0.0 warnings - Updated github workflows - Added .editorconfig for some general shared editor settings. 
--- .dockerignore | 2 ++ .editorconfig | 23 +++++++++++++++++++++++ .github/workflows/build.yml | 14 ++++++-------- .github/workflows/hadolint.yml | 3 ++- docker/Dockerfile.j2 | 2 +- docker/amd64/Dockerfile | 2 +- docker/amd64/Dockerfile.alpine | 2 +- docker/arm64/Dockerfile | 2 +- docker/armv6/Dockerfile | 2 +- docker/armv7/Dockerfile | 2 +- docker/armv7/Dockerfile.alpine | 2 +- src/db/mod.rs | 14 +++++++------- 12 files changed, 47 insertions(+), 23 deletions(-) create mode 100644 .editorconfig diff --git a/.dockerignore b/.dockerignore index b3e43a23..69f51d2a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,6 +4,8 @@ target # Data folder data .env +.env.template +.gitattributes # IDE files .vscode diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..27c2a5e3 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,23 @@ +# EditorConfig is awesome: https://EditorConfig.org + +# top-most EditorConfig file +root = true + +[*] +end_of_line = lf +charset = utf-8 + +[*.{rs,py}] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true +insert_final_newline = true + +[Makefile] +indent_style = tab diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 81151f17..2c4d4620 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,23 +2,21 @@ name: Build on: push: - # Ignore when there are only changes done too one of these paths - paths-ignore: - - "**.md" - - "**.txt" - - "azure-pipelines.yml" - - "docker/**" - - "hooks/**" - - "tools/**" pull_request: # Ignore when there are only changes done too one of these paths paths-ignore: - "**.md" - "**.txt" + - ".dockerignore" + - ".env.template" + - ".gitattributes" + - ".gitignore" - "azure-pipelines.yml" - "docker/**" - "hooks/**" - "tools/**" + - ".github/FUNDING.yml" + - ".github/ISSUE_TEMPLATE/**" jobs: build: diff --git a/.github/workflows/hadolint.yml b/.github/workflows/hadolint.yml index 7b799554..f2cd9732 100644 --- a/.github/workflows/hadolint.yml +++ b/.github/workflows/hadolint.yml @@ -1,6 +1,7 @@ name: Hadolint on: + push: pull_request: # Ignore when there are only changes done too one of these paths paths: @@ -24,7 +25,7 @@ jobs: sudo curl -L https://github.com/hadolint/hadolint/releases/download/v$HADOLINT_VERSION/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \ sudo chmod +x /usr/local/bin/hadolint env: - HADOLINT_VERSION: 1.19.0 + HADOLINT_VERSION: 2.0.0 # End Download hadolint # Test Dockerfiles diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 7793ff2d..be4b4151 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -250,6 +250,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault {% if package_arch_target is defined %} @@ -264,6 +265,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! 
-WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index 08cdb1a8..e24cca81 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -98,6 +98,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/release/bitwarden_rs . @@ -108,6 +109,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! -WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index 71d63132..eed79fc1 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -93,6 +93,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs . @@ -103,6 +104,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! -WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index c6faf743..b6b50fbd 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -144,6 +144,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs . @@ -154,6 +155,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! -WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index c095e8c9..c6dc75e1 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -144,6 +144,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs . @@ -154,6 +155,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! -WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index a880d061..51d4c75c 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -144,6 +144,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs . @@ -154,6 +155,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! 
-WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index 7a7e4433..14b7e9b8 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -99,6 +99,7 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage +WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-musleabihf/release/bitwarden_rs . @@ -109,6 +110,5 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] # Configures the startup! -WORKDIR / ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/src/db/mod.rs b/src/db/mod.rs index 19191582..2472caa6 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -241,15 +241,15 @@ pub fn backup_database() -> Result<(), Error> { } -use diesel::sql_types::Text; -#[derive(QueryableByName,Debug)] -struct SqlVersion { - #[sql_type = "Text"] - version: String, -} - /// Get the SQL Server version pub fn get_sql_server_version(conn: &DbConn) -> String { + use diesel::sql_types::Text; + #[derive(QueryableByName)] + struct SqlVersion { + #[sql_type = "Text"] + version: String, + } + db_run! {@raw conn: postgresql, mysql { match diesel::sql_query("SELECT version() AS version;").get_result::(conn).ok() { From 3bddc176d6d923492f4f2fda3c72227cd2b684f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Tue, 30 Mar 2021 23:27:55 +0200 Subject: [PATCH 04/25] Updated sponsors --- README.md | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 169923dd..2eeaee6c 100644 --- a/README.md +++ b/README.md @@ -57,5 +57,33 @@ If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org ### Sponsors Thanks for your contribution to the project! -- [@ChonoN](https://github.com/ChonoN) -- [@themightychris](https://github.com/themightychris) + + + + +
+ netDpay
+
+ ChonoN
+ themightychris
From 15feff3e79b319dcbfd2d775d5521896a6068bc6 Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Sun, 28 Mar 2021 16:48:45 +0100 Subject: [PATCH 05/25] Add fmt to CI --- .github/workflows/build.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2c4d4620..9b476d3e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -89,7 +89,7 @@ jobs: with: profile: minimal target: ${{ matrix.target-triple }} - components: clippy + components: clippy, rustfmt # End Uses the rust-toolchain file to determine version @@ -111,6 +111,15 @@ jobs: # End Run cargo clippy + # Run cargo fmt + - name: '`cargo fmt`' + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + # End Run cargo fmt + + # Build the binary - name: '`cargo build --release --features ${{ matrix.features }} --target ${{ matrix.target-triple }}`' uses: actions-rs/cargo@v1 From 0af3956abd2ff9b550130716e41fc14088dac84c Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Wed, 31 Mar 2021 21:18:35 +0100 Subject: [PATCH 06/25] Run `cargo fmt` on codebase --- build.rs | 12 ++-- src/api/admin.rs | 21 +++--- src/api/core/ciphers.rs | 84 +++++++++++++++--------- src/api/core/mod.rs | 4 +- src/api/core/organizations.rs | 40 ++++++----- src/api/core/two_factor/authenticator.rs | 6 +- src/api/core/two_factor/email.rs | 13 +++- src/api/icons.rs | 32 +++++++-- src/api/identity.rs | 33 +++++++--- src/api/notifications.rs | 19 +++--- src/api/web.rs | 50 +++++++++++--- src/auth.rs | 9 ++- src/config.rs | 6 +- src/db/mod.rs | 13 +--- src/db/models/attachment.rs | 1 - src/db/models/cipher.rs | 36 +++++----- src/db/models/collection.rs | 10 ++- src/db/models/device.rs | 24 +++++-- src/db/models/favorite.rs | 24 +++---- src/db/models/folder.rs | 1 - src/db/models/mod.rs | 4 +- src/db/models/org_policy.rs | 8 +-- src/db/models/organization.rs | 28 ++++---- src/db/models/user.rs | 8 +-- src/error.rs | 4 +- src/mail.rs | 38 +++++++---- src/main.rs | 9 ++- src/util.rs | 12 ++-- 28 files changed, 347 insertions(+), 202 deletions(-) diff --git a/build.rs b/build.rs index 0277d21e..bce425e8 100644 --- a/build.rs +++ b/build.rs @@ -1,7 +1,7 @@ -use std::process::Command; use std::env; +use std::process::Command; -fn main() { +fn main() { // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros #[cfg(feature = "sqlite")] println!("cargo:rustc-cfg=sqlite"); @@ -11,8 +11,10 @@ fn main() { println!("cargo:rustc-cfg=postgresql"); #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] - compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"); - + compile_error!( + "You need to enable one DB backend. 
To build with previous defaults do: cargo build --features sqlite" + ); + if let Ok(version) = env::var("BWRS_VERSION") { println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); @@ -61,7 +63,7 @@ fn read_git_info() -> Result<(), std::io::Error> { } else { format!("{}-{}", last_tag, rev_short) }; - + println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); diff --git a/src/api/admin.rs b/src/api/admin.rs index d484407a..7ed3d270 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -142,7 +142,8 @@ fn admin_url(referer: Referer) -> String { fn admin_login(flash: Option) -> ApiResult> { // If there is an error, show it let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg())); - let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); + let json = + json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); // Return the page let text = CONFIG.render_template(BASE_TEMPLATE, &json)?; @@ -329,7 +330,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json { fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { let users = User::get_all(&conn); let dt_fmt = "%Y-%m-%d %H:%M:%S %Z"; - let users_json: Vec = users.iter() + let users_json: Vec = users + .iter() .map(|u| { let mut usr = u.to_json(&conn); usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn)); @@ -339,7 +341,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt)); usr["last_active"] = match u.last_active(&conn) { Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)), - None => json!("Never") + None => json!("Never"), }; usr }) @@ -424,7 +426,6 @@ fn update_user_org_type(data: Json, _token: AdminToken, conn: D user_to_edit.save(&conn) } - #[post("/users/update_revision")] fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { User::update_all_revisions(&conn) @@ -433,7 +434,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { #[get("/organizations/overview")] fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { let organizations = Organization::get_all(&conn); - let organizations_json: Vec = organizations.iter() + let organizations_json: Vec = organizations + .iter() .map(|o| { let mut org = o.to_json(); org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn)); @@ -524,7 +526,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already. 
let (latest_release, latest_commit, latest_web_build) = if has_http_access { ( - match get_github_api::("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") { + match get_github_api::("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") + { Ok(r) => r.tag_name, _ => "-".to_string(), }, @@ -540,7 +543,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu if running_within_docker { "-".to_string() } else { - match get_github_api::("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") { + match get_github_api::( + "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", + ) { Ok(r) => r.tag_name.trim_start_matches('v').to_string(), _ => "-".to_string(), } @@ -552,7 +557,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu let ip_header_name = match &ip_header.0 { Some(h) => h, - _ => "" + _ => "", }; let diagnostics_json = json!({ diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 7b0de205..d566b0b9 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -91,7 +91,8 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn); - let collections_json: Vec = collections.iter() + let collections_json: Vec = collections + .iter() .map(|c| c.to_json_details(&headers.user.uuid, &conn)) .collect(); @@ -105,10 +106,7 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { .collect(); let sends = Send::find_by_user(&headers.user.uuid, &conn); - let sends_json: Vec = sends - .iter() - .map(|s| s.to_json()) - .collect(); + let sends_json: Vec = sends.iter().map(|s| s.to_json()).collect(); let domains_json = if data.exclude_domains { Value::Null @@ -236,7 +234,7 @@ fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn // Check if there are one more more collections selected when this cipher is part of an organization. // err if this is not the case before creating an empty cipher. - if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { + if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { err!("You must select at least one collection."); } @@ -278,17 +276,15 @@ fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt /// allowed to delete or share such ciphers to an org, however. /// /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership -fn enforce_personal_ownership_policy( - data: &CipherData, - headers: &Headers, - conn: &DbConn -) -> EmptyResult { +fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult { if data.OrganizationId.is_none() { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { - err!("Due to an Enterprise Policy, you are restricted from \ - saving items to your personal vault.") + err!( + "Due to an Enterprise Policy, you are restricted from \ + saving items to your personal vault." + ) } } Ok(()) @@ -307,11 +303,12 @@ pub fn update_cipher_from_data( // Check that the client isn't updating an existing cipher with stale data. 
if let Some(dt) = data.LastKnownRevisionDate { - match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format - Err(err) => - warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), - Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => - err!("The client copy of this cipher is out of date. Resync the client and try again."), + match NaiveDateTime::parse_from_str(&dt, "%+") { + // ISO 8601 format + Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), + Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => { + err!("The client copy of this cipher is out of date. Resync the client and try again.") + } Ok(_) => (), } } @@ -384,12 +381,9 @@ pub fn update_cipher_from_data( // But, we at least know we do not need to store and return this specific key. fn _clean_cipher_data(mut json_data: Value) -> Value { if json_data.is_array() { - json_data.as_array_mut() - .unwrap() - .iter_mut() - .for_each(|ref mut f| { - f.as_object_mut().unwrap().remove("Response"); - }); + json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| { + f.as_object_mut().unwrap().remove("Response"); + }); }; json_data } @@ -411,13 +405,13 @@ pub fn update_cipher_from_data( data["Uris"] = _clean_cipher_data(data["Uris"].clone()); } data - }, + } None => err!("Data missing"), }; cipher.name = data.Name; cipher.notes = data.Notes; - cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string() ); + cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string()); cipher.data = type_data.to_string(); cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); @@ -832,7 +826,13 @@ fn post_attachment( let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); let path = base_path.join(&file_name); - let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { + let size = match field + .data + .save() + .memory_threshold(0) + .size_limit(size_limit) + .with_path(path.clone()) + { SaveResult::Full(SavedData::File(_, size)) => size as i32, SaveResult::Full(other) => { std::fs::remove_file(path).ok(); @@ -881,7 +881,11 @@ fn post_attachment_admin( post_attachment(uuid, data, content_type, headers, conn, nt) } -#[post("/ciphers//attachment//share", format = "multipart/form-data", data = "")] +#[post( + "/ciphers//attachment//share", + format = "multipart/form-data", + data = "" +)] fn post_attachment_share( uuid: String, attachment_id: String, @@ -984,12 +988,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase, headers: Headers, conn: } #[post("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_post_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_post_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_post(data, headers, conn, nt) } #[put("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_put_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_put_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_put(data, headers, conn, nt) } @@ -1140,7 +1154,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del Ok(()) } -fn _delete_multiple_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult { +fn 
_delete_multiple_ciphers( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + soft_delete: bool, + nt: Notify, +) -> EmptyResult { let data: Value = data.into_inner().data; let uuids = match data.get("Ids") { @@ -1192,7 +1212,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase, headers: &Headers, conn: & for uuid in uuids { match _restore_cipher_by_uuid(uuid, headers, conn, nt) { Ok(json) => ciphers.push(json.into_inner()), - err => return err + err => return err, } } diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 36e83f0e..70e1866e 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -2,8 +2,8 @@ mod accounts; mod ciphers; mod folders; mod organizations; -pub mod two_factor; mod sends; +pub mod two_factor; pub use sends::start_send_deletion_scheduler; @@ -32,9 +32,9 @@ pub fn routes() -> Vec { // // Move this somewhere else // +use rocket::response::Response; use rocket::Route; use rocket_contrib::json::Json; -use rocket::response::Response; use serde_json::Value; use crate::{ diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 5698c187..ce3c46ea 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -5,7 +5,7 @@ use serde_json::Value; use crate::{ api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType}, - auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose}, + auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders}, db::{models::*, DbConn}, mail, CONFIG, }; @@ -333,7 +333,12 @@ fn post_organization_collection_delete_user( } #[delete("/organizations//collections/")] -fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult { +fn delete_organization_collection( + org_id: String, + col_id: String, + _headers: ManagerHeaders, + conn: DbConn, +) -> EmptyResult { match Collection::find_by_uuid(&col_id, &conn) { None => err!("Collection not found"), Some(collection) => { @@ -426,9 +431,7 @@ fn put_collection_users( continue; } - CollectionUser::save(&user.user_uuid, &coll_id, - d.ReadOnly, d.HidePasswords, - &conn)?; + CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?; } Ok(()) @@ -544,9 +547,7 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user.uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?; } } } @@ -801,9 +802,13 @@ fn edit_user( match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save( + &user_to_edit.user_uuid, + &collection.uuid, + col.ReadOnly, + col.HidePasswords, + &conn, + )?; } } } @@ -989,7 +994,13 @@ struct PolicyData { } #[put("/organizations//policies/", data = "")] -fn put_policy(org_id: String, pol_type: i32, data: Json, _headers: AdminHeaders, conn: DbConn) -> JsonResult { +fn put_policy( + org_id: String, + pol_type: i32, + data: Json, + _headers: AdminHeaders, + conn: DbConn, +) -> JsonResult { let data: PolicyData = data.into_inner(); let 
pol_type_enum = match OrgPolicyType::from_i32(pol_type) { @@ -1127,8 +1138,7 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // If user is not part of the organization, but it exists } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() { - if let Some (user) = User::find_by_mail(&user_data.Email, &conn) { - + if let Some(user) = User::find_by_mail(&user_data.Email, &conn) { let user_org_status = if CONFIG.mail_enabled() { UserOrgStatus::Invited as i32 } else { @@ -1164,7 +1174,7 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) if data.OverwriteExisting { for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) { - if let Some (user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { + if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { if !data.Users.iter().any(|u| u.Email == user_email) { user_org.delete(&conn)?; } diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index f4bd5df5..5f6cb452 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -141,7 +141,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl // The amount of steps back and forward in time // Also check if we need to disable time drifted TOTP codes. // If that is the case, we set the steps to 0 so only the current TOTP is valid. - let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 }; + let steps: i64 = if CONFIG.authenticator_disable_time_drift() { + 0 + } else { + 1 + }; for step in -steps..=steps { let time_step = current_timestamp / 30i64 + step; diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 6aa6e013..a12d47ec 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -65,7 +65,10 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { twofactor.data = twofactor_data.to_json(); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -132,7 +135,10 @@ fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) - ); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -186,7 +192,8 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes /// Validate the email code when used as TwoFactor token mechanism pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { let mut email_data = EmailTokenData::from_json(&data)?; - let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?; + let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn) + .map_res("Two factor not found")?; let issued_token = match &email_data.last_token { Some(t) => t, _ => err!("No token available"), diff --git a/src/api/icons.rs 
b/src/api/icons.rs index 6da3af0b..ae3187cb 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -22,10 +22,18 @@ static CLIENT: Lazy = Lazy::new(|| { // Generate the default headers let mut default_headers = header::HeaderMap::new(); default_headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15")); - default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8")); + default_headers.insert( + header::ACCEPT_LANGUAGE, + header::HeaderValue::from_static("en-US,en;q=0.8"), + ); default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); - default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8")); + default_headers.insert( + header::ACCEPT, + header::HeaderValue::from_static( + "text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8", + ), + ); // Reuse the client between requests Client::builder() @@ -48,12 +56,18 @@ fn icon(domain: String) -> Cached>> { if !is_valid_domain(&domain) { warn!("Invalid domain: {}", domain); - return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()); + return Cached::ttl( + Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + ); } match get_icon(&domain) { Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()), - _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), + _ => Cached::ttl( + Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + ), } } @@ -74,7 +88,10 @@ fn is_valid_domain(domain: &str) -> bool { || domain.starts_with('-') || domain.ends_with('-') { - debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain); + debug!( + "Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", + domain + ); return false; } else if domain.len() > 255 { debug!("Domain validation error: '{}' exceeds 255 characters", domain); @@ -83,7 +100,10 @@ fn is_valid_domain(domain: &str) -> bool { for c in domain.chars() { if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) { - debug!("Domain validation error: '{}' contains an invalid character '{}'", domain, c); + debug!( + "Domain validation error: '{}' contains an invalid character '{}'", + domain, c + ); return false; } } diff --git a/src/api/identity.rs b/src/api/identity.rs index dcfe607a..22a7a11e 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -114,7 +114,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() { let now = now.naive_utc(); - if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 { + if user.last_verifying_at.is_none() + || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() + > CONFIG.signups_verify_resend_time() as i64 + { let resend_limit = 
CONFIG.signups_verify_resend_limit() as i32; if resend_limit == 0 || user.login_verify_count < resend_limit { // We want to send another email verification if we require signups to verify @@ -168,7 +171,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Key": user.akey, "PrivateKey": user.private_key, //"TwoFactorToken": "11122233333444555666777888999" - + "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above @@ -231,12 +234,13 @@ fn twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), + None => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA token not provided" + ), }; - let selected_twofactor = twofactors - .into_iter() - .find(|tf| tf.atype == selected_id && tf.enabled); + let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); use crate::api::core::two_factor as _tf; use crate::crypto::ct_eq; @@ -245,18 +249,27 @@ fn twofactor_auth( let mut remember = data.two_factor_remember.unwrap_or(0); match TwoFactorType::from_i32(selected_id) { - Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?, + Some(TwoFactorType::Authenticator) => { + _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)? + } Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?, Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?, - Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?, - Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?, + Some(TwoFactorType::Duo) => { + _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)? + } + Some(TwoFactorType::Email) => { + _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)? 
+ } Some(TwoFactorType::Remember) => { match device.twofactor_remember { Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => { remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } - _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"), + _ => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA Remember token not provided" + ), } } _ => err!("Invalid two factor provider"), diff --git a/src/api/notifications.rs b/src/api/notifications.rs index 8876937d..c1b9c316 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -4,12 +4,7 @@ use rocket::Route; use rocket_contrib::json::Json; use serde_json::Value as JsonValue; -use crate::{ - api::EmptyResult, - auth::Headers, - db::DbConn, - Error, CONFIG, -}; +use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG}; pub fn routes() -> Vec { routes![negotiate, websockets_err] @@ -19,12 +14,18 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); #[get("/hub")] fn websockets_err() -> EmptyResult { - if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() { - err!(" + if CONFIG.websocket_enabled() + && SHOW_WEBSOCKETS_MSG + .compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { + err!( + " ########################################################### '/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false. - ###########################################################################################\n") + ###########################################################################################\n" + ) } else { Err(Error::empty()) } diff --git a/src/api/web.rs b/src/api/web.rs index 90e572a8..5d2048ea 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -76,18 +76,48 @@ fn alive() -> Json { #[get("/bwrs_static/")] fn static_files(filename: String) -> Result, Error> { match filename.as_ref() { - "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), - "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))), - "shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))), - "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))), + "mail-github.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/mail-github.png"), + )), + "logo-gray.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/logo-gray.png"), + )), + "shield-white.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/shield-white.png"), + )), + "error-x.svg" => Ok(Content( + ContentType::SVG, + include_bytes!("../static/images/error-x.svg"), + )), "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))), - "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), - "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))), - "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), - "datatables.js" => Ok(Content(ContentType::JavaScript, 
include_bytes!("../static/scripts/datatables.js"))), - "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), - "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))), + "bootstrap.css" => Ok(Content( + ContentType::CSS, + include_bytes!("../static/scripts/bootstrap.css"), + )), + "bootstrap-native.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/bootstrap-native.js"), + )), + "identicon.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/identicon.js"), + )), + "datatables.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/datatables.js"), + )), + "datatables.css" => Ok(Content( + ContentType::CSS, + include_bytes!("../static/scripts/datatables.css"), + )), + "jquery-3.5.1.slim.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/jquery-3.5.1.slim.js"), + )), _ => err!(format!("Static file not found: {}", filename)), } } diff --git a/src/auth.rs b/src/auth.rs index 59d1370d..4fe9a7b5 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -223,10 +223,9 @@ use crate::db::{ }; pub struct Host { - pub host: String + pub host: String, } - impl<'a, 'r> FromRequest<'a, 'r> for Host { type Error = &'static str; @@ -506,7 +505,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders { }; if !headers.org_user.has_full_access() { - match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) { + match CollectionUser::find_by_collection_and_user( + &col_id, + &headers.org_user.user_uuid, + &conn, + ) { Some(_) => (), None => err_handler!("The current user isn't a manager for this collection"), } diff --git a/src/config.rs b/src/config.rs index 6c41c975..df4e90b4 100644 --- a/src/config.rs +++ b/src/config.rs @@ -878,9 +878,7 @@ fn js_escape_helper<'reg, 'rc>( .param(0) .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; - let no_quote = h - .param(1) - .is_some(); + let no_quote = h.param(1).is_some(); let value = param .value() @@ -888,7 +886,7 @@ fn js_escape_helper<'reg, 'rc>( .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); - if ! no_quote { + if !no_quote { escaped_value = format!(""{}"", escaped_value); } diff --git a/src/db/mod.rs b/src/db/mod.rs index 2472caa6..85926162 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -25,7 +25,6 @@ pub mod __mysql_schema; #[path = "schemas/postgresql/schema.rs"] pub mod __postgresql_schema; - // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported macro_rules! generate_connections { ( $( $name:ident: $ty:ty ),+ ) => { @@ -110,7 +109,6 @@ impl DbConnType { } } - #[macro_export] macro_rules! db_run { // Same for all dbs @@ -155,7 +153,6 @@ macro_rules! 
db_run { }; } - pub trait FromDb { type Output; #[allow(clippy::wrong_self_convention)] @@ -240,7 +237,6 @@ pub fn backup_database() -> Result<(), Error> { Ok(()) } - /// Get the SQL Server version pub fn get_sql_server_version(conn: &DbConn) -> String { use diesel::sql_types::Text; @@ -308,8 +304,7 @@ mod sqlite_migrations { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection. @@ -337,8 +332,7 @@ mod mysql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection/session. @@ -359,8 +353,7 @@ mod postgresql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 6a8fc5b0..76bc474e 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -59,7 +59,6 @@ use crate::error::MapResult; /// Database methods impl Attachment { - pub fn save(&self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: sqlite, mysql { diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 365865f8..6de8afbd 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -2,14 +2,7 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; use super::{ - Attachment, - CollectionCipher, - Favorite, - FolderCipher, - Organization, - User, - UserOrgStatus, - UserOrgType, + Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization, }; @@ -90,17 +83,24 @@ impl Cipher { attachments.iter().map(|c| c.to_json(host)).collect() }; - let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); + let fields_json = self + .fields + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); + let password_history_json = self + .password_history + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); - let (read_only, hide_passwords) = - match self.get_access_restrictions(&user_uuid, conn) { - Some((ro, hp)) => (ro, hp), - None => { - error!("Cipher ownership assertion failure"); - (true, true) - }, - }; + let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) { + Some((ro, hp)) => (ro, hp), + None => { + error!("Cipher ownership assertion failure"); + (true, true) + } + }; // Get the type_data or a default to an empty json object '{}'. // If not passing an empty object, mobile clients will crash. diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 5fbc3128..06a2d671 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher}; +use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -284,7 +284,13 @@ impl CollectionUser { }} } - pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult { + pub fn save( + user_uuid: &str, + collection_uuid: &str, + read_only: bool, + hide_passwords: bool, + conn: &DbConn, + ) -> EmptyResult { User::update_uuid_revision(&user_uuid, conn); db_run! 
{ conn: diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 77837fca..b2297fe2 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -74,10 +74,26 @@ impl Device { let time_now = Utc::now().naive_utc(); self.updated_at = time_now; - let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + let orgowner: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 0) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgadmin: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 1) + .map(|o| o.org_uuid.clone()) + .collect(); + let orguser: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 2) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgmanager: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 3) + .map(|o| o.org_uuid.clone()) + .collect(); // Create the JWT claims struct, to send to the client use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; diff --git a/src/db/models/favorite.rs b/src/db/models/favorite.rs index 4f610f21..2fc20380 100644 --- a/src/db/models/favorite.rs +++ b/src/db/models/favorite.rs @@ -20,7 +20,7 @@ use crate::error::MapResult; impl Favorite { // Returns whether the specified cipher is a favorite of the specified user. pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { - db_run!{ conn: { + db_run! { conn: { let query = favorites::table .filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::user_uuid.eq(user_uuid)) @@ -36,19 +36,19 @@ impl Favorite { match (old, new) { (false, true) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { - diesel::insert_into(favorites::table) - .values(( - favorites::user_uuid.eq(user_uuid), - favorites::cipher_uuid.eq(cipher_uuid), - )) - .execute(conn) - .map_res("Error adding favorite") - }} + db_run! { conn: { + diesel::insert_into(favorites::table) + .values(( + favorites::user_uuid.eq(user_uuid), + favorites::cipher_uuid.eq(cipher_uuid), + )) + .execute(conn) + .map_res("Error adding favorite") + }} } (true, false) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { + db_run! { conn: { diesel::delete( favorites::table .filter(favorites::user_uuid.eq(user_uuid)) @@ -59,7 +59,7 @@ impl Favorite { }} } // Otherwise, the favorite status is already what it should be. - _ => Ok(()) + _ => Ok(()), } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index b5bbcc79..ec83d117 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -109,7 +109,6 @@ impl Folder { User::update_uuid_revision(&self.user_uuid, conn); FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) .execute(conn) diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index a4fb635b..2179b7f0 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -6,9 +6,9 @@ mod favorite; mod folder; mod org_policy; mod organization; +mod send; mod two_factor; mod user; -mod send; pub use self::attachment::Attachment; pub use self::cipher::Cipher; @@ -18,6 +18,6 @@ pub use self::favorite::Favorite; pub use self::folder::{Folder, FolderCipher}; pub use self::org_policy::{OrgPolicy, OrgPolicyType}; pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; +pub use self::send::{Send, SendType}; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::user::{Invitation, User, UserStampException}; -pub use self::send::{Send, SendType}; \ No newline at end of file diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 0707eccc..810a9c3f 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -4,7 +4,7 @@ use crate::api::EmptyResult; use crate::db::DbConn; use crate::error::MapResult; -use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType}; +use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -20,8 +20,7 @@ db_object! { } } -#[derive(Copy, Clone)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, num_derive::FromPrimitive)] pub enum OrgPolicyType { TwoFactorAuthentication = 0, MasterPassword = 1, @@ -175,7 +174,8 @@ impl OrgPolicy { /// and the user is not an owner or admin of that org. This is only useful for checking /// applicability of policy types that have these particular semantics. pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { - for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only. + for policy in OrgPolicy::find_by_user(user_uuid, conn) { + // Returns confirmed users only. if policy.enabled && policy.has_type(policy_type) { let org_uuid = &policy.org_uuid; if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 1eeb04d2..9931ed5b 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -1,8 +1,8 @@ +use num_traits::FromPrimitive; use serde_json::Value; use std::cmp::Ordering; -use num_traits::FromPrimitive; -use super::{CollectionUser, User, OrgPolicy}; +use super::{CollectionUser, OrgPolicy, User}; db_object! { #[derive(Identifiable, Queryable, Insertable, AsChangeset)] @@ -35,8 +35,7 @@ pub enum UserOrgStatus { Confirmed = 2, } -#[derive(Copy, Clone, PartialEq, Eq)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)] pub enum UserOrgType { Owner = 0, Admin = 1, @@ -117,7 +116,10 @@ impl PartialOrd for i32 { } fn le(&self, other: &UserOrgType) -> bool { - matches!(self.partial_cmp(other), Some(Ordering::Less) | Some(Ordering::Equal) | None) + matches!( + self.partial_cmp(other), + Some(Ordering::Less) | Some(Ordering::Equal) | None + ) } } @@ -236,7 +238,6 @@ impl Organization { UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) .execute(conn) @@ -347,11 +348,13 @@ impl UserOrganization { let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); collections .iter() - .map(|c| json!({ - "Id": c.collection_uuid, - "ReadOnly": c.read_only, - "HidePasswords": c.hide_passwords, - })) + .map(|c| { + json!({ + "Id": c.collection_uuid, + "ReadOnly": c.read_only, + "HidePasswords": c.hide_passwords, + }) + }) .collect() }; @@ -446,8 +449,7 @@ impl UserOrganization { } pub fn has_full_access(&self) -> bool { - (self.access_all || self.atype >= UserOrgType::Admin) && - self.has_status(UserOrgStatus::Confirmed) + (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed) } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { diff --git a/src/db/models/user.rs b/src/db/models/user.rs index fdd2dcae..6f61dfeb 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -63,8 +63,8 @@ enum UserStatus { #[derive(Serialize, Deserialize)] pub struct UserStampException { - pub route: String, - pub security_stamp: String + pub route: String, + pub security_stamp: String, } /// Local methods @@ -162,7 +162,7 @@ impl User { pub fn set_stamp_exception(&mut self, route_exception: &str) { let stamp_exception = UserStampException { route: route_exception.to_string(), - security_stamp: self.security_stamp.to_string() + security_stamp: self.security_stamp.to_string(), }; self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); } @@ -341,7 +341,7 @@ impl User { pub fn last_active(&self, conn: &DbConn) -> Option { match Device::find_latest_active_by_user(&self.uuid, conn) { Some(device) => Some(device.updated_at), - None => None + None => None, } } } diff --git a/src/error.rs b/src/error.rs index a0b28a4b..9c597a8b 100644 --- a/src/error.rs +++ b/src/error.rs @@ -33,10 +33,10 @@ macro_rules! 
make_error { }; } +use diesel::r2d2::PoolError as R2d2Err; use diesel::result::Error as DieselErr; use diesel::ConnectionError as DieselConErr; use diesel_migrations::RunMigrationsError as DieselMigErr; -use diesel::r2d2::PoolError as R2d2Err; use handlebars::RenderError as HbErr; use jsonwebtoken::errors::Error as JwtErr; use regex::Error as RegexErr; @@ -191,7 +191,7 @@ use rocket::response::{self, Responder, Response}; impl<'r> Responder<'r> for Error { fn respond_to(self, _: &Request) -> response::Result<'r> { match self.error { - ErrorKind::EmptyError(_) => {} // Don't print the error in this situation + ErrorKind::EmptyError(_) => {} // Don't print the error in this situation ErrorKind::SimpleError(_) => {} // Don't print the error in this situation _ => error!(target: "error", "{:#?}", self), }; diff --git a/src/mail.rs b/src/mail.rs index cd9edd9e..eb7c84c5 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr}; +use std::str::FromStr; use chrono::{DateTime, Local}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; @@ -58,21 +58,32 @@ fn mailer() -> SmtpTransport { let smtp_client = match CONFIG.smtp_auth_mechanism() { Some(mechanism) => { - let allowed_mechanisms = [SmtpAuthMechanism::Plain, SmtpAuthMechanism::Login, SmtpAuthMechanism::Xoauth2]; + let allowed_mechanisms = [ + SmtpAuthMechanism::Plain, + SmtpAuthMechanism::Login, + SmtpAuthMechanism::Xoauth2, + ]; let mut selected_mechanisms = vec![]; for wanted_mechanism in mechanism.split(',') { for m in &allowed_mechanisms { - if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { + if m.to_string().to_lowercase() + == wanted_mechanism + .trim_matches(|c| c == '"' || c == '\'' || c == ' ') + .to_lowercase() + { selected_mechanisms.push(*m); } } - }; + } if !selected_mechanisms.is_empty() { smtp_client.authentication(selected_mechanisms) } else { // Only show a warning, and return without setting an actual authentication mechanism - warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism); + warn!( + "No valid SMTP Auth mechanism found for '{}', using default values", + mechanism + ); smtp_client } } @@ -316,31 +327,30 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() - .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::>()[1] ))) + .message_id(Some(format!( + "<{}@{}>", + crate::util::get_uuid(), + smtp_from.split('@').collect::>()[1] + ))) .to(Mailbox::new(None, Address::from_str(&address)?)) .from(Mailbox::new( Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?, )) .subject(subject) - .multipart( - MultiPart::alternative() - .singlepart(text) - .singlepart(html) - )?; + .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?; match mailer().send(&email) { Ok(_) => Ok(()), // Match some common errors and make them more user friendly Err(e) => { - if e.is_client() { - err!(format!("SMTP Client error: {}", e)); + err!(format!("SMTP Client error: {}", e)); } else if e.is_transient() { err!(format!("SMTP 4xx error: {:?}", e)); } else if e.is_permanent() { err!(format!("SMTP 5xx error: {:?}", e)); - } else if e.is_timeout() { + } else if e.is_timeout() { err!(format!("SMTP timeout error: {:?}", e)); } else { Err(e.into()) diff --git a/src/main.rs b/src/main.rs index 50975c66..1908d967 100644 --- a/src/main.rs +++ b/src/main.rs @@ -123,7 
+123,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { // Enable smtp debug logging only specifically for smtp when need. // This can contain sensitive information we do not want in the default debug/trace logging. if CONFIG.smtp_debug() { - println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"); + println!( + "[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!" + ); println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n"); logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug) } else { @@ -294,7 +296,10 @@ fn check_web_vault() { let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html"); if !index_path.exists() { - error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder()); + error!( + "Web vault is not found at '{}'. To install it, please follow the steps in: ", + CONFIG.web_vault_folder() + ); error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault"); error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it"); exit(1); diff --git a/src/util.rs b/src/util.rs index feafa467..1858d4a3 100644 --- a/src/util.rs +++ b/src/util.rs @@ -28,7 +28,10 @@ impl Fairing for AppHeaders { res.set_raw_header("X-Frame-Options", "SAMEORIGIN"); res.set_raw_header("X-Content-Type-Options", "nosniff"); res.set_raw_header("X-XSS-Protection", "1; mode=block"); - let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors()); + let csp = format!( + "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", + CONFIG.allowed_iframe_ancestors() + ); res.set_raw_header("Content-Security-Policy", csp); // Disable cache unless otherwise specified @@ -293,8 +296,7 @@ where use std::env; -pub fn get_env_str_value(key: &str) -> Option -{ +pub fn get_env_str_value(key: &str) -> Option { let key_file = format!("{}_FILE", key); let value_from_env = env::var(key); let value_file = env::var(&key_file); @@ -304,9 +306,9 @@ pub fn get_env_str_value(key: &str) -> Option (Ok(v_env), Err(_)) => Some(v_env), (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) { Ok(content) => Some(content.trim().to_string()), - Err(e) => panic!("Failed to load {}: {:?}", key, e) + Err(e) => panic!("Failed to load {}: {:?}", key, e), }, - _ => None + _ => None, } } From 93c881a7a9abf30c1d2cfea961d5637de2757b86 Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Wed, 31 Mar 2021 21:45:05 +0100 Subject: [PATCH 07/25] Reflow some lines manually --- src/api/admin.rs | 8 ++++++-- src/api/core/ciphers.rs | 5 +---- src/api/core/two_factor/authenticator.rs | 6 +----- src/db/models/org_policy.rs | 2 +- 4 files changed, 9 insertions(+), 12 deletions(-) diff --git a/src/api/admin.rs b/src/api/admin.rs index 7ed3d270..9ef22b7c 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -142,8 +142,12 @@ fn admin_url(referer: Referer) -> String { fn admin_login(flash: Option) -> ApiResult> { // If there is an error, show it let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg())); - let json = - json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); + let json = json!({ + "page_content": "admin/login", + "version": VERSION, + "error": msg, + "urlpath": 
CONFIG.domain_path() + }); // Return the page let text = CONFIG.render_template(BASE_TEMPLATE, &json)?; diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index d566b0b9..8d5bcbc9 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -281,10 +281,7 @@ fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { - err!( - "Due to an Enterprise Policy, you are restricted from \ - saving items to your personal vault." - ) + err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.") } } Ok(()) diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index 5f6cb452..3578e874 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -141,11 +141,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl // The amount of steps back and forward in time // Also check if we need to disable time drifted TOTP codes. // If that is the case, we set the steps to 0 so only the current TOTP is valid. - let steps: i64 = if CONFIG.authenticator_disable_time_drift() { - 0 - } else { - 1 - }; + let steps = !CONFIG.authenticator_disable_time_drift() as i64; for step in -steps..=steps { let time_step = current_timestamp / 30i64 + step; diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 810a9c3f..1a040fc1 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -174,8 +174,8 @@ impl OrgPolicy { /// and the user is not an owner or admin of that org. This is only useful for checking /// applicability of policy types that have these particular semantics. pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { + // Returns confirmed users only. for policy in OrgPolicy::find_by_user(user_uuid, conn) { - // Returns confirmed users only. if policy.enabled && policy.has_type(policy_type) { let org_uuid = &policy.org_uuid; if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { From 233f03ca2b9a43c07c9aba19615a8e054193cf3b Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Thu, 1 Apr 2021 20:44:58 +0100 Subject: [PATCH 08/25] Just ignore scripts Nothing else in `src/static` is vendored external scripts, so just ignore these. 
This also fixes the glob, which previously wasn't matching anything --- .gitattributes | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitattributes b/.gitattributes index 20701478..b33a6211 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,3 @@ # Ignore vendored scripts in GitHub stats -src/static/* linguist-vendored +src/static/scripts/* linguist-vendored From 3565bfc939ac456b64bd8c1d0b9a9a2b3e65996c Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Thu, 1 Apr 2021 21:57:08 -0700 Subject: [PATCH 09/25] Sync global_domains.json to bitwarden/server@261916d (Stack Exchange) --- src/static/global_domains.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 4a475f92..f7809f51 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -772,7 +772,8 @@ "stackoverflow.com", "serverfault.com", "mathoverflow.net", - "askubuntu.com" + "askubuntu.com", + "stackapps.com" ], "Excluded": false }, From 1d0eaac260d251abed23106e6356cb07e5b6e994 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Sat, 3 Apr 2021 22:51:44 +0200 Subject: [PATCH 10/25] Updated icon fetching. - Added image type checking, and prevent downloading non images. We didn't checked this before, which could in turn could allow someone to download an arbitrary file. - This also prevents SVG images from being used, while they work on the web-vault and desktop client, they didn't on the mobile versions. - Because of this image type checking we can return a valid file type instead of only 'x-icon' (which is still used as a fallback). - Prevent rel values with `icon-mask`, these are not valid favicons. --- src/api/icons.rs | 58 +++++++++++++++++++++++++++++++++++++----------- 1 file changed, 45 insertions(+), 13 deletions(-) diff --git a/src/api/icons.rs b/src/api/icons.rs index 6da3af0b..3f8a41e1 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -37,6 +37,7 @@ static CLIENT: Lazy = Lazy::new(|| { // Build Regex only once since this takes a lot of time. static ICON_REL_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?i)icon$|apple.*icon").unwrap()); +static ICON_REL_BLACKLIST: Lazy = Lazy::new(|| Regex::new(r"(?i)mask-icon").unwrap()); static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); // Special HashMap which holds the user defined Regex to speedup matching the regex. 
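
For reference, the `rel` filter added further down combines these two regexes: an attribute is accepted only when it matches ICON_REL_REGEX and does not match ICON_REL_BLACKLIST. A minimal self-contained sketch of that behaviour (the regex patterns are copied from the statics above; the closure name and `main()` wrapper are illustrative only and assume the `regex` crate that icons.rs already uses):

    use regex::Regex;

    fn main() {
        let icon_rel = Regex::new(r"(?i)icon$|apple.*icon").unwrap();
        let icon_rel_blacklist = Regex::new(r"(?i)mask-icon").unwrap();
        // A candidate <link rel="..."> is kept only if it looks like a favicon
        // and is not a Safari pinned-tab mask icon.
        let is_favicon_rel = |rel: &str| icon_rel.is_match(rel) && !icon_rel_blacklist.is_match(rel);

        assert!(is_favicon_rel("icon"));
        assert!(is_favicon_rel("shortcut icon"));
        assert!(is_favicon_rel("apple-touch-icon"));
        assert!(!is_favicon_rel("mask-icon"));
    }

Rejecting `mask-icon` candidates matters because they typically point to monochrome SVG outlines; as noted above, SVG icons render in the web-vault and desktop clients but not in the mobile apps.
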
@@ -52,7 +53,9 @@ fn icon(domain: String) -> Cached>> { } match get_icon(&domain) { - Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()), + Some((icon, icon_type)) => { + Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl()) + }, _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), } } @@ -243,7 +246,7 @@ fn is_domain_blacklisted(domain: &str) -> bool { is_blacklisted } -fn get_icon(domain: &str) -> Option> { +fn get_icon(domain: &str) -> Option<(Vec, String)> { let path = format!("{}/{}.png", CONFIG.icon_cache_folder(), domain); // Check for expiration of negatively cached copy @@ -252,7 +255,11 @@ fn get_icon(domain: &str) -> Option> { } if let Some(icon) = get_cached_icon(&path) { - return Some(icon); + let icon_type = match get_icon_type(&icon) { + Some(x) => x, + _ => "x-icon", + }; + return Some((icon, icon_type.to_string())); } if CONFIG.disable_icon_download() { @@ -261,9 +268,9 @@ fn get_icon(domain: &str) -> Option> { // Get the icon, or None in case of error match download_icon(&domain) { - Ok(icon) => { + Ok((icon, icon_type)) => { save_icon(&path, &icon); - Some(icon) + Some((icon, icon_type.unwrap_or("x-icon").to_string())) } Err(e) => { error!("Error downloading icon: {:?}", e); @@ -324,7 +331,6 @@ fn icon_is_expired(path: &str) -> bool { expired.unwrap_or(true) } -#[derive(Debug)] struct Icon { priority: u8, href: String, @@ -348,7 +354,7 @@ fn get_favicons_node(node: &std::rc::Rc, icons: &mut Ve let attr_name = attr.name.local.as_ref(); let attr_value = attr.value.as_ref(); - if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) { + if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) { has_rel = true; } else if attr_name == "href" { href = Some(attr_value); @@ -597,7 +603,7 @@ fn parse_sizes(sizes: Option<&str>) -> (u16, u16) { (width, height) } -fn download_icon(domain: &str) -> Result, Error> { +fn download_icon(domain: &str) -> Result<(Vec, Option<&str>), Error> { if is_domain_blacklisted(domain) { err!("Domain is blacklisted", domain) } @@ -605,6 +611,7 @@ fn download_icon(domain: &str) -> Result, Error> { let icon_result = get_icon_url(&domain)?; let mut buffer = Vec::new(); + let mut icon_type: Option<&str> = None; use data_url::DataUrl; @@ -616,17 +623,31 @@ fn download_icon(domain: &str) -> Result, Error> { Ok((body, _fragment)) => { // Also check if the size is atleast 67 bytes, which seems to be the smallest png i could create if body.len() >= 67 { + // Check if the icon type is allowed, else try an icon from the list. + icon_type = get_icon_type(&body); + if icon_type.is_none() { + debug!("Icon from {} data:image uri, is not a valid image type", domain); + continue; + } + info!("Extracted icon from data:image uri for {}", domain); buffer = body; break; } } - _ => warn!("data uri is invalid"), + _ => warn!("Extracted icon from data:image uri is invalid"), }; } else { match get_page_with_cookies(&icon.href, &icon_result.cookies, &icon_result.referer) { Ok(mut res) => { - info!("Downloaded icon from {}", icon.href); res.copy_to(&mut buffer)?; + // Check if the icon type is allowed, else try an icon from the list. 
+ icon_type = get_icon_type(&buffer); + if icon_type.is_none() { + buffer.clear(); + debug!("Icon from {}, is not a valid image type", icon.href); + continue; + } + info!("Downloaded icon from {}", icon.href); break; } _ => warn!("Download failed for {}", icon.href), @@ -635,10 +656,10 @@ fn download_icon(domain: &str) -> Result, Error> { } if buffer.is_empty() { - err!("Empty response") + err!("Empty response downloading icon") } - Ok(buffer) + Ok((buffer, icon_type)) } fn save_icon(path: &str, icon: &[u8]) { @@ -650,7 +671,18 @@ fn save_icon(path: &str, icon: &[u8]) { create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache"); } Err(e) => { - info!("Icon save error: {:?}", e); + warn!("Icon save error: {:?}", e); } } } + +fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { + match bytes { + [137, 80, 78, 71, ..] => Some("png"), + [0, 0, 1, 0, ..] => Some("x-icon"), + [82, 73, 70, 70, ..] => Some("webp"), + [255, 216, 255, ..] => Some("jpeg"), + [66, 77, ..] => Some("bmp"), + _ => None + } +} From 95fc88ae5bef5f4d1e9a8da4f5de7c955fb75a19 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Mon, 5 Apr 2021 15:09:16 +0200 Subject: [PATCH 11/25] Some admin interface updates. - Fixed bug when web-vault is disabled. - Updated sql-server version check to be simpler thx to @weiznich ( https://github.com/dani-garcia/bitwarden_rs/pull/1548#discussion_r604767196 ) - Use `VACUUM INTO` to create a SQLite backup instead of using the external sqlite3 application. - This also removes the dependancy of having the sqlite3 packages installed on the final image unnecessary, and thus removed it. - Updated backup filename to also have the current time. - Add specific bitwarden_rs web-vault version check (to match letter patched versions) Will work when https://github.com/dani-garcia/bw_web_builds/pull/33 is build (But still works without it also). 
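
The `VACUUM INTO` backup mentioned above boils down to a single SQL statement run through diesel. A rough standalone sketch, assuming diesel 1.4 with the `sqlite` feature as used by this project (the function name, parameters and error type are placeholders, not the actual implementation in src/db/mod.rs below):

    use diesel::{sqlite::SqliteConnection, Connection, RunQueryDsl};

    // VACUUM INTO (SQLite 3.27+) writes a transactionally consistent copy of the
    // live database into a new file, without shelling out to the sqlite3 CLI.
    fn backup_sqlite(db_url: &str, backup_path: &str) -> Result<(), Box<dyn std::error::Error>> {
        let conn = SqliteConnection::establish(db_url)?;
        diesel::sql_query(format!("VACUUM INTO '{}'", backup_path)).execute(&conn)?;
        Ok(())
    }

Since the copy is made by SQLite itself, the external sqlite3 binary is no longer required at runtime, which is why the sqlite/sqlite3 packages are removed from the Docker images in this patch.
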
--- docker/Dockerfile.j2 | 4 -- docker/amd64/Dockerfile | 1 - docker/amd64/Dockerfile.alpine | 1 - docker/arm64/Dockerfile | 1 - docker/armv6/Dockerfile | 1 - docker/armv7/Dockerfile | 1 - docker/armv7/Dockerfile.alpine | 1 - src/api/admin.rs | 27 +++++++---- src/db/mod.rs | 56 ++++++++-------------- src/static/templates/admin/diagnostics.hbs | 13 ++++- 10 files changed, 50 insertions(+), 56 deletions(-) diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index be4b4151..71630fa6 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -215,9 +215,6 @@ RUN apk add --no-cache \ openssl \ curl \ dumb-init \ -{% if "sqlite" in features %} - sqlite \ -{% endif %} {% if "mysql" in features %} mariadb-connector-c \ {% endif %} @@ -232,7 +229,6 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ dumb-init \ - sqlite3 \ libmariadb-dev-compat \ libpq5 \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index e24cca81..f524e21a 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -86,7 +86,6 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ dumb-init \ - sqlite3 \ libmariadb-dev-compat \ libpq5 \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index eed79fc1..a7923d30 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -82,7 +82,6 @@ RUN apk add --no-cache \ openssl \ curl \ dumb-init \ - sqlite \ postgresql-libs \ ca-certificates diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index b6b50fbd..5ef151c5 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ dumb-init \ - sqlite3 \ libmariadb-dev-compat \ libpq5 \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index c6dc75e1..d86bc5d1 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ dumb-init \ - sqlite3 \ libmariadb-dev-compat \ libpq5 \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index 51d4c75c..ab95f629 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -129,7 +129,6 @@ RUN apt-get update && apt-get install -y \ ca-certificates \ curl \ dumb-init \ - sqlite3 \ libmariadb-dev-compat \ libpq5 \ && rm -rf /var/lib/apt/lists/* diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index 14b7e9b8..07895816 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -86,7 +86,6 @@ RUN apk add --no-cache \ openssl \ curl \ dumb-init \ - sqlite \ ca-certificates RUN mkdir /data diff --git a/src/api/admin.rs b/src/api/admin.rs index d484407a..d5a743c9 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -1,7 +1,7 @@ use once_cell::sync::Lazy; use serde::de::DeserializeOwned; use serde_json::Value; -use std::{env, process::Command, time::Duration}; +use std::{env, time::Duration}; use reqwest::{blocking::Client, header::USER_AGENT}; use rocket::{ @@ -68,7 +68,6 @@ static CAN_BACKUP: Lazy = Lazy::new(|| { DbConnType::from_url(&CONFIG.database_url()) .map(|t| t == DbConnType::sqlite) .unwrap_or(false) - && Command::new("sqlite3").arg("-version").status().is_ok() }); #[get("/")] @@ -502,9 +501,17 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu 
use std::net::ToSocketAddrs; // Get current running versions - let vault_version_path = format!("{}/{}", CONFIG.web_vault_folder(), "version.json"); - let vault_version_str = read_file_string(&vault_version_path)?; - let web_vault_version: WebVaultVersion = serde_json::from_str(&vault_version_str)?; + let web_vault_version: WebVaultVersion = match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) { + Ok(s) => serde_json::from_str(&s)?, + _ => { + match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { + Ok(s) => serde_json::from_str(&s)?, + _ => { + WebVaultVersion{version: String::from("Version file missing")} + }, + } + }, + }; // Execute some environment checks let running_within_docker = is_running_in_docker(); @@ -557,9 +564,10 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu let diagnostics_json = json!({ "dns_resolved": dns_resolved, - "web_vault_version": web_vault_version.version, "latest_release": latest_release, "latest_commit": latest_commit, + "web_vault_enabled": &CONFIG.web_vault_enabled(), + "web_vault_version": web_vault_version.version, "latest_web_build": latest_web_build, "running_within_docker": running_within_docker, "has_http_access": has_http_access, @@ -571,6 +579,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu "db_type": *DB_TYPE, "db_version": get_sql_server_version(&conn), "admin_url": format!("{}/diagnostics", admin_url(Referer(None))), + "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(), "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference }); @@ -596,11 +605,11 @@ fn delete_config(_token: AdminToken) -> EmptyResult { } #[post("/config/backup_db")] -fn backup_db(_token: AdminToken) -> EmptyResult { +fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult { if *CAN_BACKUP { - backup_database() + backup_database(&conn) } else { - err!("Can't back up current DB (either it's not SQLite or the 'sqlite' binary is not present)"); + err!("Can't back up current DB (Only SQLite supports this feature)"); } } diff --git a/src/db/mod.rs b/src/db/mod.rs index 2472caa6..83532ce2 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,5 +1,3 @@ -use std::process::Command; - use chrono::prelude::*; use diesel::r2d2::{ConnectionManager, Pool, PooledConnection}; use rocket::{ @@ -144,6 +142,7 @@ macro_rules! db_run { // Different code for each db ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => { #[allow(unused)] use diesel::prelude::*; + #[allow(unused_variables)] match $conn { $($( #[cfg($db)] @@ -221,21 +220,21 @@ macro_rules! 
db_object { // Reexport the models, needs to be after the macros are defined so it can access them pub mod models; -/// Creates a back-up of the database using sqlite3 -pub fn backup_database() -> Result<(), Error> { - use std::path::Path; - let db_url = CONFIG.database_url(); - let db_path = Path::new(&db_url).parent().unwrap(); - - let now: DateTime = Utc::now(); - let file_date = now.format("%Y%m%d").to_string(); - let backup_command: String = format!("{}{}{}", ".backup 'db_", file_date, ".sqlite3'"); - - Command::new("sqlite3") - .current_dir(db_path) - .args(&["db.sqlite3", &backup_command]) - .output() - .expect("Can't open database, sqlite3 is not available, make sure it's installed and available on the PATH"); +/// Creates a back-up of the sqlite database +/// MySQL/MariaDB and PostgreSQL are not supported. +pub fn backup_database(conn: &DbConn) -> Result<(), Error> { + db_run! {@raw conn: + postgresql, mysql { + err!("PostgreSQL and MySQL/MariaDB do not support this backup feature"); + } + sqlite { + use std::path::Path; + let db_url = CONFIG.database_url(); + let db_path = Path::new(&db_url).parent().unwrap().to_string_lossy(); + let file_date = Utc::now().format("%Y%m%d_%H%M%S").to_string(); + diesel::sql_query(format!("VACUUM INTO '{}/db_{}.sqlite3'", db_path, file_date)).execute(conn)?; + } + } Ok(()) } @@ -243,29 +242,14 @@ pub fn backup_database() -> Result<(), Error> { /// Get the SQL Server version pub fn get_sql_server_version(conn: &DbConn) -> String { - use diesel::sql_types::Text; - #[derive(QueryableByName)] - struct SqlVersion { - #[sql_type = "Text"] - version: String, - } - db_run! {@raw conn: postgresql, mysql { - match diesel::sql_query("SELECT version() AS version;").get_result::(conn).ok() { - Some(v) => { - v.version - }, - _ => "Unknown".to_string() - } + no_arg_sql_function!(version, diesel::sql_types::Text); + diesel::select(version).get_result::(conn).unwrap_or_else(|_| "Unknown".to_string()) } sqlite { - match diesel::sql_query("SELECT sqlite_version() AS version;").get_result::(conn).ok() { - Some(v) => { - v.version - }, - _ => "Unknown".to_string() - } + no_arg_sql_function!(sqlite_version, diesel::sql_types::Text); + diesel::select(sqlite_version).get_result::(conn).unwrap_or_else(|_| "Unknown".to_string()) } } } diff --git a/src/static/templates/admin/diagnostics.hbs b/src/static/templates/admin/diagnostics.hbs index 8d7901db..1d5ca711 100644 --- a/src/static/templates/admin/diagnostics.hbs +++ b/src/static/templates/admin/diagnostics.hbs @@ -20,6 +20,7 @@
{{diagnostics.latest_release}}-{{diagnostics.latest_commit}}
+ {{#if diagnostics.web_vault_enabled}}
Web Installed Ok Update @@ -35,6 +36,13 @@ {{diagnostics.latest_web_build}} {{/unless}} + {{/if}} + {{#unless diagnostics.web_vault_enabled}} +
Web Installed
+
+ Web Vault is disabled +
+ {{/unless}}
Database
{{diagnostics.db_type}}: {{diagnostics.db_version}} @@ -118,7 +126,10 @@
{{diagnostics.dns_resolved}}
- +
Date & Time (Local)
+
+ Server: {{diagnostics.server_time_local}} +
Date & Time (UTC) Ok Error From 73ff8d79f70b36483d1d33587cdc9549c8e472bd Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Fri, 2 Apr 2021 20:16:49 -0700 Subject: [PATCH 12/25] Add a generic job scheduler Also rewrite deletion of old sends using the job scheduler. --- .env.template | 13 +++++++++++++ Cargo.lock | 33 ++++++++++++++++++++++++++++++++- Cargo.toml | 10 ++++++++++ src/api/core/mod.rs | 2 +- src/api/core/sends.rs | 24 ++++++++---------------- src/api/mod.rs | 2 +- src/config.rs | 8 ++++++++ src/db/models/send.rs | 22 ++++++++++++++++------ src/main.rs | 43 +++++++++++++++++++++++++++++++++++++------ 9 files changed, 126 insertions(+), 31 deletions(-) diff --git a/.env.template b/.env.template index a85ce22d..ce571ff6 100644 --- a/.env.template +++ b/.env.template @@ -56,6 +56,19 @@ # WEBSOCKET_ADDRESS=0.0.0.0 # WEBSOCKET_PORT=3012 +## Job scheduler settings +## +## Job schedules use a cron-like syntax (as parsed by https://crates.io/crates/cron), +## and are always in terms of UTC time (regardless of your local time zone settings). +## +## How often (in ms) the job scheduler thread checks for jobs that need running. +## Set to 0 to globally disable scheduled jobs. +# JOB_POLL_INTERVAL_MS=30000 +## +## Cron schedule of the job that checks for Sends past their deletion date. +## Defaults to hourly. Set blank to disable this job. +# SEND_PURGE_SCHEDULE="0 0 * * * *" + ## Enable extended logging, which shows timestamps and targets in the logs # EXTENDED_LOGGING=true diff --git a/Cargo.lock b/Cargo.lock index e60c8d0d..7a18fadc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -161,6 +161,7 @@ dependencies = [ "handlebars", "html5ever", "idna 0.2.2", + "job_scheduler", "jsonwebtoken", "lettre", "libsqlite3-sys", @@ -401,6 +402,17 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "cron" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e009ed0b762cf7a967a34dfdc67d5967d3f828f12901d37081432c3dd1668f8f" +dependencies = [ + "chrono", + "nom 4.1.1", + "once_cell", +] + [[package]] name = "crypto-mac" version = "0.3.0" @@ -1097,6 +1109,16 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" +[[package]] +name = "job_scheduler" +version = "1.2.1" +source = "git+https://github.com/jjlin/job_scheduler?rev=ee023418dbba2bfe1e30a5fd7d937f9e33739806#ee023418dbba2bfe1e30a5fd7d937f9e33739806" +dependencies = [ + "chrono", + "cron", + "uuid", +] + [[package]] name = "js-sys" version = "0.3.49" @@ -1160,7 +1182,7 @@ dependencies = [ "idna 0.2.2", "mime 0.3.16", "native-tls", - "nom", + "nom 6.1.2", "once_cell", "quoted_printable", "rand 0.8.3", @@ -1475,6 +1497,15 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" +[[package]] +name = "nom" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c349f68f25f596b9f44cf0e7c69752a5c633b0550c3ff849518bfba0233774a" +dependencies = [ + "memchr", +] + [[package]] name = "nom" version = "6.1.2" diff --git a/Cargo.toml b/Cargo.toml index 24c24eba..4edffc42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -73,6 +73,9 @@ chrono = { version = "0.4.19", features = ["serde"] } chrono-tz = "0.5.3" time = "0.2.26" +# Job scheduler +job_scheduler = "1.2.1" + # TOTP library oath = "0.10.2" @@ -136,3 +139,10 @@ rocket_contrib = { git = 
'https://github.com/SergioBenitez/Rocket', rev = '263e3 # For favicon extraction from main website data-url = { git = 'https://github.com/servo/rust-url', package="data-url", rev = '540ede02d0771824c0c80ff9f57fe8eff38b1291' } + +# The maintainer of the `job_scheduler` crate doesn't seem to have responded +# to any issues or PRs for almost a year (as of April 2021). This hopefully +# temporary fork updates Cargo.toml to use more up-to-date dependencies. +# In particular, `cron` has since implemented parsing of some common syntax +# that wasn't previously supported (https://github.com/zslayton/cron/pull/64). +job_scheduler = { git = 'https://github.com/jjlin/job_scheduler', rev = 'ee023418dbba2bfe1e30a5fd7d937f9e33739806' } diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 36e83f0e..8a7e5f9b 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -5,7 +5,7 @@ mod organizations; pub mod two_factor; mod sends; -pub use sends::start_send_deletion_scheduler; +pub use sends::purge_sends; pub fn routes() -> Vec { let mut mod_routes = routes![ diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index ec6809a2..3cd568c5 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -9,7 +9,7 @@ use serde_json::Value; use crate::{ api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, auth::{Headers, Host}, - db::{models::*, DbConn}, + db::{models::*, DbConn, DbPool}, CONFIG, }; @@ -27,21 +27,13 @@ pub fn routes() -> Vec { ] } -pub fn start_send_deletion_scheduler(pool: crate::db::DbPool) { - std::thread::spawn(move || { - loop { - if let Ok(conn) = pool.get() { - info!("Initiating send deletion"); - for send in Send::find_all(&conn) { - if chrono::Utc::now().naive_utc() >= send.deletion_date { - send.delete(&conn).ok(); - } - } - } - - std::thread::sleep(std::time::Duration::from_secs(3600)); - } - }); +pub fn purge_sends(pool: DbPool) { + debug!("Purging sends"); + if let Ok(conn) = pool.get() { + Send::purge(&conn); + } else { + error!("Failed to get DB connection while purging sends") + } } #[derive(Deserialize)] diff --git a/src/api/mod.rs b/src/api/mod.rs index 840c65ff..f417751c 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -10,8 +10,8 @@ use serde_json::Value; pub use crate::api::{ admin::routes as admin_routes, + core::purge_sends, core::routes as core_routes, - core::start_send_deletion_scheduler, icons::routes as icons_routes, identity::routes as identity_routes, notifications::routes as notifications_routes, diff --git a/src/config.rs b/src/config.rs index 6c41c975..7c3c5461 100644 --- a/src/config.rs +++ b/src/config.rs @@ -316,6 +316,14 @@ make_config! { /// Websocket port websocket_port: u16, false, def, 3012; }, + jobs { + /// Job scheduler poll interval |> How often the job scheduler thread checks for jobs to run. + /// Set to 0 to globally disable scheduled jobs. + job_poll_interval_ms: u64, false, def, 30_000; + /// Send purge schedule |> Cron schedule of the job that checks for Sends past their deletion date. + /// Defaults to hourly. Set blank to disable this job. + send_purge_schedule: String, false, def, "0 0 * * * *".to_string(); + }, /// General settings settings { diff --git a/src/db/models/send.rs b/src/db/models/send.rs index 0356d818..0644b1e1 100644 --- a/src/db/models/send.rs +++ b/src/db/models/send.rs @@ -205,6 +205,13 @@ impl Send { }} } + /// Purge all sends that are past their deletion date. 
+ pub fn purge(conn: &DbConn) { + for send in Self::find_by_past_deletion_date(&conn) { + send.delete(&conn).ok(); + } + } + pub fn update_users_revision(&self, conn: &DbConn) { match &self.user_uuid { Some(user_uuid) => { @@ -223,12 +230,6 @@ impl Send { Ok(()) } - pub fn find_all(conn: &DbConn) -> Vec { - db_run! {conn: { - sends::table.load::(conn).expect("Error loading sends").from_db() - }} - } - pub fn find_by_access_id(access_id: &str, conn: &DbConn) -> Option { use data_encoding::BASE64URL_NOPAD; use uuid::Uuid; @@ -271,4 +272,13 @@ impl Send { .load::(conn).expect("Error loading sends").from_db() }} } + + pub fn find_by_past_deletion_date(conn: &DbConn) -> Vec { + let now = Utc::now().naive_utc(); + db_run! {conn: { + sends::table + .filter(sends::deletion_date.lt(now)) + .load::(conn).expect("Error loading sends").from_db() + }} + } } diff --git a/src/main.rs b/src/main.rs index 50975c66..4cdf4ff2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -16,6 +16,7 @@ extern crate diesel; #[macro_use] extern crate diesel_migrations; +use job_scheduler::{JobScheduler, Job}; use std::{ fs::create_dir_all, panic, @@ -23,6 +24,7 @@ use std::{ process::{exit, Command}, str::FromStr, thread, + time::Duration, }; #[macro_use] @@ -56,7 +58,9 @@ fn main() { create_icon_cache_folder(); - launch_rocket(extra_debug); + let pool = create_db_pool(); + schedule_jobs(pool.clone()); + launch_rocket(pool, extra_debug); // Blocks until program termination. } const HELP: &str = "\ @@ -301,17 +305,17 @@ fn check_web_vault() { } } -fn launch_rocket(extra_debug: bool) { - let pool = match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()) { +fn create_db_pool() -> db::DbPool { + match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()) { Ok(p) => p, Err(e) => { error!("Error creating database pool: {:?}", e); exit(1); } - }; - - api::start_send_deletion_scheduler(pool.clone()); + } +} +fn launch_rocket(pool: db::DbPool, extra_debug: bool) { let basepath = &CONFIG.domain_path(); // If adding more paths here, consider also adding them to @@ -334,3 +338,30 @@ fn launch_rocket(extra_debug: bool) { // The launch will restore the original logging level error!("Launch error {:#?}", result); } + +fn schedule_jobs(pool: db::DbPool) { + if CONFIG.job_poll_interval_ms() == 0 { + info!("Job scheduler disabled."); + return; + } + thread::Builder::new().name("job-scheduler".to_string()).spawn(move || { + let mut sched = JobScheduler::new(); + + // Purge sends that are past their deletion date. + if !CONFIG.send_purge_schedule().is_empty() { + sched.add(Job::new(CONFIG.send_purge_schedule().parse().unwrap(), || { + api::purge_sends(pool.clone()); + })); + } + + // Periodically check for jobs to run. We probably won't need any + // jobs that run more often than once a minute, so a default poll + // interval of 30 seconds should be sufficient. Users who want to + // schedule jobs to run more frequently for some reason can reduce + // the poll interval accordingly. 
+ loop { + sched.tick(); + thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms())); + } + }).expect("Error spawning job scheduler thread"); +} From d77333576b1268cd24f17348ffe6d72e07855f54 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Fri, 2 Apr 2021 20:52:15 -0700 Subject: [PATCH 13/25] Add support for auto-deleting trashed items Upstream will soon auto-delete trashed items after 30 days, but some people use the trash as an archive folder, so to avoid unexpected data loss, this implementation requires the user to explicitly enable auto-deletion. --- .env.template | 4 ++++ src/api/core/ciphers.rs | 11 ++++++++++- src/api/core/mod.rs | 1 + src/api/mod.rs | 1 + src/config.rs | 8 ++++++++ src/db/models/cipher.rs | 24 +++++++++++++++++++++++- src/main.rs | 7 +++++++ 7 files changed, 54 insertions(+), 2 deletions(-) diff --git a/.env.template b/.env.template index ce571ff6..e5665296 100644 --- a/.env.template +++ b/.env.template @@ -68,6 +68,10 @@ ## Cron schedule of the job that checks for Sends past their deletion date. ## Defaults to hourly. Set blank to disable this job. # SEND_PURGE_SCHEDULE="0 0 * * * *" +## +## Cron schedule of the job that checks for trashed items to delete permanently. +## Defaults to daily. Set blank to disable this job. +# TRASH_PURGE_SCHEDULE="0 0 0 * * *" ## Enable extended logging, which shows timestamps and targets in the logs # EXTENDED_LOGGING=true diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 7b0de205..58ae80b1 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -13,7 +13,7 @@ use crate::{ api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType}, auth::Headers, crypto, - db::{models::*, DbConn}, + db::{models::*, DbConn, DbPool}, CONFIG, }; @@ -77,6 +77,15 @@ pub fn routes() -> Vec { ] } +pub fn purge_trashed_ciphers(pool: DbPool) { + debug!("Purging trashed ciphers"); + if let Ok(conn) = pool.get() { + Cipher::purge_trash(&conn); + } else { + error!("Failed to get DB connection while purging trashed ciphers") + } +} + #[derive(FromForm, Default)] struct SyncData { #[form(field = "excludeDomains")] diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 8a7e5f9b..2964d4fb 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -5,6 +5,7 @@ mod organizations; pub mod two_factor; mod sends; +pub use ciphers::purge_trashed_ciphers; pub use sends::purge_sends; pub fn routes() -> Vec { diff --git a/src/api/mod.rs b/src/api/mod.rs index f417751c..2132b30b 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -11,6 +11,7 @@ use serde_json::Value; pub use crate::api::{ admin::routes as admin_routes, core::purge_sends, + core::purge_trashed_ciphers, core::routes as core_routes, icons::routes as icons_routes, identity::routes as identity_routes, diff --git a/src/config.rs b/src/config.rs index 7c3c5461..bc2f359e 100644 --- a/src/config.rs +++ b/src/config.rs @@ -323,6 +323,9 @@ make_config! { /// Send purge schedule |> Cron schedule of the job that checks for Sends past their deletion date. /// Defaults to hourly. Set blank to disable this job. send_purge_schedule: String, false, def, "0 0 * * * *".to_string(); + /// Trash purge schedule |> Cron schedule of the job that checks for trashed items to delete permanently. + /// Defaults to daily. Set blank to disable this job. + trash_purge_schedule: String, false, def, "0 0 0 * * *".to_string(); }, /// General settings @@ -347,6 +350,11 @@ make_config! 
{ /// Per-organization attachment limit (KB) |> Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more org_attachment_limit: i64, true, option; + /// Trash auto-delete days |> Number of days to wait before auto-deleting a trashed item. + /// If unset, trashed items are not auto-deleted. This setting applies globally, so make + /// sure to inform all users of any changes to this setting. + trash_auto_delete_days: i64, true, option; + /// Disable icon downloads |> Set to true to disable icon downloading, this would still serve icons from /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0, /// otherwise it will delete them and they won't be downloaded again. diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 365865f8..e4ae04c8 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -1,6 +1,8 @@ -use chrono::{NaiveDateTime, Utc}; +use chrono::{Duration, NaiveDateTime, Utc}; use serde_json::Value; +use crate::CONFIG; + use super::{ Attachment, CollectionCipher, @@ -271,6 +273,17 @@ impl Cipher { Ok(()) } + /// Purge all ciphers that are old enough to be auto-deleted. + pub fn purge_trash(conn: &DbConn) { + if let Some(auto_delete_days) = CONFIG.trash_auto_delete_days() { + let now = Utc::now().naive_utc(); + let dt = now - Duration::days(auto_delete_days); + for cipher in Self::find_deleted_before(&dt, conn) { + cipher.delete(&conn).ok(); + } + } + } + pub fn move_to_folder(&self, folder_uuid: Option, user_uuid: &str, conn: &DbConn) -> EmptyResult { User::update_uuid_revision(user_uuid, conn); @@ -511,6 +524,15 @@ impl Cipher { }} } + /// Find all ciphers that were deleted before the specified datetime. + pub fn find_deleted_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec { + db_run! {conn: { + ciphers::table + .filter(ciphers::deleted_at.lt(dt)) + .load::(conn).expect("Error loading ciphers").from_db() + }} + } + pub fn get_collections(&self, user_id: &str, conn: &DbConn) -> Vec { db_run! {conn: { ciphers_collections::table diff --git a/src/main.rs b/src/main.rs index 4cdf4ff2..d5985bac 100644 --- a/src/main.rs +++ b/src/main.rs @@ -354,6 +354,13 @@ fn schedule_jobs(pool: db::DbPool) { })); } + // Purge trashed items that are old enough to be auto-deleted. + if !CONFIG.trash_purge_schedule().is_empty() { + sched.add(Job::new(CONFIG.trash_purge_schedule().parse().unwrap(), || { + api::purge_trashed_ciphers(pool.clone()); + })); + } + // Periodically check for jobs to run. We probably won't need any // jobs that run more often than once a minute, so a default poll // interval of 30 seconds should be sufficient. Users who want to From 90e0b7fec6cc025561f9f732fb06d15f72e5c892 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Mon, 5 Apr 2021 23:12:36 -0700 Subject: [PATCH 14/25] Offset scheduled jobs by 5 minutes This is intended to avoid contention with database backups that many users probably schedule to start at exactly the top of an hour. --- .env.template | 8 ++++---- src/config.rs | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.env.template b/.env.template index e5665296..e4d0b1e1 100644 --- a/.env.template +++ b/.env.template @@ -66,12 +66,12 @@ # JOB_POLL_INTERVAL_MS=30000 ## ## Cron schedule of the job that checks for Sends past their deletion date. -## Defaults to hourly. Set blank to disable this job. -# SEND_PURGE_SCHEDULE="0 0 * * * *" +## Defaults to hourly (5 minutes after the hour). 
Set blank to disable this job. +# SEND_PURGE_SCHEDULE="0 5 * * * *" ## ## Cron schedule of the job that checks for trashed items to delete permanently. -## Defaults to daily. Set blank to disable this job. -# TRASH_PURGE_SCHEDULE="0 0 0 * * *" +## Defaults to daily (5 minutes after midnight). Set blank to disable this job. +# TRASH_PURGE_SCHEDULE="0 5 0 * * *" ## Enable extended logging, which shows timestamps and targets in the logs # EXTENDED_LOGGING=true diff --git a/src/config.rs b/src/config.rs index bc2f359e..86031c72 100644 --- a/src/config.rs +++ b/src/config.rs @@ -322,10 +322,10 @@ make_config! { job_poll_interval_ms: u64, false, def, 30_000; /// Send purge schedule |> Cron schedule of the job that checks for Sends past their deletion date. /// Defaults to hourly. Set blank to disable this job. - send_purge_schedule: String, false, def, "0 0 * * * *".to_string(); + send_purge_schedule: String, false, def, "0 5 * * * *".to_string(); /// Trash purge schedule |> Cron schedule of the job that checks for trashed items to delete permanently. /// Defaults to daily. Set blank to disable this job. - trash_purge_schedule: String, false, def, "0 0 0 * * *".to_string(); + trash_purge_schedule: String, false, def, "0 5 0 * * *".to_string(); }, /// General settings From b268c3dd1cfda78f113cc5c3bf06e08324590379 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Tue, 6 Apr 2021 20:38:22 +0200 Subject: [PATCH 15/25] Update web vault and add unnoficialserver response --- docker/Dockerfile.j2 | 4 ++-- docker/amd64/Dockerfile | 12 ++++++------ docker/amd64/Dockerfile.alpine | 12 ++++++------ docker/arm64/Dockerfile | 12 ++++++------ docker/armv6/Dockerfile | 12 ++++++------ docker/armv7/Dockerfile | 12 ++++++------ docker/armv7/Dockerfile.alpine | 12 ++++++------ src/api/core/ciphers.rs | 1 + src/api/identity.rs | 6 ++++-- 9 files changed, 43 insertions(+), 40 deletions(-) diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 71630fa6..f003dd0f 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -44,8 +44,8 @@ # https://docs.docker.com/develop/develop-images/multistage-build/ # https://whitfin.io/speeding-up-rust-docker-builds/ ####################### VAULT BUILD IMAGE ####################### -{% set vault_version = "2.19.0" %} -{% set vault_image_digest = "sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4" %} +{% set vault_version = "2.19.0b" %} +{% set vault_image_digest = "sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e" %} # The web-vault digest specifies a particular web-vault build on Docker Hub. # Using the digest instead of the tag name provides better security, # as the digest of an image is immutable, whereas a tag name can later diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index f524e21a..e0f6c70b 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM rust:1.50 as build diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index a7923d30..71b3130f 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM clux/muslrust:nightly-2021-02-22 as build diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index 5ef151c5..937d192f 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM rust:1.50 as build diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index d86bc5d1..2423ee16 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM rust:1.50 as build diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index ab95f629..4f612f72 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM rust:1.50 as build diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index 07895816..430e043e 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -14,15 +14,15 @@ # - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0 -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0 -# [bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4] +# $ docker pull bitwardenrs/web-vault:v2.19.0b +# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b +# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 -# [bitwardenrs/web-vault:v2.19.0] +# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e +# [bitwardenrs/web-vault:v2.19.0b] # -FROM bitwardenrs/web-vault@sha256:8747cfaa2c6d87d1749e119dd884697e8099389aa9aca30a4d73d4ff796fe0e4 as vault +FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault ########################## BUILD IMAGE ########################## FROM messense/rust-musl-cross:armv7-musleabihf as build diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 58ae80b1..a46ecb9c 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -133,6 +133,7 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { "Ciphers": ciphers_json, "Domains": domains_json, "Sends": sends_json, + "unofficialServer": true, "Object": "sync" })) } diff --git a/src/api/identity.rs b/src/api/identity.rs index dcfe607a..630c1781 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -72,7 +72,8 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false, // TODO: according to official server seems something like: 
user.password_hash.is_empty(), but would need testing - "scope": "api offline_access" + "scope": "api offline_access", + "unofficialServer": true, }))) } @@ -172,7 +173,8 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above - "scope": "api offline_access" + "scope": "api offline_access", + "unofficialServer": true, }); if let Some(token) = twofactor_token { From 155109dea120e109e1e027d4e1312b6adad4c231 Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Tue, 6 Apr 2021 21:04:37 +0100 Subject: [PATCH 16/25] Extract client creation to a single place --- src/api/admin.rs | 10 ++++------ src/api/core/mod.rs | 7 ++----- src/api/core/two_factor/duo.rs | 14 +++++++------- src/api/icons.rs | 6 +++--- src/util.rs | 19 +++++++++++++++++-- 5 files changed, 33 insertions(+), 23 deletions(-) diff --git a/src/api/admin.rs b/src/api/admin.rs index d5a743c9..af0d8ebc 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -3,7 +3,7 @@ use serde::de::DeserializeOwned; use serde_json::Value; use std::{env, time::Duration}; -use reqwest::{blocking::Client, header::USER_AGENT}; + use rocket::{ http::{Cookie, Cookies, SameSite}, request::{self, FlashMessage, Form, FromRequest, Outcome, Request}, @@ -19,7 +19,7 @@ use crate::{ db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType}, error::{Error, MapResult}, mail, - util::{format_naive_datetime_local, get_display_size, is_running_in_docker}, + util::{format_naive_datetime_local, get_display_size, is_running_in_docker, get_reqwest_client}, CONFIG, }; @@ -469,24 +469,22 @@ struct GitCommit { } fn get_github_api(url: &str) -> Result { - let github_api = Client::builder().build()?; + let github_api = get_reqwest_client(); Ok(github_api .get(url) .timeout(Duration::from_secs(10)) - .header(USER_AGENT, "Bitwarden_RS") .send()? .error_for_status()? .json::()?) 
} fn has_http_access() -> bool { - let http_access = Client::builder().build().unwrap(); + let http_access = get_reqwest_client(); match http_access .head("https://github.com/dani-garcia/bitwarden_rs") .timeout(Duration::from_secs(10)) - .header(USER_AGENT, "Bitwarden_RS") .send() { Ok(r) => r.status().is_success(), diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 2964d4fb..d24d8cdf 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -43,6 +43,7 @@ use crate::{ auth::Headers, db::DbConn, error::Error, + util::get_reqwest_client, }; #[put("/devices/identifier//clear-token")] @@ -147,20 +148,16 @@ fn put_eq_domains(data: JsonUpcase, headers: Headers, conn: DbC #[get("/hibp/breach?")] fn hibp_breach(username: String) -> JsonResult { - let user_agent = "Bitwarden_RS"; let url = format!( "https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false", username ); - use reqwest::{blocking::Client, header::USER_AGENT}; - if let Some(api_key) = crate::CONFIG.hibp_api_key() { - let hibp_client = Client::builder().build()?; + let hibp_client = get_reqwest_client(); let res = hibp_client .get(&url) - .header(USER_AGENT, user_agent) .header("hibp-api-key", api_key) .send()?; diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index 18eda4b2..688ab785 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -12,6 +12,7 @@ use crate::{ DbConn, }, error::MapResult, + util::get_reqwest_client, CONFIG, }; @@ -185,9 +186,7 @@ fn activate_duo_put(data: JsonUpcase, headers: Headers, conn: DbC } fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult { - const AGENT: &str = "bitwarden_rs:Duo/1.0 (Rust)"; - - use reqwest::{blocking::Client, header::*, Method}; + use reqwest::{header, Method}; use std::str::FromStr; // https://duo.com/docs/authapi#api-details @@ -199,11 +198,12 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em let m = Method::from_str(method).unwrap_or_default(); - Client::new() - .request(m, &url) + let client = get_reqwest_client(); + + client.request(m, &url) .basic_auth(username, Some(password)) - .header(USER_AGENT, AGENT) - .header(DATE, date) + .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)") + .header(header::DATE, date) .send()? .error_for_status()?; diff --git a/src/api/icons.rs b/src/api/icons.rs index 3f8a41e1..59aba43e 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -12,7 +12,7 @@ use regex::Regex; use reqwest::{blocking::Client, blocking::Response, header, Url}; use rocket::{http::ContentType, http::Cookie, response::Content, Route}; -use crate::{error::Error, util::Cached, CONFIG}; +use crate::{error::Error, util::{Cached, get_reqwest_client_builder}, CONFIG}; pub fn routes() -> Vec { routes![icon] @@ -28,11 +28,11 @@ static CLIENT: Lazy = Lazy::new(|| { default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8")); // Reuse the client between requests - Client::builder() + get_reqwest_client_builder() .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) .default_headers(default_headers) .build() - .unwrap() + .expect("Failed to build icon client") }); // Build Regex only once since this takes a lot of time. 
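
A minimal sketch of the call-site pattern these helpers are meant to support, assuming the get_reqwest_client/get_reqwest_client_builder functions added to util.rs just below; the URL check mirrors has_http_access above, and the 30-second timeout only stands in for a call site that needs its own settings (as the icon fetcher does):

    use crate::util::{get_reqwest_client, get_reqwest_client_builder};
    use std::time::Duration;

    fn head_ok(url: &str) -> bool {
        // Ready-made client: "Bitwarden_RS" User-Agent and a 10-second timeout by default.
        match get_reqwest_client().head(url).send() {
            Ok(r) => r.status().is_success(),
            _ => false,
        }
    }

    fn custom_client() -> reqwest::blocking::Client {
        // Builder variant for call sites that layer on their own settings;
        // the 30-second timeout here is purely illustrative.
        get_reqwest_client_builder()
            .timeout(Duration::from_secs(30))
            .build()
            .expect("Failed to build client")
    }
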
diff --git a/src/util.rs b/src/util.rs index feafa467..6dd6c4a7 100644 --- a/src/util.rs +++ b/src/util.rs @@ -478,7 +478,6 @@ pub fn retry(func: F, max_tries: u32) -> Result where F: Fn() -> Result, { - use std::{thread::sleep, time::Duration}; let mut tries = 0; loop { @@ -497,12 +496,13 @@ where } } +use std::{thread::sleep, time::Duration}; + pub fn retry_db(func: F, max_tries: u32) -> Result where F: Fn() -> Result, E: std::error::Error, { - use std::{thread::sleep, time::Duration}; let mut tries = 0; loop { @@ -522,3 +522,18 @@ where } } } + +use reqwest::{blocking::{Client, ClientBuilder}, header}; + +pub fn get_reqwest_client() -> Client { + get_reqwest_client_builder().build().expect("Failed to build client") +} + +pub fn get_reqwest_client_builder() -> ClientBuilder { + let mut headers = header::HeaderMap::new(); + headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS")); + Client::builder() + .default_headers(headers) + .timeout(Duration::from_secs(10)) + +} From 3ab90259f20063b72c5560da3346840da7223acc Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Tue, 6 Apr 2021 21:54:42 +0100 Subject: [PATCH 17/25] Modify rustfmt file --- rustfmt.toml | 5 ++ src/api/admin.rs | 9 +-- src/api/core/accounts.rs | 10 +--- src/api/core/ciphers.rs | 74 +++++++++--------------- src/api/core/folders.rs | 10 +--- src/api/core/mod.rs | 16 +---- src/api/core/organizations.rs | 18 ++---- src/api/core/sends.rs | 23 +------- src/api/core/two_factor/authenticator.rs | 23 ++------ src/api/core/two_factor/duo.rs | 6 +- src/api/core/two_factor/email.rs | 16 +---- src/api/core/two_factor/mod.rs | 8 +-- src/api/core/two_factor/u2f.rs | 24 ++------ src/api/icons.rs | 27 +++++---- src/api/identity.rs | 32 +++------- src/api/mod.rs | 6 +- src/api/notifications.rs | 19 +++--- src/api/web.rs | 54 +++++------------ src/auth.rs | 24 ++++---- src/config.rs | 19 ++---- src/crypto.rs | 4 +- src/db/mod.rs | 4 +- src/db/models/cipher.rs | 23 +++----- src/db/models/collection.rs | 21 +++---- src/db/models/device.rs | 24 ++------ src/db/models/organization.rs | 13 ++--- src/db/models/user.rs | 4 +- src/error.rs | 6 +- src/mail.rs | 26 ++------- src/util.rs | 16 +++-- 30 files changed, 169 insertions(+), 395 deletions(-) diff --git a/rustfmt.toml b/rustfmt.toml index 679c7ab8..630b42b2 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,2 +1,7 @@ version = "Two" +edition = "2018" max_width = 120 +newline_style = "Unix" +use_small_heuristics = "Off" +struct_lit_single_line = false +overflow_delimited_expr = true diff --git a/src/api/admin.rs b/src/api/admin.rs index 9ef22b7c..79033c00 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -65,9 +65,7 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| { }); static CAN_BACKUP: Lazy = Lazy::new(|| { - DbConnType::from_url(&CONFIG.database_url()) - .map(|t| t == DbConnType::sqlite) - .unwrap_or(false) + DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false) && Command::new("sqlite3").arg("-version").status().is_ok() }); @@ -171,10 +169,7 @@ fn post_admin_login( // If the token is invalid, redirect to login page if !_validate_token(&data.token) { error!("Invalid admin token. 
IP: {}", ip.ip); - Err(Flash::error( - Redirect::to(admin_url(referer)), - "Invalid admin token, please try again.", - )) + Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again.")) } else { // If the token received is valid, generate JWT and save it as a cookie let claims = generate_admin_claims(); diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index 6e45a947..c9390cf7 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -320,15 +320,7 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: err!("The cipher is not owned by the user") } - update_cipher_from_data( - &mut saved_cipher, - cipher_data, - &headers, - false, - &conn, - &nt, - UpdateType::CipherUpdate, - )? + update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)? } // Update user data diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 8d5bcbc9..815f607c 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -91,19 +91,15 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn); - let collections_json: Vec = collections - .iter() - .map(|c| c.to_json_details(&headers.user.uuid, &conn)) - .collect(); + let collections_json: Vec = + collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect(); let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn); let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - let ciphers_json: Vec = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec = + ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let sends = Send::find_by_user(&headers.user.uuid, &conn); let sends_json: Vec = sends.iter().map(|s| s.to_json()).collect(); @@ -130,10 +126,8 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { fn get_ciphers(headers: Headers, conn: DbConn) -> Json { let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - let ciphers_json: Vec = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec = + ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); Json(json!({ "Data": ciphers_json, @@ -583,11 +577,8 @@ fn post_collections_admin( } let posted_collections: HashSet = data.CollectionIds.iter().cloned().collect(); - let current_collections: HashSet = cipher - .get_collections(&headers.user.uuid, &conn) - .iter() - .cloned() - .collect(); + let current_collections: HashSet = + cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect(); for collection in posted_collections.symmetric_difference(¤t_collections) { match Collection::find_by_uuid(&collection, &conn) { @@ -823,30 +814,25 @@ fn post_attachment( let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); let path = base_path.join(&file_name); - let size = match field - .data - .save() - .memory_threshold(0) - .size_limit(size_limit) - .with_path(path.clone()) - { - SaveResult::Full(SavedData::File(_, size)) => size as i32, - SaveResult::Full(other) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Attachment is not a file: {:?}", other)); - 
return; - } - SaveResult::Partial(_, reason) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason)); - return; - } - SaveResult::Error(e) => { - std::fs::remove_file(path).ok(); - error = Some(format!("Error: {:?}", e)); - return; - } - }; + let size = + match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { + SaveResult::Full(SavedData::File(_, size)) => size as i32, + SaveResult::Full(other) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Attachment is not a file: {:?}", other)); + return; + } + SaveResult::Partial(_, reason) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Attachment size limit exceeded with this file: {:?}", reason)); + return; + } + SaveResult::Error(e) => { + std::fs::remove_file(path).ok(); + error = Some(format!("Error: {:?}", e)); + return; + } + }; let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size); attachment.akey = attachment_key.clone(); @@ -878,11 +864,7 @@ fn post_attachment_admin( post_attachment(uuid, data, content_type, headers, conn, nt) } -#[post( - "/ciphers//attachment//share", - format = "multipart/form-data", - data = "" -)] +#[post("/ciphers//attachment//share", format = "multipart/form-data", data = "")] fn post_attachment_share( uuid: String, attachment_id: String, diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs index 2779fe61..57ec7f18 100644 --- a/src/api/core/folders.rs +++ b/src/api/core/folders.rs @@ -8,15 +8,7 @@ use crate::{ }; pub fn routes() -> Vec { - routes![ - get_folders, - get_folder, - post_folders, - post_folder, - put_folder, - delete_folder_post, - delete_folder, - ] + routes![get_folders, get_folder, post_folders, post_folder, put_folder, delete_folder_post, delete_folder,] } #[get("/folders")] diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 70e1866e..a6f5c432 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -8,14 +8,8 @@ pub mod two_factor; pub use sends::start_send_deletion_scheduler; pub fn routes() -> Vec { - let mut mod_routes = routes![ - clear_device_token, - put_device_token, - get_eq_domains, - post_eq_domains, - put_eq_domains, - hibp_breach, - ]; + let mut mod_routes = + routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,]; let mut routes = Vec::new(); routes.append(&mut accounts::routes()); @@ -157,11 +151,7 @@ fn hibp_breach(username: String) -> JsonResult { if let Some(api_key) = crate::CONFIG.hibp_api_key() { let hibp_client = Client::builder().build()?; - let res = hibp_client - .get(&url) - .header(USER_AGENT, user_agent) - .header("hibp-api-key", api_key) - .send()?; + let res = hibp_client.get(&url).header(USER_AGENT, user_agent).header("hibp-api-key", api_key).send()?; // If we get a 404, return a 404, it means no breached accounts if res.status() == 404 { diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index ce3c46ea..cfe3932e 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -446,10 +446,8 @@ struct OrgIdData { #[get("/ciphers/organization-details?")] fn get_org_details(data: Form, headers: Headers, conn: DbConn) -> Json { let ciphers = Cipher::find_by_org(&data.organization_id, &conn); - let ciphers_json: Vec = ciphers - .iter() - .map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)) - .collect(); + let ciphers_json: Vec = + ciphers.iter().map(|c| c.to_json(&headers.host, 
&headers.user.uuid, &conn)).collect(); Json(json!({ "Data": ciphers_json, @@ -904,16 +902,8 @@ fn post_org_import( .into_iter() .map(|cipher_data| { let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data( - &mut cipher, - cipher_data, - &headers, - false, - &conn, - &nt, - UpdateType::CipherCreate, - ) - .ok(); + update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate) + .ok(); cipher }) .collect(); diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index ec6809a2..f99b8d11 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -16,15 +16,7 @@ use crate::{ const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available"; pub fn routes() -> Vec { - routes![ - post_send, - post_send_file, - post_access, - post_access_file, - put_send, - delete_send, - put_remove_password - ] + routes![post_send, post_send_file, post_access, post_access_file, put_send, delete_send, put_remove_password] } pub fn start_send_deletion_scheduler(pool: crate::db::DbPool) { @@ -179,13 +171,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn None => err!("No model entry present"), }; - let size = match data_entry - .data - .save() - .memory_threshold(0) - .size_limit(size_limit) - .with_path(&file_path) - { + let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) { SaveResult::Full(SavedData::File(_, size)) => size as i32, SaveResult::Full(other) => { std::fs::remove_file(&file_path).ok(); @@ -206,10 +192,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn if let Some(o) = data_value.as_object_mut() { o.insert(String::from("Id"), Value::String(file_id)); o.insert(String::from("Size"), Value::Number(size.into())); - o.insert( - String::from("SizeName"), - Value::String(crate::util::get_display_size(size)), - ); + o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size))); } send.data = serde_json::to_string(&data_value)?; diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index 3578e874..2d076b27 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -17,11 +17,7 @@ use crate::{ pub use crate::config::CONFIG; pub fn routes() -> Vec { - routes![ - generate_authenticator, - activate_authenticator, - activate_authenticator_put, - ] + routes![generate_authenticator, activate_authenticator, activate_authenticator_put,] } #[post("/two-factor/get-authenticator", data = "")] @@ -163,22 +159,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl twofactor.save(&conn)?; return Ok(()); } else if generated == totp_code && time_step <= twofactor.last_used as i64 { - warn!( - "This or a TOTP code within {} steps back and forward has already been used!", - steps - ); - err!(format!( - "Invalid TOTP code! Server time: {} IP: {}", - current_time.format("%F %T UTC"), - ip.ip - )); + warn!("This or a TOTP code within {} steps back and forward has already been used!", steps); + err!(format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); } } // Else no valide code received, deny access - err!(format!( - "Invalid TOTP code! Server time: {} IP: {}", - current_time.format("%F %T UTC"), - ip.ip - )); + err!(format!("Invalid TOTP code! 
Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); } diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index 18eda4b2..69aac86d 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -59,7 +59,11 @@ impl DuoData { ik.replace_range(digits.., replaced); sk.replace_range(digits.., replaced); - Self { host, ik, sk } + Self { + host, + ik, + sk, + } } } diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index a12d47ec..c47f9498 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -65,10 +65,7 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { twofactor.data = twofactor_data.to_json(); twofactor.save(&conn)?; - mail::send_token( - &twofactor_data.email, - &twofactor_data.last_token.map_res("Token is empty")?, - )?; + mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; Ok(()) } @@ -128,17 +125,10 @@ fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) - let twofactor_data = EmailTokenData::new(data.Email, generated_token); // Uses EmailVerificationChallenge as type to show that it's not verified yet. - let twofactor = TwoFactor::new( - user.uuid, - TwoFactorType::EmailVerificationChallenge, - twofactor_data.to_json(), - ); + let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); twofactor.save(&conn)?; - mail::send_token( - &twofactor_data.email, - &twofactor_data.last_token.map_res("Token is empty")?, - )?; + mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; Ok(()) } diff --git a/src/api/core/two_factor/mod.rs b/src/api/core/two_factor/mod.rs index a3dfd319..0d0d2bd2 100644 --- a/src/api/core/two_factor/mod.rs +++ b/src/api/core/two_factor/mod.rs @@ -20,13 +20,7 @@ pub mod u2f; pub mod yubikey; pub fn routes() -> Vec { - let mut routes = routes![ - get_twofactor, - get_recover, - recover, - disable_twofactor, - disable_twofactor_put, - ]; + let mut routes = routes![get_twofactor, get_recover, recover, disable_twofactor, disable_twofactor_put,]; routes.append(&mut authenticator::routes()); routes.append(&mut duo::routes()); diff --git a/src/api/core/two_factor/u2f.rs b/src/api/core/two_factor/u2f.rs index f841240b..3455beab 100644 --- a/src/api/core/two_factor/u2f.rs +++ b/src/api/core/two_factor/u2f.rs @@ -28,13 +28,7 @@ static APP_ID: Lazy = Lazy::new(|| format!("{}/app-id.json", &CONFIG.dom static U2F: Lazy = Lazy::new(|| U2f::new(APP_ID.clone())); pub fn routes() -> Vec { - routes![ - generate_u2f, - generate_u2f_challenge, - activate_u2f, - activate_u2f_put, - delete_u2f, - ] + routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,] } #[post("/two-factor/get-u2f", data = "")] @@ -161,10 +155,7 @@ fn activate_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?; - let error_code = response - .error_code - .clone() - .map_or("0".into(), NumberOrString::into_string); + let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string); if error_code != "0" { err!("Error registering U2F token") @@ -300,20 +291,13 @@ fn _old_parse_registrations(registations: &str) -> Vec { let regs: Vec = serde_json::from_str(registations).expect("Can't parse Registration data"); - regs.into_iter() - .map(|r| serde_json::from_value(r).unwrap()) - 
.map(|Helper(r)| r) - .collect() + regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect() } pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult { let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn); - let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)? - .1 - .into_iter() - .map(|r| r.reg) - .collect(); + let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect(); if registrations.is_empty() { err!("No U2F devices registered") diff --git a/src/api/icons.rs b/src/api/icons.rs index ae3187cb..2eaf89e2 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -22,10 +22,7 @@ static CLIENT: Lazy = Lazy::new(|| { // Generate the default headers let mut default_headers = header::HeaderMap::new(); default_headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15")); - default_headers.insert( - header::ACCEPT_LANGUAGE, - header::HeaderValue::from_static("en-US,en;q=0.8"), - ); + default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8")); default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); default_headers.insert( @@ -64,10 +61,7 @@ fn icon(domain: String) -> Cached>> { match get_icon(&domain) { Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()), - _ => Cached::ttl( - Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), - CONFIG.icon_cache_negttl(), - ), + _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), } } @@ -100,10 +94,7 @@ fn is_valid_domain(domain: &str) -> bool { for c in domain.chars() { if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) { - debug!( - "Domain validation error: '{}' contains an invalid character '{}'", - domain, c - ); + debug!("Domain validation error: '{}' contains an invalid character '{}'", domain, c); return false; } } @@ -352,12 +343,20 @@ struct Icon { impl Icon { const fn new(priority: u8, href: String) -> Self { - Self { href, priority } + Self { + href, + priority, + } } } fn get_favicons_node(node: &std::rc::Rc, icons: &mut Vec, url: &Url) { - if let markup5ever_rcdom::NodeData::Element { name, attrs, .. } = &node.data { + if let markup5ever_rcdom::NodeData::Element { + name, + attrs, + .. + } = &node.data + { if name.local.as_ref() == "link" { let mut has_rel = false; let mut href = None; diff --git a/src/api/identity.rs b/src/api/identity.rs index 22a7a11e..df05d4db 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -87,27 +87,18 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult let username = data.username.as_ref().unwrap(); let user = match User::find_by_mail(username, &conn) { Some(user) => user, - None => err!( - "Username or password is incorrect. Try again", - format!("IP: {}. Username: {}.", ip.ip, username) - ), + None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)), }; // Check password let password = data.password.as_ref().unwrap(); if !user.check_valid_password(password) { - err!( - "Username or password is incorrect. Try again", - format!("IP: {}. 
Username: {}.", ip.ip, username) - ) + err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)) } // Check if the user is disabled if !user.enabled { - err!( - "This user has been disabled", - format!("IP: {}. Username: {}.", ip.ip, username) - ) + err!("This user has been disabled", format!("IP: {}. Username: {}.", ip.ip, username)) } let now = Local::now(); @@ -137,10 +128,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult } // We still want the login to fail until they actually verified the email address - err!( - "Please verify your email before trying again.", - format!("IP: {}. Username: {}.", ip.ip, username) - ) + err!("Please verify your email before trying again.", format!("IP: {}. Username: {}.", ip.ip, username)) } let (mut device, new_device) = get_device(&data, &conn, &user); @@ -234,10 +222,7 @@ fn twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => err_json!( - _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, - "2FA token not provided" - ), + None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), }; let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); @@ -266,10 +251,9 @@ fn twofactor_auth( Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => { remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } - _ => err_json!( - _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, - "2FA Remember token not provided" - ), + _ => { + err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided") + } } } _ => err!("Invalid two factor provider"), diff --git a/src/api/mod.rs b/src/api/mod.rs index 840c65ff..465e58bb 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -54,9 +54,9 @@ impl NumberOrString { use std::num::ParseIntError as PIE; match self { NumberOrString::Number(n) => Ok(n), - NumberOrString::String(s) => s - .parse() - .map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())), + NumberOrString::String(s) => { + s.parse().map_err(|e: PIE| crate::Error::new("Can't convert to number", e.to_string())) + } } } } diff --git a/src/api/notifications.rs b/src/api/notifications.rs index c1b9c316..a64ea9d8 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -15,9 +15,7 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); #[get("/hub")] fn websockets_err() -> EmptyResult { if CONFIG.websocket_enabled() - && SHOW_WEBSOCKETS_MSG - .compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed) - .is_ok() + && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() { err!( " @@ -205,9 +203,7 @@ impl Handler for WsHandler { let handler_insert = self.out.clone(); let handler_update = self.out.clone(); - self.users - .map - .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update)); + self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update)); // Schedule a ping to keep the connection alive self.out.timeout(PING_MS, PING) @@ -217,7 +213,11 @@ impl Handler for WsHandler { if let Message::Text(text) = msg.clone() { let json = &text[..text.len() - 1]; // Remove last char - if let Ok(InitialMessage { protocol, version }) = from_str::(json) { + if let Ok(InitialMessage { + protocol, + version, + }) = 
from_str::(json) + { if &protocol == "messagepack" && version == 1 { return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message } @@ -296,10 +296,7 @@ impl WebSocketUsers { // NOTE: The last modified date needs to be updated before calling these methods pub fn send_user_update(&self, ut: UpdateType, user: &User) { let data = create_update( - vec![ - ("UserId".into(), user.uuid.clone().into()), - ("Date".into(), serialize_date(user.updated_at)), - ], + vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))], ut, ); diff --git a/src/api/web.rs b/src/api/web.rs index 5d2048ea..29c64ae4 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -76,48 +76,22 @@ fn alive() -> Json { #[get("/bwrs_static/")] fn static_files(filename: String) -> Result, Error> { match filename.as_ref() { - "mail-github.png" => Ok(Content( - ContentType::PNG, - include_bytes!("../static/images/mail-github.png"), - )), - "logo-gray.png" => Ok(Content( - ContentType::PNG, - include_bytes!("../static/images/logo-gray.png"), - )), - "shield-white.png" => Ok(Content( - ContentType::PNG, - include_bytes!("../static/images/shield-white.png"), - )), - "error-x.svg" => Ok(Content( - ContentType::SVG, - include_bytes!("../static/images/error-x.svg"), - )), + "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), + "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))), + "shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))), + "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))), "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))), - "bootstrap.css" => Ok(Content( - ContentType::CSS, - include_bytes!("../static/scripts/bootstrap.css"), - )), - "bootstrap-native.js" => Ok(Content( - ContentType::JavaScript, - include_bytes!("../static/scripts/bootstrap-native.js"), - )), - "identicon.js" => Ok(Content( - ContentType::JavaScript, - include_bytes!("../static/scripts/identicon.js"), - )), - "datatables.js" => Ok(Content( - ContentType::JavaScript, - include_bytes!("../static/scripts/datatables.js"), - )), - "datatables.css" => Ok(Content( - ContentType::CSS, - include_bytes!("../static/scripts/datatables.css"), - )), - "jquery-3.5.1.slim.js" => Ok(Content( - ContentType::JavaScript, - include_bytes!("../static/scripts/jquery-3.5.1.slim.js"), - )), + "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), + "bootstrap-native.js" => { + Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))) + } + "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), + "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))), + "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), + "jquery-3.5.1.slim.js" => { + Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))) + } _ => err!(format!("Static file not found: {}", filename)), } } diff --git a/src/auth.rs b/src/auth.rs index 4fe9a7b5..516583c0 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -260,7 +260,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for Host { format!("{}://{}", protocol, host) }; - Outcome::Success(Host { host }) + Outcome::Success(Host { + host, + }) 
} } @@ -316,10 +318,8 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { }; if user.security_stamp != claims.sstamp { - if let Some(stamp_exception) = user - .stamp_exception - .as_deref() - .and_then(|s| serde_json::from_str::(s).ok()) + if let Some(stamp_exception) = + user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::(s).ok()) { let current_route = match request.route().and_then(|r| r.name) { Some(name) => name, @@ -337,7 +337,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { } } - Outcome::Success(Headers { host, device, user }) + Outcome::Success(Headers { + host, + device, + user, + }) } } @@ -639,10 +643,10 @@ impl<'a, 'r> FromRequest<'a, 'r> for ClientIp { None }; - let ip = ip - .or_else(|| req.remote().map(|r| r.ip())) - .unwrap_or_else(|| "0.0.0.0".parse().unwrap()); + let ip = ip.or_else(|| req.remote().map(|r| r.ip())).unwrap_or_else(|| "0.0.0.0".parse().unwrap()); - Outcome::Success(ClientIp { ip }) + Outcome::Success(ClientIp { + ip, + }) } } diff --git a/src/config.rs b/src/config.rs index df4e90b4..670d74fa 100644 --- a/src/config.rs +++ b/src/config.rs @@ -511,10 +511,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { let limit = 256; if cfg.database_max_conns < 1 || cfg.database_max_conns > limit { - err!(format!( - "`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", - limit, - )); + err!(format!("`DATABASE_MAX_CONNS` contains an invalid value. Ensure it is between 1 and {}.", limit,)); } let dom = cfg.domain.to_lowercase(); @@ -855,9 +852,7 @@ fn case_helper<'reg, 'rc>( rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { - let param = h - .param(0) - .ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?; + let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"case\""))?; let value = param.value().clone(); if h.params().iter().skip(1).any(|x| x.value() == &value) { @@ -874,16 +869,12 @@ fn js_escape_helper<'reg, 'rc>( _rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { - let param = h - .param(0) - .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; + let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; let no_quote = h.param(1).is_some(); - let value = param - .value() - .as_str() - .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; + let value = + param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); if !no_quote { diff --git a/src/crypto.rs b/src/crypto.rs index ecff2ce0..43b8fc7d 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -47,9 +47,7 @@ pub fn get_random_64() -> Vec { pub fn get_random(mut array: Vec) -> Vec { use ring::rand::{SecureRandom, SystemRandom}; - SystemRandom::new() - .fill(&mut array) - .expect("Error generating random values"); + SystemRandom::new().fill(&mut array).expect("Error generating random values"); array } diff --git a/src/db/mod.rs b/src/db/mod.rs index 85926162..8e0bef54 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -314,9 +314,7 @@ mod sqlite_migrations { // Turn on WAL in SQLite if crate::CONFIG.enable_db_wal() { - diesel::sql_query("PRAGMA journal_mode=wal") - .execute(&connection) - .expect("Failed to turn on WAL"); + diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL"); 
} embedded_migrations::run_with_output(&connection, &mut std::io::stdout())?; diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 6de8afbd..5eeac38b 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -83,16 +83,9 @@ impl Cipher { attachments.iter().map(|c| c.to_json(host)).collect() }; - let fields_json = self - .fields - .as_ref() - .and_then(|s| serde_json::from_str(s).ok()) - .unwrap_or(Value::Null); - let password_history_json = self - .password_history - .as_ref() - .and_then(|s| serde_json::from_str(s).ok()) - .unwrap_or(Value::Null); + let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); + let password_history_json = + self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) { Some((ro, hp)) => (ro, hp), @@ -195,12 +188,10 @@ impl Cipher { None => { // Belongs to Organization, need to update affected users if let Some(ref org_uuid) = self.organization_uuid { - UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - user_uuids.push(user_org.user_uuid.clone()) - }); + UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + user_uuids.push(user_org.user_uuid.clone()) + }); } } }; diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 06a2d671..88e11c6e 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -127,11 +127,9 @@ impl Collection { } pub fn update_users_revision(&self, conn: &DbConn) { - UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { @@ -170,10 +168,7 @@ impl Collection { } pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { - Self::find_by_user_uuid(user_uuid, conn) - .into_iter() - .filter(|c| c.org_uuid == org_uuid) - .collect() + Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect() } pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec { @@ -380,11 +375,9 @@ impl CollectionUser { } pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { - CollectionUser::find_by_collection(&collection_uuid, conn) - .iter() - .for_each(|collection| { - User::update_uuid_revision(&collection.user_uuid, conn); - }); + CollectionUser::find_by_collection(&collection_uuid, conn).iter().for_each(|collection| { + User::update_uuid_revision(&collection.user_uuid, conn); + }); db_run! 
{ conn: { diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid))) diff --git a/src/db/models/device.rs b/src/db/models/device.rs index b2297fe2..77837fca 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -74,26 +74,10 @@ impl Device { let time_now = Utc::now().naive_utc(); self.updated_at = time_now; - let orgowner: Vec<_> = orgs - .iter() - .filter(|o| o.atype == 0) - .map(|o| o.org_uuid.clone()) - .collect(); - let orgadmin: Vec<_> = orgs - .iter() - .filter(|o| o.atype == 1) - .map(|o| o.org_uuid.clone()) - .collect(); - let orguser: Vec<_> = orgs - .iter() - .filter(|o| o.atype == 2) - .map(|o| o.org_uuid.clone()) - .collect(); - let orgmanager: Vec<_> = orgs - .iter() - .filter(|o| o.atype == 3) - .map(|o| o.org_uuid.clone()) - .collect(); + let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); + let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); + let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); + let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); // Create the JWT claims struct, to send to the client use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 9931ed5b..c5d36ff7 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -116,10 +116,7 @@ impl PartialOrd for i32 { } fn le(&self, other: &UserOrgType) -> bool { - matches!( - self.partial_cmp(other), - Some(Ordering::Less) | Some(Ordering::Equal) | None - ) + matches!(self.partial_cmp(other), Some(Ordering::Less) | Some(Ordering::Equal) | None) } } @@ -192,11 +189,9 @@ use crate::error::MapResult; /// Database methods impl Organization { pub fn save(&self, conn: &DbConn) -> EmptyResult { - UserOrganization::find_by_org(&self.uuid, conn) - .iter() - .for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| { + User::update_uuid_revision(&user_org.user_uuid, conn); + }); db_run! 
{ conn: sqlite, mysql { diff --git a/src/db/models/user.rs b/src/db/models/user.rs index 6f61dfeb..5c29eaa6 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -348,7 +348,9 @@ impl User { impl Invitation { pub const fn new(email: String) -> Self { - Self { email } + Self { + email, + } } pub fn save(&self, conn: &DbConn) -> EmptyResult { diff --git a/src/error.rs b/src/error.rs index 9c597a8b..e8fa7613 100644 --- a/src/error.rs +++ b/src/error.rs @@ -198,11 +198,7 @@ impl<'r> Responder<'r> for Error { let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest); - Response::build() - .status(code) - .header(ContentType::JSON) - .sized_body(Cursor::new(format!("{}", self))) - .ok() + Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok() } } diff --git a/src/mail.rs b/src/mail.rs index eb7c84c5..025a1a9a 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -58,18 +58,12 @@ fn mailer() -> SmtpTransport { let smtp_client = match CONFIG.smtp_auth_mechanism() { Some(mechanism) => { - let allowed_mechanisms = [ - SmtpAuthMechanism::Plain, - SmtpAuthMechanism::Login, - SmtpAuthMechanism::Xoauth2, - ]; + let allowed_mechanisms = [SmtpAuthMechanism::Plain, SmtpAuthMechanism::Login, SmtpAuthMechanism::Xoauth2]; let mut selected_mechanisms = vec![]; for wanted_mechanism in mechanism.split(',') { for m in &allowed_mechanisms { if m.to_string().to_lowercase() - == wanted_mechanism - .trim_matches(|c| c == '"' || c == '\'' || c == ' ') - .to_lowercase() + == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { selected_mechanisms.push(*m); } @@ -80,10 +74,7 @@ fn mailer() -> SmtpTransport { smtp_client.authentication(selected_mechanisms) } else { // Only show a warning, and return without setting an actual authentication mechanism - warn!( - "No valid SMTP Auth mechanism found for '{}', using default values", - mechanism - ); + warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism); smtp_client } } @@ -327,16 +318,9 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() - .message_id(Some(format!( - "<{}@{}>", - crate::util::get_uuid(), - smtp_from.split('@').collect::>()[1] - ))) + .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::>()[1]))) .to(Mailbox::new(None, Address::from_str(&address)?)) - .from(Mailbox::new( - Some(CONFIG.smtp_from_name()), - Address::from_str(smtp_from)?, - )) + .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?)) .subject(subject) .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?; diff --git a/src/util.rs b/src/util.rs index 1858d4a3..8da13b41 100644 --- a/src/util.rs +++ b/src/util.rs @@ -127,14 +127,8 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached { // Log all the routes from the main paths list, and the attachments endpoint // Effectively ignores, any static file route, and the alive endpoint -const LOGGED_ROUTES: [&str; 6] = [ - "/api", - "/admin", - "/identity", - "/icons", - "/notifications/hub/negotiate", - "/attachments", -]; +const LOGGED_ROUTES: [&str; 6] = + ["/api", "/admin", "/identity", "/icons", "/notifications/hub/negotiate", "/attachments"]; // Boolean is extra debug, when true, we ignore the whitelist above and also print the mounts pub struct BetterLogging(pub bool); @@ -161,7 +155,11 @@ impl Fairing for BetterLogging { } let 
config = rocket.config(); - let scheme = if config.tls_enabled() { "https" } else { "http" }; + let scheme = if config.tls_enabled() { + "https" + } else { + "http" + }; let addr = format!("{}://{}:{}", &scheme, &config.address, &config.port); info!(target: "start", "Rocket has launched from {}", addr); } From f7056bcaa5d9e699f667994592ffb4bc02619a1e Mon Sep 17 00:00:00 2001 From: Jake Howard Date: Wed, 7 Apr 2021 19:25:02 +0100 Subject: [PATCH 18/25] Enable socks feature for reqwest This allowed HTTP_PROXY be set with a socks5 proxy --- Cargo.lock | 39 +++++++++++++++++++++++++++++++++++++++ Cargo.toml | 2 +- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 7a18fadc..4c982ca5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -563,6 +563,12 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + [[package]] name = "encoding_rs" version = "0.8.28" @@ -2213,6 +2219,7 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-native-tls", + "tokio-socks", "tokio-util", "url 2.2.1", "wasm-bindgen", @@ -2767,6 +2774,26 @@ dependencies = [ "utf-8", ] +[[package]] +name = "thiserror" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +dependencies = [ + "proc-macro2 1.0.24", + "quote 1.0.9", + "syn 1.0.65", +] + [[package]] name = "threadpool" version = "1.8.1" @@ -2865,6 +2892,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-socks" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51165dfa029d2a65969413a6cc96f354b86b464498702f174a4efa13608fd8c0" +dependencies = [ + "either", + "futures-util", + "thiserror", + "tokio", +] + [[package]] name = "tokio-util" version = "0.6.5" diff --git a/Cargo.toml b/Cargo.toml index 4edffc42..84c08806 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false } rocket_contrib = "0.5.0-dev" # HTTP client -reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli"] } +reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli", "socks"] } # multipart/form-data support multipart = { version = "0.17.1", features = ["server"], default-features = false } From 244bad3a24300792b74d4d51c7a90bfafba3842f Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Fri, 9 Apr 2021 22:30:39 -0700 Subject: [PATCH 19/25] Warn that the SQLite backup feature doesn't produce a complete backup Also add a link to the wiki page on backups. --- src/static/templates/admin/settings.hbs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/static/templates/admin/settings.hbs b/src/static/templates/admin/settings.hbs index 0b8c6277..e19c2c39 100644 --- a/src/static/templates/admin/settings.hbs +++ b/src/static/templates/admin/settings.hbs @@ -116,7 +116,11 @@ data-target="#g_database">Backup Database
- NOTE: A local installation of sqlite3 is required for this section to work. + WARNING: This function only creates a backup copy of the SQLite database. + This does not include any configuration or file attachment data that may + also be needed to fully restore a bitwarden_rs instance. For details on + how to perform complete backups, refer to the wiki page on + backups.
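For illustration only (not part of the patch above): a minimal sketch of what a fuller backup could look like, assuming the default Docker data directory `/data`, the default SQLite filename `db.sqlite3`, and the usual attachment/key/config locations — adjust the paths for your install and treat the wiki page on backups as the authoritative reference.

```sh
#!/bin/sh
# Sketch of a fuller backup than the admin page's "Backup Database" button.
# All paths below are assumptions based on the default Docker layout.
BACKUP_DIR="/backups/bitwarden_rs-$(date +%F)"
mkdir -p "$BACKUP_DIR"

# Take a consistent snapshot of the SQLite database (requires the sqlite3 CLI).
sqlite3 /data/db.sqlite3 ".backup '$BACKUP_DIR/db.sqlite3'"

# Attachments, the icon cache, the RSA keys and config.json are not covered by
# the database backup but are needed for a full restore; not every install has
# all of them, so copy whatever is present.
cp -a /data/attachments /data/icon_cache /data/rsa_key* /data/config.json "$BACKUP_DIR/" 2>/dev/null || true
```

Restoring is then essentially the reverse: stop the server and copy the saved files back into the data directory.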
From 27609ac4ccaebc2ff9952fae5d1ccedfc0d2c389 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Thu, 15 Apr 2021 18:27:05 +0200 Subject: [PATCH 20/25] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2eeaee6c..6b80b3b9 100644 --- a/README.md +++ b/README.md @@ -60,10 +60,10 @@ Thanks for your contribution to the project! From 305de2e2cd820ae6651a6442d88f5ca637263fae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Thu, 15 Apr 2021 18:30:23 +0200 Subject: [PATCH 21/25] Format the changes from merge to master --- src/api/admin.rs | 16 +++------------- src/api/core/two_factor/duo.rs | 3 ++- src/api/icons.rs | 6 +++++- src/util.rs | 10 +++++----- 4 files changed, 15 insertions(+), 20 deletions(-) diff --git a/src/api/admin.rs b/src/api/admin.rs index 4104ba5b..6b8ae6ee 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -3,7 +3,6 @@ use serde::de::DeserializeOwned; use serde_json::Value; use std::{env, time::Duration}; - use rocket::{ http::{Cookie, Cookies, SameSite}, request::{self, FlashMessage, Form, FromRequest, Outcome, Request}, @@ -19,7 +18,7 @@ use crate::{ db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType}, error::{Error, MapResult}, mail, - util::{format_naive_datetime_local, get_display_size, is_running_in_docker, get_reqwest_client}, + util::{format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker}, CONFIG, }; @@ -471,22 +470,13 @@ struct GitCommit { fn get_github_api(url: &str) -> Result { let github_api = get_reqwest_client(); - Ok(github_api - .get(url) - .timeout(Duration::from_secs(10)) - .send()? - .error_for_status()? - .json::()?) + Ok(github_api.get(url).timeout(Duration::from_secs(10)).send()?.error_for_status()?.json::()?) 
} fn has_http_access() -> bool { let http_access = get_reqwest_client(); - match http_access - .head("https://github.com/dani-garcia/bitwarden_rs") - .timeout(Duration::from_secs(10)) - .send() - { + match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() { Ok(r) => r.status().is_success(), _ => false, } diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index e8c96750..20d03a7c 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -204,7 +204,8 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em let client = get_reqwest_client(); - client.request(m, &url) + client + .request(m, &url) .basic_auth(username, Some(password)) .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)") .header(header::DATE, date) diff --git a/src/api/icons.rs b/src/api/icons.rs index 56d43111..33849bbb 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -12,7 +12,11 @@ use regex::Regex; use reqwest::{blocking::Client, blocking::Response, header, Url}; use rocket::{http::ContentType, http::Cookie, response::Content, Route}; -use crate::{error::Error, util::{Cached, get_reqwest_client_builder}, CONFIG}; +use crate::{ + error::Error, + util::{get_reqwest_client_builder, Cached}, + CONFIG, +}; pub fn routes() -> Vec { routes![icon] diff --git a/src/util.rs b/src/util.rs index 21dae07e..83930cbf 100644 --- a/src/util.rs +++ b/src/util.rs @@ -523,7 +523,10 @@ where } } -use reqwest::{blocking::{Client, ClientBuilder}, header}; +use reqwest::{ + blocking::{Client, ClientBuilder}, + header, +}; pub fn get_reqwest_client() -> Client { get_reqwest_client_builder().build().expect("Failed to build client") @@ -532,8 +535,5 @@ pub fn get_reqwest_client() -> Client { pub fn get_reqwest_client_builder() -> ClientBuilder { let mut headers = header::HeaderMap::new(); headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Bitwarden_RS")); - Client::builder() - .default_headers(headers) - .timeout(Duration::from_secs(10)) - + Client::builder().default_headers(headers).timeout(Duration::from_secs(10)) } From ced7f1771acd9696c7da8adb4bd6044d51abf7c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Thu, 15 Apr 2021 18:38:00 +0200 Subject: [PATCH 22/25] Update dependencies --- Cargo.lock | 194 +++++++++++++++++++++---------------------- Cargo.toml | 4 +- docker/Dockerfile.j2 | 4 +- rust-toolchain | 2 +- 4 files changed, 102 insertions(+), 102 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c982ca5..972ae95d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -471,9 +471,9 @@ version = "0.3.0" source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" dependencies = [ "bitflags", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -499,9 +499,9 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -686,9 +686,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f55667319111d593ba876406af7c409c0ebb44dc4be6132a783ccf163ea14c1" +checksum = 
"a9d5813545e459ad3ca1bff9915e9ad7f1a47dc6a91b627ce321d5863b7dd253" dependencies = [ "futures-channel", "futures-core", @@ -701,9 +701,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c2dd2df839b57db9ab69c2c9d8f3e8c81984781937fe2807dc6dcf3b2ad2939" +checksum = "ce79c6a52a299137a6013061e0cf0e688fce5d7f1bc60125f520912fdb29ec25" dependencies = [ "futures-core", "futures-sink", @@ -711,15 +711,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15496a72fabf0e62bdc3df11a59a3787429221dd0710ba8ef163d6f7a9112c94" +checksum = "098cd1c6dda6ca01650f1a37a794245eb73181d0d4d4e955e2f3c37db7af1815" [[package]] name = "futures-executor" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891a4b7b96d84d5940084b2a37632dd65deeae662c114ceaa2c879629c9c0ad1" +checksum = "10f6cb7042eda00f0049b1d2080aa4b93442997ee507eb3828e8bd7577f94c9d" dependencies = [ "futures-core", "futures-task", @@ -728,39 +728,39 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71c2c65c57704c32f5241c1223167c2c3294fd34ac020c807ddbe6db287ba59" +checksum = "365a1a1fb30ea1c03a830fdb2158f5236833ac81fa0ad12fe35b29cddc35cb04" [[package]] name = "futures-macro" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea405816a5139fb39af82c2beb921d52143f556038378d6db21183a5c37fbfb7" +checksum = "668c6733a182cd7deb4f1de7ba3bf2120823835b3bcfbeacf7d2c4a773c1bb8b" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "futures-sink" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85754d98985841b7d4f5e8e6fbfa4a4ac847916893ec511a2917ccd8525b8bb3" +checksum = "5c5629433c555de3d82861a7a4e3794a4c40040390907cfbfd7143a92a426c23" [[package]] name = "futures-task" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa189ef211c15ee602667a6fcfe1c1fd9e07d42250d2156382820fba33c9df80" +checksum = "ba7aa51095076f3ba6d9a1f702f74bd05ec65f555d70d2033d55ba8d69f581bc" [[package]] name = "futures-util" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1812c7ab8aedf8d6f2701a43e1243acdbcc2b36ab26e2ad421eb99ac963d96d1" +checksum = "3c144ad54d60f23927f0a6b6d816e4271278b64f005ad65e4e35291d2de9c025" dependencies = [ "futures-channel", "futures-core", @@ -929,16 +929,16 @@ dependencies = [ "log 0.4.14", "mac", "markup5ever", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "http" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11" dependencies = [ "bytes 1.0.1", "fnv", @@ -958,9 +958,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.3.5" +version = "1.3.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" +checksum = "bc35c995b9d93ec174cf9a27d425c7892722101e14993cd227fdb51d70cf9589" [[package]] name = "httpdate" @@ -1127,9 +1127,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.49" +version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc15e39392125075f60c95ba416f5381ff6c3a948ff02ab12464715adf56c821" +checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c" dependencies = [ "wasm-bindgen", ] @@ -1200,9 +1200,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.91" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7" +checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41" [[package]] name = "libsqlite3-sys" @@ -1217,9 +1217,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "5a3c91c24eae6777794bb1997ad98bbb87daf92890acab859f7eaa4320333176" dependencies = [ "scopeguard", ] @@ -1323,9 +1323,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" dependencies = [ "migrations_internals", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1550,9 +1550,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1825,9 +1825,9 @@ checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1900,9 +1900,9 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -1967,9 +1967,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec" dependencies = [ "unicode-xid 0.2.1", ] @@ -2001,14 +2001,14 @@ version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", ] [[package]] name = "quoted_printable" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b080c5db639b292ac79cbd34be0cfc5d36694768d8341109634d90b86930e2" +checksum = "1238256b09923649ec89b08104c4dfe9f6cb2fea734a5db5384e44916d59e9c5" [[package]] name = "r2d2" @@ -2157,9 +2157,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = 
"0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +checksum = "8270314b5ccceb518e7e578952f0b72b88222d02e8f77f5ecf7abbb673539041" dependencies = [ "bitflags", ] @@ -2192,9 +2192,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf12057f289428dbf5c591c74bf10392e4a8003f993405a902f20117019022d4" +checksum = "2296f2fac53979e8ccbc4a1136b25dcefd37be9ed7e4a1f6b05a6029c84ff124" dependencies = [ "async-compression", "base64 0.13.0", @@ -2408,9 +2408,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sct" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" dependencies = [ "ring", "untrusted", @@ -2469,9 +2469,9 @@ version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2649,11 +2649,11 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "serde", "serde_derive", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2663,13 +2663,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" dependencies = [ "base-x", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "serde", "serde_derive", "serde_json", "sha1", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2699,7 +2699,7 @@ checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" dependencies = [ "phf_generator", "phf_shared", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", ] @@ -2722,11 +2722,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.65" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1d708c221c5a612956ef9f75b37e454e88d1f7b899fbd3a18d4252012d663" +checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "unicode-xid 0.2.1", ] @@ -2789,9 +2789,9 @@ version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -2846,17 +2846,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5c3be1edfad6027c69f5491cf4cb310d1a71ecd6af742788c6ff8bced86b8fa" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", "standback", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" dependencies = [ "tinyvec_macros", ] @@ -2869,9 +2869,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134af885d758d645f0f0505c9a8b3f9bf8a348fd822e112ab5248138348f1722" +checksum = "83f0c8e7c0addab50b663055baf787d0af7f413a46e6e7fb9559a4e4db7137a5" dependencies = [ "autocfg", "bytes 1.0.1", @@ -2906,9 +2906,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5143d049e85af7fbc36f5454d990e62c2df705b3589f123b71f441b6b59f443f" +checksum = "940a12c99365c31ea8dd9ba04ec1be183ffe4920102bb7122c2f515437601e8e" dependencies = [ "bytes 1.0.1", "futures-core", @@ -2952,9 +2952,9 @@ version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", ] [[package]] @@ -3042,9 +3042,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +checksum = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0" dependencies = [ "matches", ] @@ -3167,9 +3167,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" [[package]] name = "wasm-bindgen" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe8f61dba8e5d645a4d8132dc7a0a66861ed5e1045d2c0ed940fab33bac0fbe" +checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9" dependencies = [ "cfg-if 1.0.0", "serde", @@ -3179,24 +3179,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046ceba58ff062da072c7cb4ba5b22a37f00a302483f7e2a6cdc18fedbdc1fd3" +checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae" dependencies = [ "bumpalo", "lazy_static", "log 0.4.14", - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73157efb9af26fb564bb59a009afd1c7c334a44db171d280690d0c3faaec3468" +checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -3206,9 +3206,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef9aa01d36cda046f797c57959ff5f3c615c9cc63997a8d545831ec7976819b" +checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f" dependencies = [ "quote 1.0.9", "wasm-bindgen-macro-support", @@ -3216,28 +3216,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.72" +version = "0.2.73" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "96eb45c1b2ee33545a813a92dbb53856418bf7eb54ab34f7f7ff1448a5b3735d" +checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c" dependencies = [ - "proc-macro2 1.0.24", + "proc-macro2 1.0.26", "quote 1.0.9", - "syn 1.0.65", + "syn 1.0.69", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.72" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7148f4696fb4960a346eaa60bbfb42a1ac4ebba21f750f75fc1375b098d5ffa" +checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" [[package]] name = "web-sys" -version = "0.3.49" +version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fe19d70f5dacc03f6e46777213facae5ac3801575d56ca6cbd4c93dcd12310" +checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index 84c08806..ee96b082 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ rocket = { version = "0.5.0-dev", features = ["tls"], default-features = false } rocket_contrib = "0.5.0-dev" # HTTP client -reqwest = { version = "0.11.2", features = ["blocking", "json", "gzip", "brotli", "socks"] } +reqwest = { version = "0.11.3", features = ["blocking", "json", "gzip", "brotli", "socks"] } # multipart/form-data support multipart = { version = "0.17.1", features = ["server"], default-features = false } @@ -102,7 +102,7 @@ num-traits = "0.2.14" num-derive = "0.3.3" # Email libraries -tracing = { version = "0.1", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. +tracing = { version = "0.1.25", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. lettre = { version = "0.10.0-beta.3", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } newline-converter = "0.2.0" diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index f003dd0f..6f778d9e 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -1,10 +1,10 @@ # This file was generated using a Jinja2 template. # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. 
-{% set build_stage_base_image = "rust:1.50" %} +{% set build_stage_base_image = "rust:1.51" %} {% if "alpine" in target_file %} {% if "amd64" in target_file %} -{% set build_stage_base_image = "clux/muslrust:nightly-2021-02-22" %} +{% set build_stage_base_image = "clux/muslrust:nightly-2021-04-14" %} {% set runtime_stage_base_image = "alpine:3.13" %} {% set package_arch_target = "x86_64-unknown-linux-musl" %} {% elif "armv7" in target_file %} diff --git a/rust-toolchain b/rust-toolchain index 4fb94d9b..8e3f2d47 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2021-02-22 \ No newline at end of file +nightly-2021-04-14 \ No newline at end of file From 34ea10475d316ccb2ca4cd2cac67b61c4cdfb62a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Tue, 27 Apr 2021 23:18:32 +0200 Subject: [PATCH 23/25] Project renaming --- .env.template | 10 +- .github/ISSUE_TEMPLATE/bug_report.md | 14 +-- .github/ISSUE_TEMPLATE/config.yml | 4 +- .github/workflows/build.yml | 8 +- Cargo.lock | 102 +++++++++--------- Cargo.toml | 4 +- README.md | 29 +++-- docker/Dockerfile.buildx | 2 +- docker/Dockerfile.j2 | 24 ++--- docker/amd64/Dockerfile | 18 ++-- docker/amd64/Dockerfile.alpine | 18 ++-- docker/arm64/Dockerfile | 18 ++-- docker/armv6/Dockerfile | 18 ++-- docker/armv7/Dockerfile | 18 ++-- docker/armv7/Dockerfile.alpine | 18 ++-- docker/start.sh | 10 +- hooks/README.md | 2 +- hooks/build | 2 +- hooks/push | 2 +- src/api/admin.rs | 6 +- src/api/core/accounts.rs | 2 +- src/api/core/ciphers.rs | 2 +- src/api/core/two_factor/duo.rs | 2 +- src/config.rs | 8 +- src/db/models/cipher.rs | 2 +- src/main.rs | 16 +-- src/static/templates/admin/base.hbs | 4 +- src/static/templates/admin/diagnostics.hbs | 8 +- src/static/templates/admin/settings.hbs | 4 +- src/static/templates/email/change_email.hbs | 2 +- .../templates/email/change_email.html.hbs | 4 +- src/static/templates/email/delete_account.hbs | 2 +- .../templates/email/delete_account.html.hbs | 4 +- .../templates/email/invite_accepted.hbs | 4 +- .../templates/email/invite_accepted.html.hbs | 6 +- .../templates/email/invite_confirmed.hbs | 2 +- .../templates/email/invite_confirmed.html.hbs | 4 +- .../templates/email/new_device_logged_in.hbs | 2 +- .../email/new_device_logged_in.html.hbs | 4 +- src/static/templates/email/pw_hint_none.hbs | 2 +- .../templates/email/pw_hint_none.html.hbs | 4 +- src/static/templates/email/pw_hint_some.hbs | 2 +- .../templates/email/pw_hint_some.html.hbs | 4 +- .../templates/email/send_org_invite.hbs | 2 +- .../templates/email/send_org_invite.html.hbs | 4 +- src/static/templates/email/smtp_test.hbs | 4 +- src/static/templates/email/smtp_test.html.hbs | 6 +- .../templates/email/twofactor_email.hbs | 2 +- .../templates/email/twofactor_email.html.hbs | 4 +- src/static/templates/email/verify_email.hbs | 2 +- .../templates/email/verify_email.html.hbs | 4 +- src/static/templates/email/welcome.hbs | 2 +- src/static/templates/email/welcome.html.hbs | 4 +- .../templates/email/welcome_must_verify.hbs | 2 +- .../email/welcome_must_verify.html.hbs | 4 +- src/util.rs | 2 +- 56 files changed, 231 insertions(+), 232 deletions(-) diff --git a/.env.template b/.env.template index e4d0b1e1..3af3a049 100644 --- a/.env.template +++ b/.env.template @@ -1,4 +1,4 @@ -## Bitwarden_RS Configuration File +## Vaultwarden Configuration File ## Uncomment any of the following lines to change the defaults ## ## Be aware that most of these settings will be overridden if they were changed @@ -99,7 +99,7 @@ ## Enable WAL for the DB ## 
Set to false to avoid enabling WAL during startup. ## Note that if the DB already has WAL enabled, you will also need to disable WAL in the DB, -## this setting only prevents bitwarden_rs from automatically enabling it on start. +## this setting only prevents vaultwarden from automatically enabling it on start. ## Please read project wiki page about this setting first before changing the value as it can ## cause performance degradation or might render the service unable to start. # ENABLE_DB_WAL=true @@ -187,7 +187,7 @@ ## Invitations org admins to invite users, even when signups are disabled # INVITATIONS_ALLOWED=true ## Name shown in the invitation emails that don't come from a specific organization -# INVITATION_ORG_NAME=Bitwarden_RS +# INVITATION_ORG_NAME=Vaultwarden ## Per-organization attachment limit (KB) ## Limit in kilobytes for an organization attachments, once the limit is exceeded it won't be possible to upload more @@ -259,8 +259,8 @@ ## To make sure the email links are pointing to the correct host, set the DOMAIN variable. ## Note: if SMTP_USERNAME is specified, SMTP_PASSWORD is mandatory # SMTP_HOST=smtp.domain.tld -# SMTP_FROM=bitwarden-rs@domain.tld -# SMTP_FROM_NAME=Bitwarden_RS +# SMTP_FROM=vaultwarden@domain.tld +# SMTP_FROM_NAME=Vaultwarden # SMTP_PORT=587 # Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 is outdated and used with Implicit TLS. # SMTP_SSL=true # (Explicit) - This variable by default configures Explicit STARTTLS, it will upgrade an insecure connection to a secure one. Unless SMTP_EXPLICIT_TLS is set to true. Either port 587 or 25 are default. # SMTP_EXPLICIT_TLS=true # (Implicit) - N.B. This variable configures Implicit TLS. It's currently mislabelled (see bug #851) - SMTP_SSL Needs to be set to true for this option to work. Usually port 465 is used here. diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 8c79cf2d..128c5f58 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,6 +1,6 @@ --- name: Bug report -about: Use this ONLY for bugs in bitwarden_rs itself. Use the Discourse forum (link below) to request features or get help with usage/configuration. If in doubt, use the forum. +about: Use this ONLY for bugs in vaultwarden itself. Use the Discourse forum (link below) to request features or get help with usage/configuration. If in doubt, use the forum. title: '' labels: '' assignees: '' @@ -8,11 +8,11 @@ assignees: '' --- @@ -37,9 +37,9 @@ such as passwords, IP addresses, and DNS names as appropriate. --> - + -* bitwarden_rs version: +* vaultwarden version: * Install method: @@ -54,7 +54,7 @@ such as passwords, IP addresses, and DNS names as appropriate. ### Steps to reproduce +and how did you start vaultwarden? --> ### Expected behaviour diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 072be117..7ecd420d 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -3,6 +3,6 @@ contact_links: - name: Discourse forum for bitwarden_rs url: https://bitwardenrs.discourse.group/ about: Use this forum to request features or get help with usage/configuration. 
- - name: GitHub Discussions for bitwarden_rs - url: https://github.com/dani-garcia/bitwarden_rs/discussions + - name: GitHub Discussions for vaultwarden + url: https://github.com/dani-garcia/vaultwarden/discussions about: An alternative to the Discourse forum, if this is easier for you. diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9b476d3e..bcb16381 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -133,8 +133,8 @@ jobs: - name: Upload artifact uses: actions/upload-artifact@v2 with: - name: bitwarden_rs-${{ matrix.target-triple }}${{ matrix.ext }} - path: target/${{ matrix.target-triple }}/release/bitwarden_rs${{ matrix.ext }} + name: vaultwarden-${{ matrix.target-triple }}${{ matrix.ext }} + path: target/${{ matrix.target-triple }}/release/vaultwarden${{ matrix.ext }} # End Upload artifact to Github Actions @@ -145,7 +145,7 @@ jobs: # uses: Shopify/upload-to-release@1 # if: startsWith(github.ref, 'refs/tags/') # with: - # name: bitwarden_rs-${{ matrix.target-triple }}${{ matrix.ext }} - # path: target/${{ matrix.target-triple }}/release/bitwarden_rs${{ matrix.ext }} + # name: vaultwarden-${{ matrix.target-triple }}${{ matrix.ext }} + # path: target/${{ matrix.target-triple }}/release/vaultwarden${{ matrix.ext }} # repo-token: ${{ secrets.GITHUB_TOKEN }} # End Upload to github actions release diff --git a/Cargo.lock b/Cargo.lock index 972ae95d..1b89140a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -144,57 +144,6 @@ dependencies = [ "wyz", ] -[[package]] -name = "bitwarden_rs" -version = "1.0.0" -dependencies = [ - "backtrace", - "chashmap", - "chrono", - "chrono-tz", - "data-encoding", - "data-url", - "diesel", - "diesel_migrations", - "dotenv", - "fern", - "handlebars", - "html5ever", - "idna 0.2.2", - "job_scheduler", - "jsonwebtoken", - "lettre", - "libsqlite3-sys", - "log 0.4.14", - "markup5ever_rcdom", - "multipart", - "newline-converter", - "num-derive", - "num-traits", - "oath", - "once_cell", - "openssl", - "parity-ws", - "paste", - "percent-encoding 2.1.0", - "pico-args", - "rand 0.8.3", - "regex", - "reqwest", - "ring", - "rmpv", - "rocket", - "rocket_contrib", - "serde", - "serde_json", - "syslog", - "time 0.2.26", - "tracing", - "u2f", - "uuid", - "yubico", -] - [[package]] name = "block-buffer" version = "0.7.3" @@ -3114,6 +3063,57 @@ dependencies = [ "getrandom 0.2.2", ] +[[package]] +name = "vaultwarden" +version = "1.0.0" +dependencies = [ + "backtrace", + "chashmap", + "chrono", + "chrono-tz", + "data-encoding", + "data-url", + "diesel", + "diesel_migrations", + "dotenv", + "fern", + "handlebars", + "html5ever", + "idna 0.2.2", + "job_scheduler", + "jsonwebtoken", + "lettre", + "libsqlite3-sys", + "log 0.4.14", + "markup5ever_rcdom", + "multipart", + "newline-converter", + "num-derive", + "num-traits", + "oath", + "once_cell", + "openssl", + "parity-ws", + "paste", + "percent-encoding 2.1.0", + "pico-args", + "rand 0.8.3", + "regex", + "reqwest", + "ring", + "rmpv", + "rocket", + "rocket_contrib", + "serde", + "serde_json", + "syslog", + "time 0.2.26", + "tracing", + "u2f", + "uuid", + "yubico", +] + [[package]] name = "vcpkg" version = "0.2.11" diff --git a/Cargo.toml b/Cargo.toml index ee96b082..67dea2fa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,10 @@ [package] -name = "bitwarden_rs" +name = "vaultwarden" version = "1.0.0" authors = ["Daniel García "] edition = "2018" -repository = "https://github.com/dani-garcia/bitwarden_rs" +repository = "https://github.com/dani-garcia/vaultwarden" readme = 
"README.md" license = "GPL-3.0-only" publish = false diff --git a/README.md b/README.md index 6b80b3b9..a6003b44 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,14 @@ -### This is a Bitwarden server API implementation written in Rust compatible with [upstream Bitwarden clients](https://bitwarden.com/#download)*, perfect for self-hosted deployment where running the official resource-heavy service might not be ideal. +### Alternative implementation of the Bitwarden server API written in Rust and compatible with [upstream Bitwarden clients](https://bitwarden.com/#download)*, perfect for self-hosted deployment where running the official resource-heavy service might not be ideal. --- -[![Travis Build Status](https://travis-ci.org/dani-garcia/bitwarden_rs.svg?branch=master)](https://travis-ci.org/dani-garcia/bitwarden_rs) -[![Docker Pulls](https://img.shields.io/docker/pulls/bitwardenrs/server.svg)](https://hub.docker.com/r/bitwardenrs/server) -[![Dependency Status](https://deps.rs/repo/github/dani-garcia/bitwarden_rs/status.svg)](https://deps.rs/repo/github/dani-garcia/bitwarden_rs) -[![GitHub Release](https://img.shields.io/github/release/dani-garcia/bitwarden_rs.svg)](https://github.com/dani-garcia/bitwarden_rs/releases/latest) -[![GPL-3.0 Licensed](https://img.shields.io/github/license/dani-garcia/bitwarden_rs.svg)](https://github.com/dani-garcia/bitwarden_rs/blob/master/LICENSE.txt) -[![Matrix Chat](https://img.shields.io/matrix/bitwarden_rs:matrix.org.svg?logo=matrix)](https://matrix.to/#/#bitwarden_rs:matrix.org) +[![Docker Pulls](https://img.shields.io/docker/pulls/bitwardenrs/server.svg)](https://hub.docker.com/r/vaultwarden/server) +[![Dependency Status](https://deps.rs/repo/github/dani-garcia/vaultwarden/status.svg)](https://deps.rs/repo/github/dani-garcia/vaultwarden) +[![GitHub Release](https://img.shields.io/github/release/dani-garcia/vaultwarden.svg)](https://github.com/dani-garcia/vaultwarden/releases/latest) +[![GPL-3.0 Licensed](https://img.shields.io/github/license/dani-garcia/vaultwarden.svg)](https://github.com/dani-garcia/vaultwarden/blob/master/LICENSE.txt) +[![Matrix Chat](https://img.shields.io/matrix/vaultwarden:matrix.org.svg?logo=matrix)](https://matrix.to/#/#vaultwarden:matrix.org) -Image is based on [Rust implementation of Bitwarden API](https://github.com/dani-garcia/bitwarden_rs). +Image is based on [Rust implementation of Bitwarden API](https://github.com/dani-garcia/vaultwarden). **This project is not associated with the [Bitwarden](https://bitwarden.com/) project nor 8bit Solutions LLC.** @@ -33,26 +32,26 @@ Basically full implementation of Bitwarden API is provided including: Pull the docker image and mount a volume from the host for persistent storage: ```sh -docker pull bitwardenrs/server:latest -docker run -d --name bitwarden -v /bw-data/:/data/ -p 80:80 bitwardenrs/server:latest +docker pull vaultwarden/server:latest +docker run -d --name vaultwarden -v /vw-data/:/data/ -p 80:80 vaultwarden/server:latest ``` This will preserve any persistent data under /bw-data/, you can adapt the path to whatever suits you. **IMPORTANT**: Some web browsers, like Chrome, disallow the use of Web Crypto APIs in insecure contexts. In this case, you might get an error like `Cannot read property 'importKey'`. To solve this problem, you need to access the web vault from HTTPS. 
-This can be configured in [bitwarden_rs directly](https://github.com/dani-garcia/bitwarden_rs/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/bitwarden_rs/wiki/Proxy-examples)). +This can be configured in [vaultwarden directly](https://github.com/dani-garcia/vaultwarden/wiki/Enabling-HTTPS) or using a third-party reverse proxy ([some examples](https://github.com/dani-garcia/vaultwarden/wiki/Proxy-examples)). If you have an available domain name, you can get HTTPS certificates with [Let's Encrypt](https://letsencrypt.org/), or you can generate self-signed certificates with utilities like [mkcert](https://github.com/FiloSottile/mkcert). Some proxies automatically do this step, like Caddy (see examples linked above). ## Usage -See the [bitwarden_rs wiki](https://github.com/dani-garcia/bitwarden_rs/wiki) for more information on how to configure and run the bitwarden_rs server. +See the [vaultwarden wiki](https://github.com/dani-garcia/vaultwarden/wiki) for more information on how to configure and run the vaultwarden server. ## Get in touch -To ask a question, offer suggestions or new features or to get help configuring or installing the software, please [use the forum](https://bitwardenrs.discourse.group/). +To ask a question, offer suggestions or new features or to get help configuring or installing the software, please [use the forum](https://vaultwarden.discourse.group/). -If you spot any bugs or crashes with bitwarden_rs itself, please [create an issue](https://github.com/dani-garcia/bitwarden_rs/issues/). Make sure there aren't any similar issues open, though! +If you spot any bugs or crashes with vaultwarden itself, please [create an issue](https://github.com/dani-garcia/vaultwarden/issues/). Make sure there aren't any similar issues open, though! -If you prefer to chat, we're usually hanging around at [#bitwarden_rs:matrix.org](https://matrix.to/#/#bitwarden_rs:matrix.org) room on Matrix. Feel free to join us! +If you prefer to chat, we're usually hanging around at [#vaultwarden:matrix.org](https://matrix.to/#/#vaultwarden:matrix.org) room on Matrix. Feel free to join us! ### Sponsors Thanks for your contribution to the project! diff --git a/docker/Dockerfile.buildx b/docker/Dockerfile.buildx index 9faf3968..ed0d23b3 100644 --- a/docker/Dockerfile.buildx +++ b/docker/Dockerfile.buildx @@ -1,7 +1,7 @@ # The cross-built images have the build arch (`amd64`) embedded in the image # manifest, rather than the target arch. For example: # -# $ docker inspect bitwardenrs/server:latest-armv7 | jq -r '.[]|.Architecture' +# $ docker inspect vaultwarden/server:latest-armv7 | jq -r '.[]|.Architecture' # amd64 # # Recent versions of Docker have started printing a warning when the image's diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 6f778d9e..94f910b0 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -44,26 +44,26 @@ # https://docs.docker.com/develop/develop-images/multistage-build/ # https://whitfin.io/speeding-up-rust-docker-builds/ ####################### VAULT BUILD IMAGE ####################### -{% set vault_version = "2.19.0b" %} -{% set vault_image_digest = "sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e" %} +{% set vault_version = "2.19.0d" %} +{% set vault_image_digest = "sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233" %} # The web-vault digest specifies a particular web-vault build on Docker Hub. 
# Using the digest instead of the tag name provides better security, # as the digest of an image is immutable, whereas a tag name can later # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v{{ vault_version }} -# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" bitwardenrs/web-vault:v{{ vault_version }} -# [bitwardenrs/web-vault@{{ vault_image_digest }}] +# $ docker pull vaultwarden/web-vault:v{{ vault_version }} +# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" vaultwarden/web-vault:v{{ vault_version }} +# [vaultwarden/web-vault@{{ vault_image_digest }}] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{ '{{' }}.RepoTags}}" bitwardenrs/web-vault@{{ vault_image_digest }} -# [bitwardenrs/web-vault:v{{ vault_version }}] +# $ docker image inspect --format "{{ '{{' }}.RepoTags}}" vaultwarden/web-vault@{{ vault_image_digest }} +# [vaultwarden/web-vault:v{{ vault_version }}] # -FROM bitwardenrs/web-vault@{{ vault_image_digest }} as vault +FROM vaultwarden/web-vault@{{ vault_image_digest }} as vault ########################## BUILD IMAGE ########################## FROM {{ build_stage_base_image }} as build @@ -189,7 +189,7 @@ RUN touch src/main.rs RUN cargo build --features ${DB} --release{{ package_arch_target_param }} {% if "alpine" in target_file %} {% if "armv7" in target_file %} -RUN musl-strip target/{{ package_arch_target }}/release/bitwarden_rs +RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden {% endif %} {% endif %} @@ -250,9 +250,9 @@ WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault {% if package_arch_target is defined %} -COPY --from=build /app/target/{{ package_arch_target }}/release/bitwarden_rs . +COPY --from=build /app/target/{{ package_arch_target }}/release/vaultwarden . {% else %} -COPY --from=build /app/target/release/bitwarden_rs . +COPY --from=build /app/target/release/vaultwarden . {% endif %} COPY docker/healthcheck.sh /healthcheck.sh diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index e0f6c70b..8152461f 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -11,21 +11,21 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.50 as build +FROM rust:1.51 as build # Debian-based builds support multidb ARG DB=sqlite,mysql,postgresql @@ -100,7 +100,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/release/bitwarden_rs . +COPY --from=build /app/target/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index 71b3130f..d5462bfe 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -11,21 +11,21 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## -FROM clux/muslrust:nightly-2021-02-22 as build +FROM clux/muslrust:nightly-2021-04-14 as build # Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time. ARG DB=sqlite,postgresql @@ -95,7 +95,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . 
COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/x86_64-unknown-linux-musl/release/bitwarden_rs . +COPY --from=build /app/target/x86_64-unknown-linux-musl/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index 937d192f..3669c4fb 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -11,21 +11,21 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.50 as build +FROM rust:1.51 as build # Debian-based builds support multidb ARG DB=sqlite,mysql,postgresql @@ -146,7 +146,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/bitwarden_rs . +COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index 2423ee16..5cfcacf5 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -11,21 +11,21 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.50 as build +FROM rust:1.51 as build # Debian-based builds support multidb ARG DB=sqlite,mysql,postgresql @@ -146,7 +146,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/bitwarden_rs . +COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index 4f612f72..09c1cf43 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -11,21 +11,21 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.50 as build +FROM rust:1.51 as build # Debian-based builds support multidb ARG DB=sqlite,mysql,postgresql @@ -146,7 +146,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . 
COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/bitwarden_rs . +COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index 430e043e..f9e6cc4d 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -11,18 +11,18 @@ # be changed to point to a malicious image. # # To verify the current digest for a given tag name: -# - From https://hub.docker.com/r/bitwardenrs/web-vault/tags, +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull bitwardenrs/web-vault:v2.19.0b -# $ docker image inspect --format "{{.RepoDigests}}" bitwardenrs/web-vault:v2.19.0b -# [bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e] +# $ docker pull vaultwarden/web-vault:v2.19.0d +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.19.0d +# [vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e -# [bitwardenrs/web-vault:v2.19.0b] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 +# [vaultwarden/web-vault:v2.19.0d] # -FROM bitwardenrs/web-vault@sha256:27631b913f5858895a3e109c5e701341b9d01e69818f5283e72a49fa545eb40e as vault +FROM vaultwarden/web-vault@sha256:a7bd6bc4db33bd45f723c4b1ac90918b7f80204560683cfc8efd9efd03a9b233 as vault ########################## BUILD IMAGE ########################## FROM messense/rust-musl-cross:armv7-musleabihf as build @@ -67,7 +67,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf -RUN musl-strip target/armv7-unknown-linux-musleabihf/release/bitwarden_rs +RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image @@ -101,7 +101,7 @@ EXPOSE 3012 WORKDIR / COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault -COPY --from=build /app/target/armv7-unknown-linux-musleabihf/release/bitwarden_rs . +COPY --from=build /app/target/armv7-unknown-linux-musleabihf/release/vaultwarden . COPY docker/healthcheck.sh /healthcheck.sh COPY docker/start.sh /start.sh diff --git a/docker/start.sh b/docker/start.sh index 39f591c2..993ddbd9 100755 --- a/docker/start.sh +++ b/docker/start.sh @@ -1,15 +1,15 @@ #!/bin/sh -if [ -r /etc/bitwarden_rs.sh ]; then - . /etc/bitwarden_rs.sh +if [ -r /etc/vaultwarden.sh ]; then + . /etc/vaultwarden.sh fi -if [ -d /etc/bitwarden_rs.d ]; then - for f in /etc/bitwarden_rs.d/*.sh; do +if [ -d /etc/vaultwarden.d ]; then + for f in /etc/vaultwarden.d/*.sh; do if [ -r $f ]; then . 
$f fi done fi -exec /bitwarden_rs "${@}" +exec /vaultwarden "${@}" diff --git a/hooks/README.md b/hooks/README.md index 0ad0383f..d198452e 100644 --- a/hooks/README.md +++ b/hooks/README.md @@ -10,7 +10,7 @@ Docker Hub hooks provide these predefined [environment variables](https://docs.d * `DOCKER_TAG`: the Docker repository tag being built. * `IMAGE_NAME`: the name and tag of the Docker repository being built. (This variable is a combination of `DOCKER_REPO:DOCKER_TAG`.) -The current multi-arch image build relies on the original bitwarden_rs Dockerfiles, which use cross-compilation for architectures other than `amd64`, and don't yet support all arch/distro combinations. However, cross-compilation is much faster than QEMU-based builds (e.g., using `docker buildx`). This situation may need to be revisited at some point. +The current multi-arch image build relies on the original vaultwarden Dockerfiles, which use cross-compilation for architectures other than `amd64`, and don't yet support all arch/distro combinations. However, cross-compilation is much faster than QEMU-based builds (e.g., using `docker buildx`). This situation may need to be revisited at some point. ## References diff --git a/hooks/build b/hooks/build index eeead8b4..f18c58bf 100755 --- a/hooks/build +++ b/hooks/build @@ -22,7 +22,7 @@ fi LABELS=( # https://github.com/opencontainers/image-spec/blob/master/annotations.md org.opencontainers.image.created="$(date --utc --iso-8601=seconds)" - org.opencontainers.image.documentation="https://github.com/dani-garcia/bitwarden_rs/wiki" + org.opencontainers.image.documentation="https://github.com/dani-garcia/vaultwarden/wiki" org.opencontainers.image.licenses="GPL-3.0-only" org.opencontainers.image.revision="${SOURCE_COMMIT}" org.opencontainers.image.source="${SOURCE_REPOSITORY_URL}" diff --git a/hooks/push b/hooks/push index 7b32da2e..f50e48b9 100755 --- a/hooks/push +++ b/hooks/push @@ -103,7 +103,7 @@ docker buildx build \ # (https://github.com/moby/moby/issues/41017). # # Note that we use `arm32v6` instead of `armv6` to be consistent with the -# existing bitwarden_rs tags, which adhere to the naming conventions of the +# existing vaultwarden tags, which adhere to the naming conventions of the # Docker per-architecture repos (e.g., https://hub.docker.com/u/arm32v6). # Unfortunately, these per-arch repo names aren't always consistent with the # corresponding platform (OS/arch/variant) IDs, particularly in the case of diff --git a/src/api/admin.rs b/src/api/admin.rs index 6b8ae6ee..63950379 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -476,7 +476,7 @@ fn get_github_api(url: &str) -> Result { fn has_http_access() -> bool { let http_access = get_reqwest_client(); - match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() { + match http_access.head("https://github.com/dani-garcia/vaultwarden").timeout(Duration::from_secs(10)).send() { Ok(r) => r.status().is_success(), _ => false, } @@ -518,12 +518,12 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already. 
diff --git a/hooks/README.md b/hooks/README.md
index 0ad0383f..d198452e 100644
--- a/hooks/README.md
+++ b/hooks/README.md
@@ -10,7 +10,7 @@ Docker Hub hooks provide these predefined [environment variables](https://docs.d
 * `DOCKER_TAG`: the Docker repository tag being built.
 * `IMAGE_NAME`: the name and tag of the Docker repository being built. (This variable is a combination of `DOCKER_REPO:DOCKER_TAG`.)
 
-The current multi-arch image build relies on the original bitwarden_rs Dockerfiles, which use cross-compilation for architectures other than `amd64`, and don't yet support all arch/distro combinations. However, cross-compilation is much faster than QEMU-based builds (e.g., using `docker buildx`). This situation may need to be revisited at some point.
+The current multi-arch image build relies on the original vaultwarden Dockerfiles, which use cross-compilation for architectures other than `amd64`, and don't yet support all arch/distro combinations. However, cross-compilation is much faster than QEMU-based builds (e.g., using `docker buildx`). This situation may need to be revisited at some point.
 
 ## References
 
diff --git a/hooks/build b/hooks/build
index eeead8b4..f18c58bf 100755
--- a/hooks/build
+++ b/hooks/build
@@ -22,7 +22,7 @@ fi
 LABELS=(
     # https://github.com/opencontainers/image-spec/blob/master/annotations.md
     org.opencontainers.image.created="$(date --utc --iso-8601=seconds)"
-    org.opencontainers.image.documentation="https://github.com/dani-garcia/bitwarden_rs/wiki"
+    org.opencontainers.image.documentation="https://github.com/dani-garcia/vaultwarden/wiki"
     org.opencontainers.image.licenses="GPL-3.0-only"
     org.opencontainers.image.revision="${SOURCE_COMMIT}"
     org.opencontainers.image.source="${SOURCE_REPOSITORY_URL}"
diff --git a/hooks/push b/hooks/push
index 7b32da2e..f50e48b9 100755
--- a/hooks/push
+++ b/hooks/push
@@ -103,7 +103,7 @@ docker buildx build \
 # (https://github.com/moby/moby/issues/41017).
 #
 # Note that we use `arm32v6` instead of `armv6` to be consistent with the
-# existing bitwarden_rs tags, which adhere to the naming conventions of the
+# existing vaultwarden tags, which adhere to the naming conventions of the
 # Docker per-architecture repos (e.g., https://hub.docker.com/u/arm32v6).
 # Unfortunately, these per-arch repo names aren't always consistent with the
 # corresponding platform (OS/arch/variant) IDs, particularly in the case of
diff --git a/src/api/admin.rs b/src/api/admin.rs
index 6b8ae6ee..63950379 100644
--- a/src/api/admin.rs
+++ b/src/api/admin.rs
@@ -476,7 +476,7 @@ fn get_github_api(url: &str) -> Result {
 fn has_http_access() -> bool {
     let http_access = get_reqwest_client();
 
-    match http_access.head("https://github.com/dani-garcia/bitwarden_rs").timeout(Duration::from_secs(10)).send() {
+    match http_access.head("https://github.com/dani-garcia/vaultwarden").timeout(Duration::from_secs(10)).send() {
         Ok(r) => r.status().is_success(),
         _ => false,
     }
@@ -518,12 +518,12 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu
     // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already.
     let (latest_release, latest_commit, latest_web_build) = if has_http_access {
         (
-            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest")
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest")
             {
                 Ok(r) => r.tag_name,
                 _ => "-".to_string(),
             },
-            match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/bitwarden_rs/commits/master") {
+            match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/vaultwarden/commits/master") {
                 Ok(mut c) => {
                     c.sha.truncate(8);
                     c.sha
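The diagnostics page above polls two GitHub endpoints and, per the TODO, shares GitHub's 60-requests-per-hour unauthenticated quota. A quick manual check of the same endpoints, assuming curl and jq are available locally:

    # Latest release tag and latest master commit, as used by the admin diagnostics page
    curl -s https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest | jq -r '.tag_name'
    curl -s https://api.github.com/repos/dani-garcia/vaultwarden/commits/master | jq -r '.sha[0:8]'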
diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs
index c9390cf7..3888075b 100644
--- a/src/api/core/accounts.rs
+++ b/src/api/core/accounts.rs
@@ -95,7 +95,7 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult {
         }
         None => {
             // Order is important here; the invitation check must come first
-            // because the bitwarden_rs admin can invite anyone, regardless
+            // because the vaultwarden admin can invite anyone, regardless
             // of other signup restrictions.
             if Invitation::take(&data.Email, &conn) || CONFIG.is_signup_allowed(&data.Email) {
                 User::new(data.Email.clone())
diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs
index fa9fd6ed..0f655f76 100644
--- a/src/api/core/ciphers.rs
+++ b/src/api/core/ciphers.rs
@@ -25,7 +25,7 @@ pub fn routes() -> Vec {
     // whether the user is an owner/admin of the relevant org, and if so,
     // allows the operation unconditionally.
     //
-    // bitwarden_rs factors in the org owner/admin status as part of
+    // vaultwarden factors in the org owner/admin status as part of
     // determining the write accessibility of a cipher, so most
     // admin/non-admin implementations can be shared.
     routes![
diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs
index 20d03a7c..5ca87085 100644
--- a/src/api/core/two_factor/duo.rs
+++ b/src/api/core/two_factor/duo.rs
@@ -207,7 +207,7 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em
     client
         .request(m, &url)
         .basic_auth(username, Some(password))
-        .header(header::USER_AGENT, "bitwarden_rs:Duo/1.0 (Rust)")
+        .header(header::USER_AGENT, "vaultwarden:Duo/1.0 (Rust)")
         .header(header::DATE, date)
         .send()?
         .error_for_status()?;
diff --git a/src/config.rs b/src/config.rs
index fa6ac87e..fa7db32c 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -359,7 +359,7 @@ make_config! {
     /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0,
     /// otherwise it will delete them and they won't be downloaded again.
     disable_icon_download: bool, true, def, false;
-    /// Allow new signups |> Controls whether new users can register. Users can be invited by the bitwarden_rs admin even if this is disabled
+    /// Allow new signups |> Controls whether new users can register. Users can be invited by the vaultwarden admin even if this is disabled
     signups_allowed: bool, true, def, true;
     /// Require email verification on signups. This will prevent logins from succeeding until the address has been verified
     signups_verify: bool, true, def, false;
@@ -385,7 +385,7 @@
     admin_token: Pass, true, option;
 
     /// Invitation organization name |> Name shown in the invitation emails that don't come from a specific organization
-    invitation_org_name: String, true, def, "Bitwarden_RS".to_string();
+    invitation_org_name: String, true, def, "Vaultwarden".to_string();
 },
 
 /// Advanced settings
@@ -434,7 +434,7 @@ make_config! {
     /// Log level
     log_level: String, false, def, "Info".to_string();
 
-    /// Enable DB WAL |> Turning this off might lead to worse performance, but might help if using bitwarden_rs on some exotic filesystems,
+    /// Enable DB WAL |> Turning this off might lead to worse performance, but might help if using vaultwarden on some exotic filesystems,
     /// that do not support WAL. Please make sure you read project wiki on the topic before changing this setting.
     enable_db_wal: bool, false, def, true;
 
@@ -489,7 +489,7 @@ make_config! {
     /// From Address
     smtp_from: String, true, def, String::new();
     /// From Name
-    smtp_from_name: String, true, def, "Bitwarden_RS".to_string();
+    smtp_from_name: String, true, def, "Vaultwarden".to_string();
     /// Username
     smtp_username: String, true, option;
     /// Password
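The renamed defaults above only change what new installations see; each option can still be overridden through its upper-cased environment variable, which is how make_config! exposes settings at runtime. A hypothetical .env-style fragment with example values only:

    # Example overrides for options whose defaults were renamed in this patch
    SIGNUPS_ALLOWED=false
    INVITATION_ORG_NAME=Example Org
    SMTP_FROM_NAME=Example Vault
    ENABLE_DB_WAL=true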
diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs
index 99a5122d..09247c44 100644
--- a/src/db/models/cipher.rs
+++ b/src/db/models/cipher.rs
@@ -125,7 +125,7 @@ impl Cipher {
 
         // There are three types of cipher response models in upstream
        // Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order
-        // of increasing level of detail). bitwarden_rs currently only
+        // of increasing level of detail). vaultwarden currently only
         // supports the "cipherDetails" type, though it seems like the
         // Bitwarden clients will ignore extra fields.
         //
diff --git a/src/main.rs b/src/main.rs
index d35cdfbf..cfd945ad 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -64,10 +64,10 @@ fn main() {
 }
 
 const HELP: &str = "\
-        A Bitwarden API server written in Rust
+        Alternative implementation of the Bitwarden server API written in Rust
 
         USAGE:
-            bitwarden_rs
+            vaultwarden
 
         FLAGS:
             -h, --help       Prints help information
@@ -79,18 +79,18 @@ fn parse_args() {
     let mut pargs = pico_args::Arguments::from_env();
 
     if pargs.contains(["-h", "--help"]) {
-        println!("bitwarden_rs {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
+        println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
         print!("{}", HELP);
         exit(0);
     } else if pargs.contains(["-v", "--version"]) {
-        println!("bitwarden_rs {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
+        println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION));
         exit(0);
     }
 }
 
 fn launch_info() {
     println!("/--------------------------------------------------------------------\\");
-    println!("|                       Starting Bitwarden_RS                       |");
+    println!("|                        Starting Vaultwarden                        |");
 
     if let Some(version) = option_env!("BWRS_VERSION") {
         println!("|{:^68}|", format!("Version {}", version));
@@ -102,7 +102,7 @@ fn launch_info() {
     println!("|  Send usage/configuration questions or feature requests to:       |");
     println!("|  https://bitwardenrs.discourse.group/                              |");
     println!("|  Report suspected bugs/issues in the software itself at:          |");
-    println!("|  https://github.com/dani-garcia/bitwarden_rs/issues/new           |");
+    println!("|  https://github.com/dani-garcia/vaultwarden/issues/new            |");
     println!("\\--------------------------------------------------------------------/\n");
 }
 
@@ -207,7 +207,7 @@ fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
     let syslog_fmt = syslog::Formatter3164 {
         facility: syslog::Facility::LOG_USER,
         hostname: None,
-        process: "bitwarden_rs".into(),
+        process: "vaultwarden".into(),
         pid: 0,
     };
 
@@ -304,7 +304,7 @@ fn check_web_vault() {
             "Web vault is not found at '{}'. To install it, please follow the steps in: ",
             CONFIG.web_vault_folder()
         );
-        error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
+        error!("https://github.com/dani-garcia/vaultwarden/wiki/Building-binary#install-the-web-vault");
         error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
         exit(1);
     }
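The renamed binary still takes its version from option_env!("BWRS_VERSION"), so the string printed by -h/-v is fixed at compile time and falls back to NO_VERSION when the variable is unset. A rough sketch of a local build, with the version value purely illustrative:

    # BWRS_VERSION is read at compile time, not at runtime
    BWRS_VERSION=1.21.0 cargo build --release
    ./target/release/vaultwarden --version    # prints: vaultwarden 1.21.0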
diff --git a/src/static/templates/admin/base.hbs b/src/static/templates/admin/base.hbs
index e4f1ec85..e476309d 100644
--- a/src/static/templates/admin/base.hbs
+++ b/src/static/templates/admin/base.hbs
@@ -5,7 +5,7 @@
-    <title>Bitwarden_rs Admin Panel</title>
+    <title>Vaultwarden Admin Panel</title>
- - netDpay
+ + netdadaltd
- netDpay
+ netDada Ltd.