Mirror of https://github.com/dani-garcia/vaultwarden.git, synchronized 2024-11-25 05:40:29 +01:00

Merge branch 'rocket-0.4'

This commit is contained in:
Daniel García 2018-12-13 17:49:55 +01:00
Commit a1272c7190
No GPG key could be found for this signature
GPG key ID: FC8A7D14C3CD543A
44 changed files with 1357 additions and 1968 deletions

.env (11 changed lines)
Show file

@@ -18,6 +18,17 @@
 # WEBSOCKET_ADDRESS=0.0.0.0
 # WEBSOCKET_PORT=3012
+## Enable extended logging
+## This shows timestamps and allows logging to file and to syslog
+### To enable logging to file, use the LOG_FILE env variable
+### To enable syslog, you need to compile with `cargo build --features=enable_syslog'
+# EXTENDED_LOGGING=true
+## Logging to file
+## This requires extended logging
+## It's recommended to also set 'ROCKET_CLI_COLORS=off'
+# LOG_FILE=/path/to/log
 ## Controls if new users can register
 # SIGNUPS_ALLOWED=true
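The options added here describe an opt-in logging pipeline: EXTENDED_LOGGING enables timestamped output, LOG_FILE adds a file target on top of it, and syslog support has to be compiled in via the enable_syslog feature. A minimal sketch of what a fern-based initialization honouring these settings could look like; the function name and format string are illustrative assumptions, not code from this commit:

// Hedged sketch: wiring EXTENDED_LOGGING / LOG_FILE into a fern logger.
fn init_logging(log_file: Option<&str>) -> Result<(), fern::InitError> {
    let mut logger = fern::Dispatch::new()
        .format(|out, message, record| {
            // Extended logging: prefix each record with timestamp, target and level.
            out.finish(format_args!(
                "[{}][{}][{}] {}",
                chrono::Local::now().format("%Y-%m-%d %H:%M:%S"),
                record.target(),
                record.level(),
                message
            ))
        })
        .level(log::LevelFilter::Info)
        .chain(std::io::stdout());

    // LOG_FILE: also write to the configured file when it is set.
    if let Some(path) = log_file {
        logger = logger.chain(fern::log_file(path)?);
    }

    logger.apply()?;
    Ok(())
}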

Show file

@@ -5,3 +5,5 @@ dist: trusty # so we get a VM with higher specs
 cache: cargo
 rust:
 - nightly
+script:
+- cargo build --verbose --all-features

Cargo.lock (generated, 853 changed lines)

File diff suppressed because it is too large

Show file

@@ -2,21 +2,24 @@
 name = "bitwarden_rs"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
+edition = "2018"
+[features]
+enable_syslog = ["syslog", "fern/syslog-4"]
 [dependencies]
 # Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
-rocket = { version = "0.3.17", features = ["tls"] }
-rocket_codegen = "0.3.17"
-rocket_contrib = "0.3.17"
+rocket = { version = "0.4.0", features = ["tls"], default-features = false }
+rocket_contrib = "0.4.0"
 # HTTP client
-reqwest = "0.9.2"
+reqwest = "0.9.5"
 # multipart/form-data support
-multipart = "0.15.3"
+multipart = "0.15.4"
 # WebSockets library
-ws = "0.7.8"
+ws = "0.7.9"
 # MessagePack library
 rmpv = "0.4.0"
@@ -25,9 +28,14 @@ rmpv = "0.4.0"
 chashmap = "2.2.0"
 # A generic serialization/deserialization framework
-serde = "1.0.79"
-serde_derive = "1.0.79"
-serde_json = "1.0.31"
+serde = "1.0.82"
+serde_derive = "1.0.82"
+serde_json = "1.0.33"
+# Logging
+log = "0.4.6"
+fern = "0.5.7"
+syslog = { version = "4.0.1", optional = true }
 # A safe, extensible ORM and Query builder
 diesel = { version = "1.3.3", features = ["sqlite", "chrono", "r2d2"] }
@@ -37,7 +45,7 @@ diesel_migrations = { version = "1.3.0", features = ["sqlite"] }
 libsqlite3-sys = { version = "0.9.3", features = ["bundled"] }
 # Crypto library
-ring = { version = "= 0.11.0", features = ["rsa_signing"] }
+ring = { version = "0.13.5", features = ["rsa_signing"] }
 # UUID generation
 uuid = { version = "0.7.1", features = ["v4"] }
@@ -52,7 +60,7 @@ oath = "0.10.2"
 data-encoding = "2.1.1"
 # JWT library
-jsonwebtoken = "= 4.0.1"
+jsonwebtoken = "5.0.1"
 # U2F library
 u2f = "0.1.2"
@@ -64,7 +72,7 @@ yubico = { version = "=0.4.0", features = ["online"], default-features = false }
 dotenv = { version = "0.13.0", default-features = false }
 # Lazy static macro
-lazy_static = "1.1.0"
+lazy_static = { version = "1.2.0", features = ["nightly"] }
 # Numerical libraries
 num-traits = "0.2.6"
@@ -73,20 +81,21 @@ num-derive = "0.2.3"
 # Email libraries
 lettre = "0.9.0"
 lettre_email = "0.9.0"
-native-tls = "0.2.1"
+native-tls = "0.2.2"
 # Number encoding library
-byteorder = "1.2.6"
+byteorder = "1.2.7"
 [patch.crates-io]
-# Make jwt use ring 0.11, to match rocket
-jsonwebtoken = { path = "libs/jsonwebtoken" }
+# Add support for Timestamp type
 rmp = { git = 'https://github.com/dani-garcia/msgpack-rust' }
+# Use new native_tls version 0.2
 lettre = { git = 'https://github.com/lettre/lettre', rev = 'c988b1760ad81' }
 lettre_email = { git = 'https://github.com/lettre/lettre', rev = 'c988b1760ad81' }
 # Version 0.1.2 from crates.io lacks a commit that fixes a certificate error
-u2f = { git = 'https://github.com/wisespace-io/u2f-rs', rev = '193de35093a44' }
+u2f = { git = 'https://github.com/wisespace-io/u2f-rs', rev = '75b9fa5afb4c5' }
 # Allows optional libusb support
 yubico = { git = 'https://github.com/dani-garcia/yubico-rs' }
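The new enable_syslog feature makes the syslog crate optional and switches on fern's syslog-4 integration. A rough sketch of how such a build-time feature gate is typically consumed; the helper name and formatter values are assumptions, not taken from this diff:

// Hedged sketch: only compile syslog support when the feature is enabled.
#[cfg(feature = "enable_syslog")]
fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
    let syslog_fmt = syslog::Formatter3164 {
        facility: syslog::Facility::LOG_USER,
        hostname: None,
        process: "bitwarden_rs".into(),
        pid: 0,
    };
    match syslog::unix(syslog_fmt) {
        Ok(sl) => logger.chain(sl),
        Err(e) => {
            eprintln!("Unable to connect to syslog: {:?}", e);
            logger
        }
    }
}

// Without the feature the helper is a no-op, so callers need no cfg themselves.
#[cfg(not(feature = "enable_syslog"))]
fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
    logger
}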

Show file

@@ -2,9 +2,9 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM node:8-alpine as vault
-ENV VAULT_VERSION "v2.5.0"
+FROM node:10-alpine as vault
+ENV VAULT_VERSION "v2.6.1"
 ENV URL "https://github.com/bitwarden/web.git"
@@ -41,7 +41,6 @@ WORKDIR /app
 # Copies over *only* your manifests and vendored dependencies
 COPY ./Cargo.* ./
-COPY ./libs ./libs
 COPY ./rust-toolchain ./rust-toolchain
 # Builds your dependencies and removes the
@@ -54,6 +53,9 @@ RUN find . -not -path "./target*" -delete
 # To avoid copying unneeded files, use .dockerignore
 COPY . .
+# Make sure that we actually build the project
+RUN touch src/main.rs
 # Builds again, this time it'll just be
 # your actual source files being built
 RUN cargo build --release
@@ -64,6 +66,7 @@ RUN cargo build --release
 FROM debian:stretch-slim
 ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 # Install needed libraries

Show file

@@ -2,9 +2,9 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM node:8-alpine as vault
-ENV VAULT_VERSION "v2.5.0"
+FROM node:10-alpine as vault
+ENV VAULT_VERSION "v2.6.1"
 ENV URL "https://github.com/bitwarden/web.git"
@@ -69,6 +69,7 @@ RUN cargo build --release --target=aarch64-unknown-linux-gnu -v
 FROM balenalib/aarch64-debian:stretch
 ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 RUN [ "cross-build-start" ]

Show file

@@ -2,9 +2,9 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM node:8-alpine as vault
-ENV VAULT_VERSION "v2.5.0"
+FROM node:10-alpine as vault
+ENV VAULT_VERSION "v2.6.1"
 ENV URL "https://github.com/bitwarden/web.git"
@@ -26,7 +26,7 @@ RUN npm run dist \
 ########################## BUILD IMAGE ##########################
 # Musl build image for statically compiled binary
-FROM clux/muslrust:nightly-2018-10-03 as build
+FROM clux/muslrust:nightly-2018-11-30 as build
 ENV USER "root"
@@ -45,6 +45,7 @@ RUN cargo build --release
 FROM alpine:3.8
 ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 ENV SSL_CERT_DIR=/etc/ssl/certs

Show file

@@ -2,9 +2,9 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM node:8-alpine as vault
-ENV VAULT_VERSION "v2.5.0"
+FROM node:10-alpine as vault
+ENV VAULT_VERSION "v2.6.1"
 ENV URL "https://github.com/bitwarden/web.git"
@@ -69,6 +69,7 @@ RUN cargo build --release --target=armv7-unknown-linux-gnueabihf -v
 FROM balenalib/armv7hf-debian:stretch
 ENV ROCKET_ENV "staging"
+ENV ROCKET_PORT=80
 ENV ROCKET_WORKERS=10
 RUN [ "cross-build-start" ]

Show file

@@ -1,20 +0,0 @@
[package]
name = "jsonwebtoken"
version = "4.0.1"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
license = "MIT"
readme = "README.md"
description = "Create and parse JWT in a strongly typed way."
homepage = "https://github.com/Keats/rust-jwt"
repository = "https://github.com/Keats/rust-jwt"
keywords = ["jwt", "web", "api", "token", "json"]
[dependencies]
error-chain = { version = "0.11", default-features = false }
serde_json = "1.0"
serde_derive = "1.0"
serde = "1.0"
ring = { version = "0.11.0", features = ["rsa_signing", "dev_urandom_fallback"] }
base64 = "0.9"
untrusted = "0.5"
chrono = "0.4"

Show file

@@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Vincent Prouillet
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Show file

@@ -1,120 +0,0 @@
use std::sync::Arc;
use base64;
use ring::{rand, digest, hmac, signature};
use ring::constant_time::verify_slices_are_equal;
use untrusted;
use errors::{Result, ErrorKind};
/// The algorithms supported for signing/verifying
#[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)]
pub enum Algorithm {
/// HMAC using SHA-256
HS256,
/// HMAC using SHA-384
HS384,
/// HMAC using SHA-512
HS512,
/// RSASSA-PKCS1-v1_5 using SHA-256
RS256,
/// RSASSA-PKCS1-v1_5 using SHA-384
RS384,
/// RSASSA-PKCS1-v1_5 using SHA-512
RS512,
}
/// The actual HS signing + encoding
fn sign_hmac(alg: &'static digest::Algorithm, key: &[u8], signing_input: &str) -> Result<String> {
let signing_key = hmac::SigningKey::new(alg, key);
let digest = hmac::sign(&signing_key, signing_input.as_bytes());
Ok(
base64::encode_config::<hmac::Signature>(&digest, base64::URL_SAFE_NO_PAD)
)
}
/// The actual RSA signing + encoding
/// Taken from Ring doc https://briansmith.org/rustdoc/ring/signature/index.html
fn sign_rsa(alg: Algorithm, key: &[u8], signing_input: &str) -> Result<String> {
let ring_alg = match alg {
Algorithm::RS256 => &signature::RSA_PKCS1_SHA256,
Algorithm::RS384 => &signature::RSA_PKCS1_SHA384,
Algorithm::RS512 => &signature::RSA_PKCS1_SHA512,
_ => unreachable!(),
};
let key_pair = Arc::new(
signature::RSAKeyPair::from_der(untrusted::Input::from(key))
.map_err(|_| ErrorKind::InvalidKey)?
);
let mut signing_state = signature::RSASigningState::new(key_pair)
.map_err(|_| ErrorKind::InvalidKey)?;
let mut signature = vec![0; signing_state.key_pair().public_modulus_len()];
let rng = rand::SystemRandom::new();
signing_state.sign(ring_alg, &rng, signing_input.as_bytes(), &mut signature)
.map_err(|_| ErrorKind::InvalidKey)?;
Ok(
base64::encode_config::<[u8]>(&signature, base64::URL_SAFE_NO_PAD)
)
}
/// Take the payload of a JWT, sign it using the algorithm given and return
/// the base64 url safe encoded of the result.
///
/// Only use this function if you want to do something other than JWT.
pub fn sign(signing_input: &str, key: &[u8], algorithm: Algorithm) -> Result<String> {
match algorithm {
Algorithm::HS256 => sign_hmac(&digest::SHA256, key, signing_input),
Algorithm::HS384 => sign_hmac(&digest::SHA384, key, signing_input),
Algorithm::HS512 => sign_hmac(&digest::SHA512, key, signing_input),
Algorithm::RS256 | Algorithm::RS384 | Algorithm::RS512 => sign_rsa(algorithm, key, signing_input),
// TODO: if PKCS1 is made prublic, remove the line above and uncomment below
// Algorithm::RS256 => sign_rsa(&signature::RSA_PKCS1_SHA256, key, signing_input),
// Algorithm::RS384 => sign_rsa(&signature::RSA_PKCS1_SHA384, key, signing_input),
// Algorithm::RS512 => sign_rsa(&signature::RSA_PKCS1_SHA512, key, signing_input),
}
}
/// See Ring RSA docs for more details
fn verify_rsa(alg: &signature::RSAParameters, signature: &str, signing_input: &str, key: &[u8]) -> Result<bool> {
let signature_bytes = base64::decode_config(signature, base64::URL_SAFE_NO_PAD)?;
let public_key_der = untrusted::Input::from(key);
let message = untrusted::Input::from(signing_input.as_bytes());
let expected_signature = untrusted::Input::from(signature_bytes.as_slice());
let res = signature::verify(alg, public_key_der, message, expected_signature);
Ok(res.is_ok())
}
/// Compares the signature given with a re-computed signature for HMAC or using the public key
/// for RSA.
///
/// Only use this function if you want to do something other than JWT.
///
/// `signature` is the signature part of a jwt (text after the second '.')
///
/// `signing_input` is base64(header) + "." + base64(claims)
pub fn verify(signature: &str, signing_input: &str, key: &[u8], algorithm: Algorithm) -> Result<bool> {
match algorithm {
Algorithm::HS256 | Algorithm::HS384 | Algorithm::HS512 => {
// we just re-sign the data with the key and compare if they are equal
let signed = sign(signing_input, key, algorithm)?;
Ok(verify_slices_are_equal(signature.as_ref(), signed.as_ref()).is_ok())
},
Algorithm::RS256 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA256, signature, signing_input, key),
Algorithm::RS384 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA384, signature, signing_input, key),
Algorithm::RS512 => verify_rsa(&signature::RSA_PKCS1_2048_8192_SHA512, signature, signing_input, key),
}
}
impl Default for Algorithm {
fn default() -> Self {
Algorithm::HS256
}
}

Show file

@@ -1,68 +0,0 @@
use base64;
use serde_json;
use ring;
error_chain! {
errors {
/// When a token doesn't have a valid JWT shape
InvalidToken {
description("invalid token")
display("Invalid token")
}
/// When the signature doesn't match
InvalidSignature {
description("invalid signature")
display("Invalid signature")
}
/// When the secret given is not a valid RSA key
InvalidKey {
description("invalid key")
display("Invalid Key")
}
// Validation error
/// When a tokens `exp` claim indicates that it has expired
ExpiredSignature {
description("expired signature")
display("Expired Signature")
}
/// When a tokens `iss` claim does not match the expected issuer
InvalidIssuer {
description("invalid issuer")
display("Invalid Issuer")
}
/// When a tokens `aud` claim does not match one of the expected audience values
InvalidAudience {
description("invalid audience")
display("Invalid Audience")
}
/// When a tokens `aud` claim does not match one of the expected audience values
InvalidSubject {
description("invalid subject")
display("Invalid Subject")
}
/// When a tokens `iat` claim is in the future
InvalidIssuedAt {
description("invalid issued at")
display("Invalid Issued At")
}
/// When a tokens nbf claim represents a time in the future
ImmatureSignature {
description("immature signature")
display("Immature Signature")
}
/// When the algorithm in the header doesn't match the one passed to `decode`
InvalidAlgorithm {
description("Invalid algorithm")
display("Invalid Algorithm")
}
}
foreign_links {
Unspecified(ring::error::Unspecified) #[doc = "An error happened while signing/verifying a token with RSA"];
Base64(base64::DecodeError) #[doc = "An error happened while decoding some base64 text"];
Json(serde_json::Error) #[doc = "An error happened while serializing/deserializing JSON"];
Utf8(::std::string::FromUtf8Error) #[doc = "An error happened while trying to convert the result of base64 decoding to a String"];
}
}

Show file

@@ -1,64 +0,0 @@
use crypto::Algorithm;
/// A basic JWT header, the alg defaults to HS256 and typ is automatically
/// set to `JWT`. All the other fields are optional.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Header {
/// The type of JWS: it can only be "JWT" here
///
/// Defined in [RFC7515#4.1.9](https://tools.ietf.org/html/rfc7515#section-4.1.9).
#[serde(skip_serializing_if = "Option::is_none")]
pub typ: Option<String>,
/// The algorithm used
///
/// Defined in [RFC7515#4.1.1](https://tools.ietf.org/html/rfc7515#section-4.1.1).
pub alg: Algorithm,
/// Content type
///
/// Defined in [RFC7519#5.2](https://tools.ietf.org/html/rfc7519#section-5.2).
#[serde(skip_serializing_if = "Option::is_none")]
pub cty: Option<String>,
/// JSON Key URL
///
/// Defined in [RFC7515#4.1.2](https://tools.ietf.org/html/rfc7515#section-4.1.2).
#[serde(skip_serializing_if = "Option::is_none")]
pub jku: Option<String>,
/// Key ID
///
/// Defined in [RFC7515#4.1.4](https://tools.ietf.org/html/rfc7515#section-4.1.4).
#[serde(skip_serializing_if = "Option::is_none")]
pub kid: Option<String>,
/// X.509 URL
///
/// Defined in [RFC7515#4.1.5](https://tools.ietf.org/html/rfc7515#section-4.1.5).
#[serde(skip_serializing_if = "Option::is_none")]
pub x5u: Option<String>,
/// X.509 certificate thumbprint
///
/// Defined in [RFC7515#4.1.7](https://tools.ietf.org/html/rfc7515#section-4.1.7).
#[serde(skip_serializing_if = "Option::is_none")]
pub x5t: Option<String>,
}
impl Header {
/// Returns a JWT header with the algorithm given
pub fn new(algorithm: Algorithm) -> Header {
Header {
typ: Some("JWT".to_string()),
alg: algorithm,
cty: None,
jku: None,
kid: None,
x5u: None,
x5t: None,
}
}
}
impl Default for Header {
/// Returns a JWT header using the default Algorithm, HS256
fn default() -> Self {
Header::new(Algorithm::default())
}
}

Show file

@@ -1,142 +0,0 @@
//! Create and parses JWT (JSON Web Tokens)
//!
//! Documentation: [stable](https://docs.rs/jsonwebtoken/)
#![recursion_limit = "300"]
#![deny(missing_docs)]
#![allow(unused_doc_comments)]
#![allow(renamed_and_removed_lints)]
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde;
extern crate base64;
extern crate ring;
extern crate untrusted;
extern crate chrono;
/// All the errors, generated using error-chain
pub mod errors;
mod header;
mod crypto;
mod serialization;
mod validation;
pub use header::Header;
pub use crypto::{
Algorithm,
sign,
verify,
};
pub use validation::Validation;
pub use serialization::TokenData;
use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use errors::{Result, ErrorKind};
use serialization::{from_jwt_part, from_jwt_part_claims, to_jwt_part};
use validation::{validate};
/// Encode the header and claims given and sign the payload using the algorithm from the header and the key
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate serde_derive;
/// use jsonwebtoken::{encode, Algorithm, Header};
///
/// /// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
/// sub: String,
/// company: String
/// }
///
/// let my_claims = Claims {
/// sub: "b@b.com".to_owned(),
/// company: "ACME".to_owned()
/// };
///
/// // my_claims is a struct that implements Serialize
/// // This will create a JWT using HS256 as algorithm
/// let token = encode(&Header::default(), &my_claims, "secret".as_ref()).unwrap();
/// ```
pub fn encode<T: Serialize>(header: &Header, claims: &T, key: &[u8]) -> Result<String> {
let encoded_header = to_jwt_part(&header)?;
let encoded_claims = to_jwt_part(&claims)?;
let signing_input = [encoded_header.as_ref(), encoded_claims.as_ref()].join(".");
let signature = sign(&*signing_input, key.as_ref(), header.alg)?;
Ok([signing_input, signature].join("."))
}
/// Used in decode: takes the result of a rsplit and ensure we only get 2 parts
/// Errors if we don't
macro_rules! expect_two {
($iter:expr) => {{
let mut i = $iter;
match (i.next(), i.next(), i.next()) {
(Some(first), Some(second), None) => (first, second),
_ => return Err(ErrorKind::InvalidToken.into())
}
}}
}
/// Decode a token into a struct containing 2 fields: `claims` and `header`.
///
/// If the token or its signature is invalid or the claims fail validation, it will return an error.
///
/// ```rust,ignore
/// #[macro_use]
/// extern crate serde_derive;
/// use jsonwebtoken::{decode, Validation, Algorithm};
///
/// #[derive(Debug, Serialize, Deserialize)]
/// struct Claims {
/// sub: String,
/// company: String
/// }
///
/// let token = "a.jwt.token".to_string();
/// // Claims is a struct that implements Deserialize
/// let token_data = decode::<Claims>(&token, "secret", &Validation::new(Algorithm::HS256));
/// ```
pub fn decode<T: DeserializeOwned>(token: &str, key: &[u8], validation: &Validation) -> Result<TokenData<T>> {
let (signature, signing_input) = expect_two!(token.rsplitn(2, '.'));
let (claims, header) = expect_two!(signing_input.rsplitn(2, '.'));
let header: Header = from_jwt_part(header)?;
if !verify(signature, signing_input, key, header.alg)? {
return Err(ErrorKind::InvalidSignature.into());
}
if !validation.algorithms.contains(&header.alg) {
return Err(ErrorKind::InvalidAlgorithm.into());
}
let (decoded_claims, claims_map): (T, _) = from_jwt_part_claims(claims)?;
validate(&claims_map, validation)?;
Ok(TokenData { header: header, claims: decoded_claims })
}
/// Decode a token and return the Header. This is not doing any kind of validation: it is meant to be
/// used when you don't know which `alg` the token is using and want to find out.
///
/// If the token has an invalid format, it will return an error.
///
/// ```rust,ignore
/// use jsonwebtoken::decode_header;
///
/// let token = "a.jwt.token".to_string();
/// let header = decode_header(&token);
/// ```
pub fn decode_header(token: &str) -> Result<Header> {
let (_, signing_input) = expect_two!(token.rsplitn(2, '.'));
let (_, header) = expect_two!(signing_input.rsplitn(2, '.'));
from_jwt_part(header)
}

Show file

@@ -1,42 +0,0 @@
use base64;
use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use serde_json::{from_str, to_string, Value};
use serde_json::map::Map;
use errors::{Result};
use header::Header;
/// The return type of a successful call to decode
#[derive(Debug)]
pub struct TokenData<T> {
/// The decoded JWT header
pub header: Header,
/// The decoded JWT claims
pub claims: T
}
/// Serializes to JSON and encodes to base64
pub fn to_jwt_part<T: Serialize>(input: &T) -> Result<String> {
let encoded = to_string(input)?;
Ok(base64::encode_config(encoded.as_bytes(), base64::URL_SAFE_NO_PAD))
}
/// Decodes from base64 and deserializes from JSON to a struct
pub fn from_jwt_part<B: AsRef<str>, T: DeserializeOwned>(encoded: B) -> Result<T> {
let decoded = base64::decode_config(encoded.as_ref(), base64::URL_SAFE_NO_PAD)?;
let s = String::from_utf8(decoded)?;
Ok(from_str(&s)?)
}
/// Decodes from base64 and deserializes from JSON to a struct AND a hashmap
pub fn from_jwt_part_claims<B: AsRef<str>, T: DeserializeOwned>(encoded: B) -> Result<(T, Map<String, Value>)> {
let decoded = base64::decode_config(encoded.as_ref(), base64::URL_SAFE_NO_PAD)?;
let s = String::from_utf8(decoded)?;
let claims: T = from_str(&s)?;
let map: Map<_,_> = from_str(&s)?;
Ok((claims, map))
}

Show file

@@ -1,377 +0,0 @@
use chrono::Utc;
use serde::ser::Serialize;
use serde_json::{Value, from_value, to_value};
use serde_json::map::Map;
use errors::{Result, ErrorKind};
use crypto::Algorithm;
/// Contains the various validations that are applied after decoding a token.
///
/// All time validation happen on UTC timestamps.
///
/// ```rust
/// use jsonwebtoken::Validation;
///
/// // Default value
/// let validation = Validation::default();
///
/// // Changing one parameter
/// let mut validation = Validation {leeway: 60, ..Default::default()};
///
/// // Setting audience
/// let mut validation = Validation::default();
/// validation.set_audience(&"Me"); // string
/// validation.set_audience(&["Me", "You"]); // array of strings
/// ```
#[derive(Debug, Clone, PartialEq)]
pub struct Validation {
/// Add some leeway (in seconds) to the `exp`, `iat` and `nbf` validation to
/// account for clock skew.
///
/// Defaults to `0`.
pub leeway: i64,
/// Whether to validate the `exp` field.
///
/// It will return an error if the time in the `exp` field is past.
///
/// Defaults to `true`.
pub validate_exp: bool,
/// Whether to validate the `iat` field.
///
/// It will return an error if the time in the `iat` field is in the future.
///
/// Defaults to `true`.
pub validate_iat: bool,
/// Whether to validate the `nbf` field.
///
/// It will return an error if the current timestamp is before the time in the `nbf` field.
///
/// Defaults to `true`.
pub validate_nbf: bool,
/// If it contains a value, the validation will check that the `aud` field is the same as the
/// one provided and will error otherwise.
/// Since `aud` can be either a String or a Vec<String> in the JWT spec, you will need to use
/// the [set_audience](struct.Validation.html#method.set_audience) method to set it.
///
/// Defaults to `None`.
pub aud: Option<Value>,
/// If it contains a value, the validation will check that the `iss` field is the same as the
/// one provided and will error otherwise.
///
/// Defaults to `None`.
pub iss: Option<String>,
/// If it contains a value, the validation will check that the `sub` field is the same as the
/// one provided and will error otherwise.
///
/// Defaults to `None`.
pub sub: Option<String>,
/// If it contains a value, the validation will check that the `alg` of the header is contained
/// in the ones provided and will error otherwise.
///
/// Defaults to `vec![Algorithm::HS256]`.
pub algorithms: Vec<Algorithm>,
}
impl Validation {
/// Create a default validation setup allowing the given alg
pub fn new(alg: Algorithm) -> Validation {
let mut validation = Validation::default();
validation.algorithms = vec![alg];
validation
}
/// Since `aud` can be either a String or an array of String in the JWT spec, this method will take
/// care of serializing the value.
pub fn set_audience<T: Serialize>(&mut self, audience: &T) {
self.aud = Some(to_value(audience).unwrap());
}
}
impl Default for Validation {
fn default() -> Validation {
Validation {
leeway: 0,
validate_exp: true,
validate_iat: true,
validate_nbf: true,
iss: None,
sub: None,
aud: None,
algorithms: vec![Algorithm::HS256],
}
}
}
pub fn validate(claims: &Map<String, Value>, options: &Validation) -> Result<()> {
let now = Utc::now().timestamp();
if let Some(iat) = claims.get("iat") {
if options.validate_iat && from_value::<i64>(iat.clone())? > now + options.leeway {
return Err(ErrorKind::InvalidIssuedAt.into());
}
}
if let Some(exp) = claims.get("exp") {
if options.validate_exp && from_value::<i64>(exp.clone())? < now - options.leeway {
return Err(ErrorKind::ExpiredSignature.into());
}
}
if let Some(nbf) = claims.get("nbf") {
if options.validate_nbf && from_value::<i64>(nbf.clone())? > now + options.leeway {
return Err(ErrorKind::ImmatureSignature.into());
}
}
if let Some(iss) = claims.get("iss") {
if let Some(ref correct_iss) = options.iss {
if from_value::<String>(iss.clone())? != *correct_iss {
return Err(ErrorKind::InvalidIssuer.into());
}
}
}
if let Some(sub) = claims.get("sub") {
if let Some(ref correct_sub) = options.sub {
if from_value::<String>(sub.clone())? != *correct_sub {
return Err(ErrorKind::InvalidSubject.into());
}
}
}
if let Some(aud) = claims.get("aud") {
if let Some(ref correct_aud) = options.aud {
if aud != correct_aud {
return Err(ErrorKind::InvalidAudience.into());
}
}
}
Ok(())
}
#[cfg(test)]
mod tests {
use serde_json::{to_value};
use serde_json::map::Map;
use chrono::Utc;
use super::{validate, Validation};
use errors::ErrorKind;
#[test]
fn iat_in_past_ok() {
let mut claims = Map::new();
claims.insert("iat".to_string(), to_value(Utc::now().timestamp() - 10000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_ok());
}
#[test]
fn iat_in_future_fails() {
let mut claims = Map::new();
claims.insert("iat".to_string(), to_value(Utc::now().timestamp() + 100000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::InvalidIssuedAt => (),
_ => assert!(false),
};
}
#[test]
fn iat_in_future_but_in_leeway_ok() {
let mut claims = Map::new();
claims.insert("iat".to_string(), to_value(Utc::now().timestamp() + 50).unwrap());
let validation = Validation {
leeway: 1000 * 60,
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn exp_in_future_ok() {
let mut claims = Map::new();
claims.insert("exp".to_string(), to_value(Utc::now().timestamp() + 10000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_ok());
}
#[test]
fn exp_in_past_fails() {
let mut claims = Map::new();
claims.insert("exp".to_string(), to_value(Utc::now().timestamp() - 100000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::ExpiredSignature => (),
_ => assert!(false),
};
}
#[test]
fn exp_in_past_but_in_leeway_ok() {
let mut claims = Map::new();
claims.insert("exp".to_string(), to_value(Utc::now().timestamp() - 500).unwrap());
let validation = Validation {
leeway: 1000 * 60,
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn nbf_in_past_ok() {
let mut claims = Map::new();
claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() - 10000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_ok());
}
#[test]
fn nbf_in_future_fails() {
let mut claims = Map::new();
claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() + 100000).unwrap());
let res = validate(&claims, &Validation::default());
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::ImmatureSignature => (),
_ => assert!(false),
};
}
#[test]
fn nbf_in_future_but_in_leeway_ok() {
let mut claims = Map::new();
claims.insert("nbf".to_string(), to_value(Utc::now().timestamp() + 500).unwrap());
let validation = Validation {
leeway: 1000 * 60,
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn iss_ok() {
let mut claims = Map::new();
claims.insert("iss".to_string(), to_value("Keats").unwrap());
let validation = Validation {
iss: Some("Keats".to_string()),
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn iss_not_matching_fails() {
let mut claims = Map::new();
claims.insert("iss".to_string(), to_value("Hacked").unwrap());
let validation = Validation {
iss: Some("Keats".to_string()),
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::InvalidIssuer => (),
_ => assert!(false),
};
}
#[test]
fn sub_ok() {
let mut claims = Map::new();
claims.insert("sub".to_string(), to_value("Keats").unwrap());
let validation = Validation {
sub: Some("Keats".to_string()),
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn sub_not_matching_fails() {
let mut claims = Map::new();
claims.insert("sub".to_string(), to_value("Hacked").unwrap());
let validation = Validation {
sub: Some("Keats".to_string()),
..Default::default()
};
let res = validate(&claims, &validation);
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::InvalidSubject => (),
_ => assert!(false),
};
}
#[test]
fn aud_string_ok() {
let mut claims = Map::new();
claims.insert("aud".to_string(), to_value("Everyone").unwrap());
let mut validation = Validation::default();
validation.set_audience(&"Everyone");
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn aud_array_of_string_ok() {
let mut claims = Map::new();
claims.insert("aud".to_string(), to_value(["UserA", "UserB"]).unwrap());
let mut validation = Validation::default();
validation.set_audience(&["UserA", "UserB"]);
let res = validate(&claims, &validation);
assert!(res.is_ok());
}
#[test]
fn aud_type_mismatch_fails() {
let mut claims = Map::new();
claims.insert("aud".to_string(), to_value("Everyone").unwrap());
let mut validation = Validation::default();
validation.set_audience(&["UserA", "UserB"]);
let res = validate(&claims, &validation);
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::InvalidAudience => (),
_ => assert!(false),
};
}
#[test]
fn aud_correct_type_not_matching_fails() {
let mut claims = Map::new();
claims.insert("aud".to_string(), to_value("Everyone").unwrap());
let mut validation = Validation::default();
validation.set_audience(&"None");
let res = validate(&claims, &validation);
assert!(res.is_err());
match res.unwrap_err().kind() {
&ErrorKind::InvalidAudience => (),
_ => assert!(false),
};
}
}

Show file

@@ -0,0 +1,3 @@
ALTER TABLE attachments
ADD COLUMN
key TEXT;
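This migration gives every attachment its own nullable encryption key; the ciphers endpoints below fill it in during key rotation (attachment.key = Some(...)). With Diesel, the schema and model would gain a matching nullable field. A rough sketch, with the column list inferred from the handler code and otherwise assumed:

// Hedged sketch of the corresponding Diesel schema/model change.
// Assumes diesel's macros (#[macro_use] extern crate diesel;) are in scope.
table! {
    attachments (id) {
        id -> Text,
        cipher_uuid -> Text,
        file_name -> Text,
        file_size -> Integer,
        key -> Nullable<Text>, // new column from this migration
    }
}

#[derive(Debug, Identifiable, Queryable, Insertable)]
#[table_name = "attachments"]
pub struct Attachment {
    pub id: String,
    pub cipher_uuid: String,
    pub file_name: String,
    pub file_size: i32,
    pub key: Option<String>, // maps to the nullable TEXT column
}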

Show file

@@ -1 +1 @@
-nightly-2018-10-03
+nightly-2018-12-01

Show file

@@ -1,13 +1,37 @@
-use rocket_contrib::Json;
-use db::DbConn;
-use db::models::*;
-use api::{PasswordData, JsonResult, EmptyResult, JsonUpcase, NumberOrString};
-use auth::Headers;
-use mail;
-use CONFIG;
+use rocket_contrib::json::Json;
+use crate::db::models::*;
+use crate::db::DbConn;
+use crate::api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData, UpdateType, WebSocketUsers};
+use crate::auth::Headers;
+use crate::mail;
+use crate::CONFIG;
use rocket::{Route, State};
pub fn routes() -> Vec<Route> {
routes![
register,
profile,
put_profile,
post_profile,
get_public_keys,
post_keys,
post_password,
post_kdf,
post_rotatekey,
post_sstamp,
post_email_token,
post_email,
delete_account,
post_delete_account,
revision_date,
password_hint,
prelogin,
]
}
 #[derive(Deserialize, Debug)]
 #[allow(non_snake_case)]
@@ -33,23 +57,22 @@ struct KeysData {
 fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
 let data: RegisterData = data.into_inner().data;
 let mut user = match User::find_by_mail(&data.Email, &conn) {
-Some(mut user) => {
+Some(user) => {
 if Invitation::take(&data.Email, &conn) {
 for mut user_org in UserOrganization::find_invited_by_user(&user.uuid, &conn).iter_mut() {
 user_org.status = UserOrgStatus::Accepted as i32;
 if user_org.save(&conn).is_err() {
 err!("Failed to accept user to organization")
 }
-};
+}
 user
 } else if CONFIG.signups_allowed {
 err!("Account with this email already exists")
 } else {
 err!("Registration not allowed")
 }
-},
+}
 None => {
 if CONFIG.signups_allowed || Invitation::take(&data.Email, &conn) {
 User::new(data.Email)
@@ -86,7 +109,7 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
 match user.save(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed to save user")
+Err(_) => err!("Failed to save user"),
 }
 }
@@ -122,7 +145,7 @@ fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -
 };
 match user.save(&conn) {
 Ok(()) => Ok(Json(user.to_json(&conn))),
-Err(_) => err!("Failed to save user profile")
+Err(_) => err!("Failed to save user profile"),
 }
 }
@@ -130,7 +153,7 @@ fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -
 fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonResult {
 let user = match User::find_by_uuid(&uuid, &conn) {
 Some(user) => user,
-None => err!("User doesn't exist")
+None => err!("User doesn't exist"),
 };
 Ok(Json(json!({
@@ -151,12 +174,10 @@ fn post_keys(data: JsonUpcase<KeysData>, headers: Headers, conn: DbConn) -> Json
 match user.save(&conn) {
 Ok(()) => Ok(Json(user.to_json(&conn))),
-Err(_) => err!("Failed to save the user's keys")
+Err(_) => err!("Failed to save the user's keys"),
 }
 }
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct ChangePassData {
@@ -178,7 +199,7 @@ fn post_password(data: JsonUpcase<ChangePassData>, headers: Headers, conn: DbCon
 user.key = data.Key;
 match user.save(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed to save password")
+Err(_) => err!("Failed to save password"),
 }
 }
@@ -208,10 +229,86 @@ fn post_kdf(data: JsonUpcase<ChangeKdfData>, headers: Headers, conn: DbConn) ->
 user.key = data.Key;
 match user.save(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed to save password settings")
+Err(_) => err!("Failed to save password settings"),
 }
 }
#[derive(Deserialize)]
#[allow(non_snake_case)]
struct UpdateFolderData {
Id: String,
Name: String,
}
use super::ciphers::CipherData;
#[derive(Deserialize)]
#[allow(non_snake_case)]
struct KeyData {
Ciphers: Vec<CipherData>,
Folders: Vec<UpdateFolderData>,
Key: String,
PrivateKey: String,
MasterPasswordHash: String,
}
#[post("/accounts/key", data = "<data>")]
fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult {
let data: KeyData = data.into_inner().data;
if !headers.user.check_valid_password(&data.MasterPasswordHash) {
err!("Invalid password")
}
let user_uuid = &headers.user.uuid;
// Update folder data
for folder_data in data.Folders {
let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &conn) {
Some(folder) => folder,
None => err!("Folder doesn't exist"),
};
if &saved_folder.user_uuid != user_uuid {
err!("The folder is not owned by the user")
}
saved_folder.name = folder_data.Name;
if saved_folder.save(&conn).is_err() {
err!("Failed to save folder")
}
}
// Update cipher data
use super::ciphers::update_cipher_from_data;
for cipher_data in data.Ciphers {
let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &conn) {
Some(cipher) => cipher,
None => err!("Cipher doesn't exist"),
};
if saved_cipher.user_uuid.as_ref().unwrap() != user_uuid {
err!("The cipher is not owned by the user")
}
update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherUpdate)?
}
// Update user data
let mut user = headers.user;
user.key = data.Key;
user.private_key = Some(data.PrivateKey);
user.reset_security_stamp();
if user.save(&conn).is_err() {
err!("Failed modify user key");
}
Ok(())
}
 #[post("/accounts/security-stamp", data = "<data>")]
 fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
 let data: PasswordData = data.into_inner().data;
@@ -224,7 +321,7 @@ fn post_sstamp(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -
 user.reset_security_stamp();
 match user.save(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed to reset security stamp")
+Err(_) => err!("Failed to reset security stamp"),
 }
 }
@@ -282,7 +379,7 @@ fn post_email(data: JsonUpcase<ChangeEmailData>, headers: Headers, conn: DbConn)
 match user.save(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed to save email address")
+Err(_) => err!("Failed to save email address"),
 }
 }
@@ -302,7 +399,7 @@ fn delete_account(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn
 match user.delete(&conn) {
 Ok(()) => Ok(()),
-Err(_) => err!("Failed deleting user account, are you the only owner of some organization?")
+Err(_) => err!("Failed deleting user account, are you the only owner of some organization?"),
 }
 }

Show file

@@ -1,35 +1,76 @@
+use std::collections::{HashSet, HashMap};
 use std::path::Path;
-use std::collections::HashSet;
-use rocket::State;
-use rocket::Data;
 use rocket::http::ContentType;
+use rocket::{request::Form, Data, Route, State};
-use rocket_contrib::{Json, Value};
+use rocket_contrib::json::Json;
+use serde_json::Value;
-use multipart::server::{Multipart, SaveResult};
 use multipart::server::save::SavedData;
+use multipart::server::{Multipart, SaveResult};
 use data_encoding::HEXLOWER;
-use db::DbConn;
-use db::models::*;
+use crate::db::models::*;
+use crate::db::DbConn;
-use crypto;
+use crate::crypto;
-use api::{self, PasswordData, JsonResult, EmptyResult, JsonUpcase, WebSocketUsers, UpdateType};
+use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, PasswordData, UpdateType, WebSocketUsers};
-use auth::Headers;
+use crate::auth::Headers;
-use CONFIG;
+use crate::CONFIG;
-#[derive(FromForm)]
-#[allow(non_snake_case)]
-struct SyncData {
-excludeDomains: bool,
+pub fn routes() -> Vec<Route> {
+routes![
+sync,
+get_ciphers,
get_cipher,
get_cipher_admin,
get_cipher_details,
post_ciphers,
put_cipher_admin,
post_ciphers_admin,
post_ciphers_create,
post_ciphers_import,
post_attachment,
post_attachment_admin,
post_attachment_share,
delete_attachment_post,
delete_attachment_post_admin,
delete_attachment,
delete_attachment_admin,
post_cipher_admin,
post_cipher_share,
put_cipher_share,
put_cipher_share_seleted,
post_cipher,
put_cipher,
delete_cipher_post,
delete_cipher_post_admin,
delete_cipher,
delete_cipher_admin,
delete_cipher_selected,
delete_cipher_selected_post,
delete_all,
move_cipher_selected,
move_cipher_selected_put,
post_collections_update,
post_collections_admin,
put_collections_admin,
]
 }
-#[get("/sync?<data>")]
-fn sync(data: SyncData, headers: Headers, conn: DbConn) -> JsonResult {
+#[derive(FromForm, Default)]
+struct SyncData {
+#[form(field = "excludeDomains")]
+exclude_domains: bool, // Default: 'false'
+}
+#[get("/sync?<data..>")]
+fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
 let user_json = headers.user.to_json(&conn);
 let folders = Folder::find_by_user(&headers.user.uuid, &conn);
@@ -41,7 +82,7 @@ fn sync(data: SyncData, headers: Headers, conn: DbConn) -> JsonResult {
 let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
 let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
-let domains_json = if data.excludeDomains { Value::Null } else { api::core::get_eq_domains(headers).unwrap().into_inner() };
+let domains_json = if data.exclude_domains { Value::Null } else { api::core::get_eq_domains(headers).unwrap().into_inner() };
 Ok(Json(json!({
 "Profile": user_json,
@@ -53,14 +94,6 @@ fn sync(data: SyncData, headers: Headers, conn: DbConn) -> JsonResult {
 })))
 }
-#[get("/sync")]
-fn sync_no_query(headers: Headers, conn: DbConn) -> JsonResult {
-let sync_data = SyncData {
-excludeDomains: false,
-};
-sync(sync_data, headers, conn)
-}
 #[get("/ciphers")]
 fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult {
 let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
@@ -103,7 +136,7 @@ fn get_cipher_details(uuid: String, headers: Headers, conn: DbConn) -> JsonResul
 #[allow(non_snake_case)]
 pub struct CipherData {
 // Id is optional as it is included only in bulk share
-Id: Option<String>,
+pub Id: Option<String>,
 // Folder id is not included in import
 FolderId: Option<String>,
 // TODO: Some of these might appear all the time, no need for Option
@@ -129,13 +162,25 @@ pub struct CipherData {
 Favorite: Option<bool>,
 PasswordHistory: Option<Value>,
+// These are used during key rotation
+#[serde(rename = "Attachments")]
+_Attachments: Option<Value>, // Unused, contains map of {id: filename}
+Attachments2: Option<HashMap<String, Attachments2Data>>
+}
+#[derive(Deserialize, Debug)]
+#[allow(non_snake_case)]
+pub struct Attachments2Data {
+FileName: String,
+Key: String,
 }
 #[post("/ciphers/admin", data = "<data>")]
 fn post_ciphers_admin(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult {
 let data: ShareCipherData = data.into_inner().data;
-let mut cipher = Cipher::new(data.Cipher.Type.clone(), data.Cipher.Name.clone());
+let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone());
 cipher.user_uuid = Some(headers.user.uuid.clone());
 match cipher.save(&conn) {
 Ok(()) => (),
@@ -188,6 +233,28 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
 }
 }
// Modify attachments name and keys when rotating
if let Some(attachments) = data.Attachments2 {
for (id, attachment) in attachments {
let mut saved_att = match Attachment::find_by_id(&id, &conn) {
Some(att) => att,
None => err!("Attachment doesn't exist")
};
if saved_att.cipher_uuid != cipher.uuid {
err!("Attachment is not owned by the cipher")
}
saved_att.key = Some(attachment.Key);
saved_att.file_name = attachment.FileName;
match saved_att.save(&conn) {
Ok(()) => (),
Err(_) => err!("Failed to save attachment")
};
}
}
 let type_data_opt = match data.Type {
 1 => data.Login,
 2 => data.SecureNote,
@@ -219,7 +286,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
 match cipher.save(&conn) {
 Ok(()) => (),
-Err(_) => println!("Error: Failed to save cipher")
+Err(_) => err!("Failed to save cipher")
 };
 ws.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn));
@@ -266,7 +333,6 @@ fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbC
 }
 // Read the relations between folders and ciphers
-use std::collections::HashMap;
 let mut relations_map = HashMap::new();
 for relation in data.FolderRelationships {
@@ -509,7 +575,18 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
 let base_path = Path::new(&CONFIG.attachments_folder).join(&cipher.uuid);
+let mut attachment_key = None;
 Multipart::with_body(data.open(), boundary).foreach_entry(|mut field| {
+match field.headers.name.as_str() {
+"key" => {
+use std::io::Read;
+let mut key_buffer = String::new();
+if field.data.read_to_string(&mut key_buffer).is_ok() {
+attachment_key = Some(key_buffer);
+}
+},
+"data" => {
 // This is provided by the client, don't trust it
 let name = field.headers.filename.expect("No filename provided");
@@ -522,24 +599,28 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
 .with_path(path) {
 SaveResult::Full(SavedData::File(_, size)) => size as i32,
 SaveResult::Full(other) => {
-println!("Attachment is not a file: {:?}", other);
+error!("Attachment is not a file: {:?}", other);
 return;
 },
 SaveResult::Partial(_, reason) => {
-println!("Partial result: {:?}", reason);
+error!("Partial result: {:?}", reason);
 return;
 },
 SaveResult::Error(e) => {
-println!("Error: {:?}", e);
+error!("Error: {:?}", e);
 return;
 }
 };
-let attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
+let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
+attachment.key = attachment_key.clone();
 match attachment.save(&conn) {
 Ok(()) => (),
-Err(_) => println!("Error: failed to save attachment")
+Err(_) => error!("Failed to save attachment")
 };
+},
+_ => error!("Invalid multipart name")
+}
 }).expect("Error processing multipart data");
 Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
@@ -670,7 +751,7 @@ fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn,
 }
 match cipher.save(&conn) {
 Ok(()) => (),
-Err(_) => println!("Error: Failed to save cipher")
+Err(_) => err!("Failed to save cipher")
 };
 ws.send_cipher_update(UpdateType::SyncCipherUpdate, &cipher, &cipher.update_users_revision(&conn));
 }
@@ -708,8 +789,7 @@ fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, ws
 for f in Folder::find_by_user(&user.uuid, &conn) {
 if f.delete(&conn).is_err() {
 err!("Failed deleting folder")
-}
-else {
+} else {
 ws.send_folder_update(UpdateType::SyncFolderCreate, &f);
 }
 }
@@ -761,6 +841,6 @@ fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &He
 ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn));
 Ok(())
 }
-Err(_) => err!("Deleting attachement failed")
+Err(_) => err!("Deleting attachment failed")
 }
 }

Show file

@@ -1,11 +1,26 @@
 use rocket::State;
-use rocket_contrib::{Json, Value};
+use rocket_contrib::json::Json;
+use serde_json::Value;
-use db::DbConn;
-use db::models::*;
-use api::{JsonResult, EmptyResult, JsonUpcase, WebSocketUsers, UpdateType};
-use auth::Headers;
+use crate::db::DbConn;
+use crate::db::models::*;
+use crate::api::{JsonResult, EmptyResult, JsonUpcase, WebSocketUsers, UpdateType};
+use crate::auth::Headers;
+use rocket::Route;
+pub fn routes() -> Vec<Route> {
+routes![
+get_folders,
+get_folder,
+post_folders,
+post_folder,
+put_folder,
+delete_folder_post,
+delete_folder,
+]
+}
 #[get("/folders")]
 fn get_folders(headers: Headers, conn: DbConn) -> JsonResult {

Show file

@@ -4,182 +4,69 @@ mod folders;
 mod organizations;
 pub(crate) mod two_factor;
-use self::accounts::*;
-use self::ciphers::*;
-use self::folders::*;
-use self::organizations::*;
-use self::two_factor::*;
 pub fn routes() -> Vec<Route> {
-routes![
+let mut mod_routes = routes![
register,
profile,
put_profile,
post_profile,
get_public_keys,
post_keys,
post_password,
post_kdf,
post_sstamp,
post_email_token,
post_email,
delete_account,
post_delete_account,
revision_date,
password_hint,
prelogin,
sync,
sync_no_query,
get_ciphers,
get_cipher,
get_cipher_admin,
get_cipher_details,
post_ciphers,
put_cipher_admin,
post_ciphers_admin,
post_ciphers_create,
post_ciphers_import,
post_attachment,
post_attachment_admin,
post_attachment_share,
delete_attachment_post,
delete_attachment_post_admin,
delete_attachment,
delete_attachment_admin,
post_cipher_admin,
post_cipher_share,
put_cipher_share,
put_cipher_share_seleted,
post_cipher,
put_cipher,
delete_cipher_post,
delete_cipher_post_admin,
delete_cipher,
delete_cipher_admin,
delete_cipher_selected,
delete_cipher_selected_post,
delete_all,
move_cipher_selected,
move_cipher_selected_put,
get_folders,
get_folder,
post_folders,
post_folder,
put_folder,
delete_folder_post,
delete_folder,
get_twofactor,
get_recover,
recover,
disable_twofactor,
disable_twofactor_put,
generate_authenticator,
activate_authenticator,
activate_authenticator_put,
generate_u2f,
generate_u2f_challenge,
activate_u2f,
activate_u2f_put,
generate_yubikey,
activate_yubikey,
activate_yubikey_put,
get_organization,
create_organization,
delete_organization,
post_delete_organization,
leave_organization,
get_user_collections,
get_org_collections,
get_org_collection_detail,
get_collection_users,
put_organization,
post_organization,
post_organization_collections,
delete_organization_collection_user,
post_organization_collection_delete_user,
post_organization_collection_update,
put_organization_collection_update,
delete_organization_collection,
post_organization_collection_delete,
post_collections_update,
post_collections_admin,
put_collections_admin,
get_org_details,
get_org_users,
send_invite,
confirm_invite,
get_user,
edit_user,
put_organization_user,
delete_user,
post_delete_user,
post_reinvite_user,
post_org_import,
clear_device_token, clear_device_token,
put_device_token, put_device_token,
get_eq_domains, get_eq_domains,
post_eq_domains, post_eq_domains,
put_eq_domains, put_eq_domains,
];
] let mut routes = Vec::new();
routes.append(&mut accounts::routes());
routes.append(&mut ciphers::routes());
routes.append(&mut folders::routes());
routes.append(&mut organizations::routes());
routes.append(&mut two_factor::routes());
routes.append(&mut mod_routes);
routes
} }
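The hunk above replaces the single crate-wide routes![] list with per-module routes() functions that the parent simply appends together. A minimal stand-alone sketch of that pattern for Rocket 0.4 (module and handler names are invented for illustration, not the project's actual ones):

#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;

mod folders {
    use rocket::Route;

    // Each submodule owns its handlers...
    #[get("/folders")]
    pub fn get_folders() -> &'static str {
        "[]"
    }

    // ...and exposes them through its own routes() function.
    pub fn routes() -> Vec<Route> {
        routes![get_folders]
    }
}

mod accounts {
    use rocket::Route;

    #[get("/accounts/profile")]
    pub fn profile() -> &'static str {
        "{}"
    }

    pub fn routes() -> Vec<Route> {
        routes![profile]
    }
}

// The parent module only concatenates the submodule lists.
pub fn api_routes() -> Vec<rocket::Route> {
    let mut routes = Vec::new();
    routes.append(&mut folders::routes());
    routes.append(&mut accounts::routes());
    routes
}

fn main() {
    rocket::ignite().mount("/api", api_routes()).launch();
}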
/// ///
/// Move this somewhere else /// Move this somewhere else
/// ///
use rocket::Route; use rocket::Route;
use rocket_contrib::{Json, Value}; use rocket_contrib::json::Json;
use serde_json::Value;
use db::DbConn; use crate::db::DbConn;
use db::models::*;
use api::{JsonResult, EmptyResult, JsonUpcase}; use crate::api::{EmptyResult, JsonResult, JsonUpcase};
use auth::Headers; use crate::auth::Headers;
#[put("/devices/identifier/<uuid>/clear-token", data = "<data>")] #[put("/devices/identifier/<uuid>/clear-token")]
fn clear_device_token(uuid: String, data: Json<Value>, headers: Headers, conn: DbConn) -> EmptyResult { fn clear_device_token(uuid: String) -> EmptyResult {
let _data: Value = data.into_inner(); // This endpoint doesn't have auth header
let device = match Device::find_by_uuid(&uuid, &conn) { let _ = uuid;
Some(device) => device, // uuid is not related to deviceId
None => err!("Device not found")
};
if device.user_uuid != headers.user.uuid { // This only clears push token
err!("Device not owned by user") // https://github.com/bitwarden/core/blob/master/src/Api/Controllers/DevicesController.cs#L109
} // https://github.com/bitwarden/core/blob/master/src/Core/Services/Implementations/DeviceService.cs#L37
Ok(())
match device.delete(&conn) {
Ok(()) => Ok(()),
Err(_) => err!("Failed deleting device")
}
} }
#[put("/devices/identifier/<uuid>/token", data = "<data>")] #[put("/devices/identifier/<uuid>/token", data = "<data>")]
fn put_device_token(uuid: String, data: Json<Value>, headers: Headers, conn: DbConn) -> JsonResult { fn put_device_token(uuid: String, data: JsonUpcase<Value>, headers: Headers) -> JsonResult {
let _data: Value = data.into_inner(); let _data: Value = data.into_inner().data;
// Data has a single string value "PushToken"
let _ = uuid;
// uuid is not related to deviceId
let device = match Device::find_by_uuid(&uuid, &conn) { // TODO: This should save the push token, but we don't have push functionality
Some(device) => device,
None => err!("Device not found")
};
if device.user_uuid != headers.user.uuid { Ok(Json(json!({
err!("Device not owned by user") "Id": headers.device.uuid,
} "Name": headers.device.name,
"Type": headers.device.type_,
// TODO: What does this do? "Identifier": headers.device.uuid,
"CreationDate": crate::util::format_date(&headers.device.created_at),
err!("Not implemented") })))
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@ -213,7 +100,6 @@ fn get_eq_domains(headers: Headers) -> JsonResult {
}))) })))
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct EquivDomainData { struct EquivDomainData {
@ -236,9 +122,8 @@ fn post_eq_domains(data: JsonUpcase<EquivDomainData>, headers: Headers, conn: Db
match user.save(&conn) { match user.save(&conn) {
Ok(()) => Ok(Json(json!({}))), Ok(()) => Ok(Json(json!({}))),
Err(_) => err!("Failed to save user") Err(_) => err!("Failed to save user"),
} }
} }
#[put("/settings/domains", data = "<data>")] #[put("/settings/domains", data = "<data>")]


@ -1,14 +1,53 @@
use rocket::State; use rocket::State;
use rocket_contrib::{Json, Value}; use rocket::request::Form;
use CONFIG; use rocket_contrib::json::Json;
use db::DbConn; use serde_json::Value;
use db::models::*;
use api::{PasswordData, JsonResult, EmptyResult, NumberOrString, JsonUpcase, WebSocketUsers, UpdateType}; use crate::CONFIG;
use auth::{Headers, AdminHeaders, OwnerHeaders}; use crate::db::DbConn;
use crate::db::models::*;
use crate::api::{PasswordData, JsonResult, EmptyResult, NumberOrString, JsonUpcase, WebSocketUsers, UpdateType};
use crate::auth::{Headers, AdminHeaders, OwnerHeaders};
use serde::{Deserialize, Deserializer}; use serde::{Deserialize, Deserializer};
use rocket::Route;
pub fn routes() -> Vec<Route> {
routes![
get_organization,
create_organization,
delete_organization,
post_delete_organization,
leave_organization,
get_user_collections,
get_org_collections,
get_org_collection_detail,
get_collection_users,
put_organization,
post_organization,
post_organization_collections,
delete_organization_collection_user,
post_organization_collection_delete_user,
post_organization_collection_update,
put_organization_collection_update,
delete_organization_collection,
post_organization_collection_delete,
get_org_details,
get_org_users,
send_invite,
confirm_invite,
get_user,
edit_user,
put_organization_user,
delete_user,
post_delete_user,
post_reinvite_user,
post_org_import,
]
}
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
@ -315,14 +354,14 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders,
} }
#[derive(FromForm)] #[derive(FromForm)]
#[allow(non_snake_case)]
struct OrgIdData { struct OrgIdData {
organizationId: String #[form(field = "organizationId")]
organization_id: String
} }
#[get("/ciphers/organization-details?<data>")] #[get("/ciphers/organization-details?<data..>")]
fn get_org_details(data: OrgIdData, headers: Headers, conn: DbConn) -> JsonResult { fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> JsonResult {
let ciphers = Cipher::find_by_org(&data.organizationId, &conn); let ciphers = Cipher::find_by_org(&data.organization_id, &conn);
let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect();
Ok(Json(json!({ Ok(Json(json!({
@ -643,10 +682,10 @@ struct RelationsData {
Value: usize, Value: usize,
} }
#[post("/ciphers/import-organization?<query>", data = "<data>")] #[post("/ciphers/import-organization?<query..>", data = "<data>")]
fn post_org_import(query: OrgIdData, data: JsonUpcase<ImportData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn post_org_import(query: Form<OrgIdData>, data: JsonUpcase<ImportData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult {
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
let org_id = query.organizationId; let org_id = query.into_inner().organization_id;
let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
Some(user) => user, Some(user) => user,

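The organization hunks above switch query handling to Rocket 0.4 style: ?<data..> plus a Form<T> guard, with #[form(field = "organizationId")] mapping the camelCase wire name onto a snake_case Rust field. A stripped-down sketch of the same mechanism (hypothetical route path and handler, not the project's endpoint):

#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;

use rocket::request::Form;

// The query string sends `organizationId`; the struct field stays snake_case.
#[derive(FromForm)]
struct OrgIdData {
    #[form(field = "organizationId")]
    organization_id: String,
}

// `?<data..>` hands the remaining query string to the Form guard.
#[get("/organization-details?<data..>")]
fn org_details(data: Form<OrgIdData>) -> String {
    format!("requested organization {}", data.organization_id)
}

fn main() {
    rocket::ignite().mount("/", routes![org_details]).launch();
}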

@ -1,16 +1,40 @@
use data_encoding::BASE32; use data_encoding::BASE32;
use rocket_contrib::{Json, Value}; use rocket_contrib::json::Json;
use serde_json; use serde_json;
use serde_json::Value;
use db::{
use crate::db::{
models::{TwoFactor, TwoFactorType, User}, models::{TwoFactor, TwoFactorType, User},
DbConn, DbConn,
}; };
use crypto; use crate::crypto;
use api::{ApiResult, JsonResult, JsonUpcase, NumberOrString, PasswordData}; use crate::api::{ApiResult, JsonResult, JsonUpcase, NumberOrString, PasswordData};
use auth::Headers; use crate::auth::Headers;
use rocket::Route;
pub fn routes() -> Vec<Route> {
routes![
get_twofactor,
get_recover,
recover,
disable_twofactor,
disable_twofactor_put,
generate_authenticator,
activate_authenticator,
activate_authenticator_put,
generate_u2f,
generate_u2f_challenge,
activate_u2f,
activate_u2f_put,
generate_yubikey,
activate_yubikey,
activate_yubikey_put,
]
}
#[get("/two-factor")] #[get("/two-factor")]
fn get_twofactor(headers: Headers, conn: DbConn) -> JsonResult { fn get_twofactor(headers: Headers, conn: DbConn) -> JsonResult {
@ -50,7 +74,7 @@ struct RecoverTwoFactor {
fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult { fn recover(data: JsonUpcase<RecoverTwoFactor>, conn: DbConn) -> JsonResult {
let data: RecoverTwoFactor = data.into_inner().data; let data: RecoverTwoFactor = data.into_inner().data;
use db::models::User; use crate::db::models::User;
// Get the user // Get the user
let mut user = match User::find_by_mail(&data.Email, &conn) { let mut user = match User::find_by_mail(&data.Email, &conn) {
@ -219,7 +243,7 @@ fn _generate_recover_code(user: &mut User, conn: &DbConn) {
let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20])); let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20]));
user.totp_recover = Some(totp_recover); user.totp_recover = Some(totp_recover);
if user.save(conn).is_err() { if user.save(conn).is_err() {
println!("Error: Failed to save the user's two factor recovery code") error!("Failed to save the user's two factor recovery code")
} }
} }
} }
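_generate_recover_code above stores a Base32 encoding of 20 random bytes. The crypto::get_random helper is not part of this diff; the following is only a guess at its shape, using ring's SystemRandom and data_encoding directly:

use data_encoding::BASE32;
use ring::rand::{SecureRandom, SystemRandom};

// Fill the provided buffer with cryptographically secure random bytes.
fn get_random(mut buf: Vec<u8>) -> Vec<u8> {
    SystemRandom::new()
        .fill(&mut buf)
        .expect("failed to generate random bytes");
    buf
}

// A two-factor recovery code: 20 random bytes, Base32-encoded.
fn generate_recover_code() -> String {
    BASE32.encode(&get_random(vec![0u8; 20]))
}

fn main() {
    println!("recovery code: {}", generate_recover_code());
}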
@ -228,7 +252,7 @@ use u2f::messages::{RegisterResponse, SignResponse, U2fSignRequest};
use u2f::protocol::{Challenge, U2f}; use u2f::protocol::{Challenge, U2f};
use u2f::register::Registration; use u2f::register::Registration;
use CONFIG; use crate::CONFIG;
const U2F_VERSION: &str = "U2F_V2"; const U2F_VERSION: &str = "U2F_V2";
@ -376,7 +400,7 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
}))) })))
} }
Err(e) => { Err(e) => {
println!("Error: {:#?}", e); error!("{:#?}", e);
err!("Error activating u2f") err!("Error activating u2f")
} }
} }
@ -480,11 +504,11 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Api
match response { match response {
Ok(new_counter) => { Ok(new_counter) => {
_counter = new_counter; _counter = new_counter;
println!("O {:#}", new_counter); info!("O {:#}", new_counter);
return Ok(()); return Ok(());
} }
Err(e) => { Err(e) => {
println!("E {:#}", e); info!("E {:#}", e);
break; break;
} }
} }
@ -544,9 +568,8 @@ fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value { fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value {
let mut result = json!({}); let mut result = json!({});
for i in 0..yubikeys.len() { for (i, key) in yubikeys.into_iter().enumerate() {
let ref key = &yubikeys[i]; result[format!("Key{}", i+1)] = Value::String(key);
result[format!("Key{}", i+1)] = Value::String(key.to_string());
} }
result result
@ -630,7 +653,7 @@ fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn:
let yubikeys = parse_yubikeys(&data); let yubikeys = parse_yubikeys(&data);
if yubikeys.len() == 0 { if yubikeys.is_empty() {
return Ok(Json(json!({ return Ok(Json(json!({
"Enabled": false, "Enabled": false,
"Object": "twoFactorU2f", "Object": "twoFactorU2f",


@ -7,7 +7,7 @@ use rocket::http::ContentType;
use reqwest; use reqwest;
use CONFIG; use crate::CONFIG;
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
routes![icon] routes![icon]
@ -43,7 +43,7 @@ fn get_icon (domain: &str) -> Vec<u8> {
icon icon
}, },
Err(e) => { Err(e) => {
println!("Error downloading icon: {:?}", e); error!("Error downloading icon: {:?}", e);
get_fallback_icon() get_fallback_icon()
} }
} }
@ -71,7 +71,7 @@ fn get_icon_url(domain: &str) -> String {
} }
fn download_icon(url: &str) -> Result<Vec<u8>, reqwest::Error> { fn download_icon(url: &str) -> Result<Vec<u8>, reqwest::Error> {
println!("Downloading icon for {}...", url); info!("Downloading icon for {}...", url);
let mut res = reqwest::get(url)?; let mut res = reqwest::get(url)?;
res = res.error_for_status()?; res = res.error_for_status()?;
@ -105,7 +105,7 @@ fn get_fallback_icon() -> Vec<u8> {
icon icon
}, },
Err(e) => { Err(e) => {
println!("Error downloading fallback icon: {:?}", e); error!("Error downloading fallback icon: {:?}", e);
vec![] vec![]
} }
} }
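The println! calls in this and the surrounding files become log macros (info!, error!). The logger wiring itself is outside these hunks; a minimal fern dispatch that such macros could feed into might look like this (the format string and level are illustrative assumptions, not the project's configuration):

use chrono::Local;
use log::{error, info};

fn init_logging() -> Result<(), fern::InitError> {
    fern::Dispatch::new()
        // Prefix every record with a timestamp, target and level.
        .format(|out, message, record| {
            out.finish(format_args!(
                "[{}][{}][{}] {}",
                Local::now().format("%Y-%m-%d %H:%M:%S"),
                record.target(),
                record.level(),
                message
            ))
        })
        .level(log::LevelFilter::Info)
        .chain(std::io::stdout())
        .apply()?;
    Ok(())
}

fn main() {
    init_logging().expect("failed to initialize logging");
    info!("Downloading icon for {}...", "https://example.com");
    error!("Error downloading icon: {:?}", "connection refused");
}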


@ -1,42 +1,43 @@
use std::collections::HashMap; use rocket::request::LenientForm;
use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use rocket::Route;
use rocket::request::{self, Form, FormItems, FromForm, FromRequest, Request}; use rocket_contrib::json::Json;
use rocket::{Outcome, Route}; use serde_json::Value;
use rocket_contrib::{Json, Value};
use num_traits::FromPrimitive; use num_traits::FromPrimitive;
use db::models::*; use crate::db::models::*;
use db::DbConn; use crate::db::DbConn;
use util::{self, JsonMap}; use crate::util::{self, JsonMap};
use api::{ApiResult, JsonResult}; use crate::api::{ApiResult, EmptyResult, JsonResult};
use CONFIG; use crate::auth::ClientIp;
use crate::CONFIG;
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
routes![login] routes![login]
} }
#[post("/connect/token", data = "<connect_data>")] #[post("/connect/token", data = "<data>")]
fn login(connect_data: Form<ConnectData>, device_type: DeviceType, conn: DbConn, socket: Option<SocketAddr>) -> JsonResult { fn login(data: LenientForm<ConnectData>, conn: DbConn, ip: ClientIp) -> JsonResult {
let data = connect_data.get(); let data: ConnectData = data.into_inner();
validate_data(&data)?;
match data.grant_type { match data.grant_type {
GrantType::RefreshToken => _refresh_login(data, device_type, conn), GrantType::refresh_token => _refresh_login(data, conn),
GrantType::Password => _password_login(data, device_type, conn, socket), GrantType::password => _password_login(data, conn, ip),
} }
} }
fn _refresh_login(data: &ConnectData, _device_type: DeviceType, conn: DbConn) -> JsonResult { fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult {
// Extract token // Extract token
let token = data.get("refresh_token"); let token = data.refresh_token.unwrap();
// Get device by refresh token // Get device by refresh token
let mut device = match Device::find_by_refresh_token(token, &conn) { let mut device = match Device::find_by_refresh_token(&token, &conn) {
Some(device) => device, Some(device) => device,
None => err!("Invalid refresh token"), None => err!("Invalid refresh token"),
}; };
@ -55,82 +56,63 @@ fn _refresh_login(data: &ConnectData, _device_type: DeviceType, conn: DbConn) ->
"Key": user.key, "Key": user.key,
"PrivateKey": user.private_key, "PrivateKey": user.private_key,
}))), }))),
Err(_) => err!("Failed to add device to user") Err(e) => err!("Failed to add device to user", e),
} }
} }
fn _password_login(data: &ConnectData, device_type: DeviceType, conn: DbConn, remote: Option<SocketAddr>) -> JsonResult { fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult {
// Get the ip for error reporting
let ip = match remote {
Some(ip) => ip.ip(),
None => IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)),
};
// Validate scope // Validate scope
let scope = data.get("scope"); let scope = data.scope.as_ref().unwrap();
if scope != "api offline_access" { if scope != "api offline_access" {
err!("Scope not supported") err!("Scope not supported")
} }
// Get the user // Get the user
let username = data.get("username"); let username = data.username.as_ref().unwrap();
let user = match User::find_by_mail(username, &conn) { let user = match User::find_by_mail(username, &conn) {
Some(user) => user, Some(user) => user,
None => err!(format!( None => err!(format!(
"Username or password is incorrect. Try again. IP: {}. Username: {}.", "Username or password is incorrect. Try again. IP: {}. Username: {}.",
ip, username ip.ip, username
)), )),
}; };
// Check password // Check password
let password = data.get("password"); let password = data.password.as_ref().unwrap();
if !user.check_valid_password(password) { if !user.check_valid_password(password) {
err!(format!( err!(format!(
"Username or password is incorrect. Try again. IP: {}. Username: {}.", "Username or password is incorrect. Try again. IP: {}. Username: {}.",
ip, username ip.ip, username
)) ))
} }
// Let's only use the header and ignore the 'devicetype' parameter let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(0);
let device_type_num = device_type.0; let device_id = data.device_identifier.clone().unwrap_or_else(crate::util::get_uuid);
let device_name = data.device_name.clone().unwrap_or("unknown_device".into());
let (device_id, device_name) = if data.is_device {
(
data.get("deviceidentifier").clone(),
data.get("devicename").clone(),
)
} else {
(format!("web-{}", user.uuid), String::from("web"))
};
// Find device or create new // Find device or create new
let mut device = match Device::find_by_uuid(&device_id, &conn) { let mut device = match Device::find_by_uuid(&device_id, &conn) {
Some(device) => { Some(device) => {
// Check if valid device // Check if owned device, and recreate if not
if device.user_uuid != user.uuid { if device.user_uuid != user.uuid {
match device.delete(&conn) { info!("Device exists but is owned by another user. The old device will be discarded");
Ok(()) => Device::new(device_id, user.uuid.clone(), device_name, device_type_num), Device::new(device_id, user.uuid.clone(), device_name, device_type)
Err(_) => err!("Tried to delete device not owned by user, but failed")
}
} else { } else {
device device
} }
} }
None => { None => Device::new(device_id, user.uuid.clone(), device_name, device_type)
// Create new device
Device::new(device_id, user.uuid.clone(), device_name, device_type_num)
}
}; };
let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, &conn)?; let twofactor_token = twofactor_auth(&user.uuid, &data.clone(), &mut device, &conn)?;
// Common // Common
let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap(); let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap();
let orgs = UserOrganization::find_by_user(&user.uuid, &conn); let orgs = UserOrganization::find_by_user(&user.uuid, &conn);
let (access_token, expires_in) = device.refresh_tokens(&user, orgs); let (access_token, expires_in) = device.refresh_tokens(&user, orgs);
if device.save(&conn).is_err() { if let Err(e) = device.save(&conn) {
err!("Failed to add device to user") err!("Failed to add device to user", e)
} }
let mut result = json!({ let mut result = json!({
@ -147,6 +129,7 @@ fn _password_login(data: &ConnectData, device_type: DeviceType, conn: DbConn, re
result["TwoFactorToken"] = Value::String(token); result["TwoFactorToken"] = Value::String(token);
} }
info!("User {} logged in successfully. IP: {}", username, ip.ip);
Ok(Json(result)) Ok(Json(result))
} }
@ -167,13 +150,10 @@ fn twofactor_auth(
return Ok(None); return Ok(None);
} }
let provider = match util::try_parse_string(data.get_opt("twoFactorProvider")) { let provider = data.two_factor_provider.unwrap_or(providers[0]); // If we aren't given a two factor provider, asume the first one
Some(provider) => provider,
None => providers[0], // If we aren't given a two factor provider, asume the first one
};
let twofactor_code = match data.get_opt("twoFactorToken") { let twofactor_code = match data.two_factor_token {
Some(code) => code, Some(ref code) => code,
None => err_json!(_json_err_twofactor(&providers, user_uuid, conn)?), None => err_json!(_json_err_twofactor(&providers, user_uuid, conn)?),
}; };
@ -181,8 +161,8 @@ fn twofactor_auth(
match TwoFactorType::from_i32(provider) { match TwoFactorType::from_i32(provider) {
Some(TwoFactorType::Remember) => { Some(TwoFactorType::Remember) => {
match &device.twofactor_remember { match device.twofactor_remember {
Some(remember) if remember == twofactor_code => return Ok(None), // No twofactor token needed here Some(ref remember) if remember == twofactor_code => return Ok(None), // No twofactor token needed here
_ => err_json!(_json_err_twofactor(&providers, user_uuid, conn)?), _ => err_json!(_json_err_twofactor(&providers, user_uuid, conn)?),
} }
} }
@ -204,13 +184,13 @@ fn twofactor_auth(
} }
Some(TwoFactorType::U2f) => { Some(TwoFactorType::U2f) => {
use api::core::two_factor; use crate::api::core::two_factor;
two_factor::validate_u2f_login(user_uuid, twofactor_code, conn)?; two_factor::validate_u2f_login(user_uuid, &twofactor_code, conn)?;
} }
Some(TwoFactorType::YubiKey) => { Some(TwoFactorType::YubiKey) => {
use api::core::two_factor; use crate::api::core::two_factor;
two_factor::validate_yubikey_login(user_uuid, twofactor_code, conn)?; two_factor::validate_yubikey_login(user_uuid, twofactor_code, conn)?;
} }
@ -218,7 +198,7 @@ fn twofactor_auth(
_ => err!("Invalid two factor provider"), _ => err!("Invalid two factor provider"),
} }
if util::try_parse_string_or(data.get_opt("twoFactorRemember"), 0) == 1 { if data.two_factor_remember.unwrap_or(0) == 1 {
Ok(Some(device.refresh_twofactor_remember())) Ok(Some(device.refresh_twofactor_remember()))
} else { } else {
device.delete_twofactor_remember(); device.delete_twofactor_remember();
@ -227,7 +207,7 @@ fn twofactor_auth(
} }
fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult<Value> { fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult<Value> {
use api::core::two_factor; use crate::api::core::two_factor;
let mut result = json!({ let mut result = json!({
"error" : "invalid_grant", "error" : "invalid_grant",
@ -289,93 +269,60 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
Ok(result) Ok(result)
} }
#[derive(Clone, Copy)] #[derive(FromForm, Debug, Clone)]
struct DeviceType(i32); #[allow(non_snake_case)]
impl<'a, 'r> FromRequest<'a, 'r> for DeviceType {
type Error = &'static str;
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
let headers = request.headers();
let type_opt = headers.get_one("Device-Type");
let type_num = util::try_parse_string_or(type_opt, 0);
Outcome::Success(DeviceType(type_num))
}
}
#[derive(Debug)]
struct ConnectData { struct ConnectData {
grant_type: GrantType, grant_type: GrantType,
is_device: bool,
data: HashMap<String, String>, // Needed for grant_type="refresh_token"
refresh_token: Option<String>,
// Needed for grant_type="password"
client_id: Option<String>, // web, cli, desktop, browser, mobile
password: Option<String>,
scope: Option<String>,
username: Option<String>,
#[form(field = "deviceIdentifier")]
device_identifier: Option<String>,
#[form(field = "deviceName")]
device_name: Option<String>,
#[form(field = "deviceType")]
device_type: Option<String>,
// Needed for two-factor auth
#[form(field = "twoFactorProvider")]
two_factor_provider: Option<i32>,
#[form(field = "twoFactorToken")]
two_factor_token: Option<String>,
#[form(field = "twoFactorRemember")]
two_factor_remember: Option<i32>,
} }
#[derive(Debug, Copy, Clone)] #[derive(FromFormValue, Debug, Clone, Copy)]
#[allow(non_camel_case_types)]
enum GrantType { enum GrantType {
RefreshToken, refresh_token,
Password, password,
} }
impl ConnectData { fn validate_data(data: &ConnectData) -> EmptyResult {
fn get(&self, key: &str) -> &String { match data.grant_type {
&self.data[&key.to_lowercase()] GrantType::refresh_token => {
_check_is_some(&data.refresh_token, "refresh_token cannot be blank")
}
GrantType::password => {
_check_is_some(&data.client_id, "client_id cannot be blank")?;
_check_is_some(&data.password, "password cannot be blank")?;
_check_is_some(&data.scope, "scope cannot be blank")?;
_check_is_some(&data.username, "username cannot be blank")
} }
fn get_opt(&self, key: &str) -> Option<&String> {
self.data.get(&key.to_lowercase())
} }
} }
const VALUES_REFRESH: [&str; 1] = ["refresh_token"]; fn _check_is_some<T>(value: &Option<T>, msg: &str) -> EmptyResult {
const VALUES_PASSWORD: [&str; 5] = ["client_id", "grant_type", "password", "scope", "username"]; if value.is_none() {
const VALUES_DEVICE: [&str; 3] = ["deviceidentifier", "devicename", "devicetype"]; err!(msg)
impl<'f> FromForm<'f> for ConnectData {
type Error = String;
fn from_form(items: &mut FormItems<'f>, _strict: bool) -> Result<Self, Self::Error> {
let mut data = HashMap::new();
// Insert data into map
for (key, value) in items {
match (key.url_decode(), value.url_decode()) {
(Ok(key), Ok(value)) => data.insert(key.to_lowercase(), value),
_ => return Err("Error decoding key or value".to_string()),
};
}
// Validate needed values
let (grant_type, is_device) = match data.get("grant_type").map(String::as_ref) {
Some("refresh_token") => {
check_values(&data, &VALUES_REFRESH)?;
(GrantType::RefreshToken, false) // Device doesn't matter here
}
Some("password") => {
check_values(&data, &VALUES_PASSWORD)?;
let is_device = match data["client_id"].as_ref() {
"browser" | "mobile" => check_values(&data, &VALUES_DEVICE)?,
_ => false,
};
(GrantType::Password, is_device)
}
_ => return Err("Grant type not supported".to_string()),
};
Ok(ConnectData {
grant_type,
is_device,
data,
})
} }
} Ok(())
fn check_values(map: &HashMap<String, String>, values: &[&str]) -> Result<bool, String> {
for value in values {
if !map.contains_key(*value) {
return Err(format!("{} cannot be blank", value));
}
}
Ok(true)
} }
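The rewritten login flow accepts a LenientForm<ConnectData> in which every password-grant field is optional, validates the required ones per grant type, and only then unwraps them. A compact stand-alone sketch of that validate-then-unwrap pattern (a reduced, hypothetical field set rather than the full ConnectData above):

// Illustrative subset of the login form validation shown above.
#[derive(Debug, Clone, Copy)]
enum GrantType {
    RefreshToken,
    Password,
}

#[derive(Debug, Default)]
struct ConnectData {
    grant_type: Option<GrantType>,
    refresh_token: Option<String>,
    username: Option<String>,
    password: Option<String>,
    scope: Option<String>,
}

fn check_is_some<T>(value: &Option<T>, msg: &str) -> Result<(), String> {
    if value.is_none() {
        return Err(msg.to_string());
    }
    Ok(())
}

// Each grant type requires a different set of fields to be present.
fn validate_data(data: &ConnectData) -> Result<(), String> {
    match data.grant_type {
        Some(GrantType::RefreshToken) => {
            check_is_some(&data.refresh_token, "refresh_token cannot be blank")
        }
        Some(GrantType::Password) => {
            check_is_some(&data.username, "username cannot be blank")?;
            check_is_some(&data.password, "password cannot be blank")?;
            check_is_some(&data.scope, "scope cannot be blank")
        }
        None => Err("grant_type not supported".to_string()),
    }
}

fn main() {
    let data = ConnectData {
        grant_type: Some(GrantType::Password),
        username: Some("user@example.com".into()),
        password: Some("master password hash".into()),
        scope: Some("api offline_access".into()),
        ..Default::default()
    };
    // After validation succeeds, the checked fields can be unwrapped safely.
    validate_data(&data).expect("missing field");
    println!("scope: {}", data.scope.as_ref().unwrap());
}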


@ -12,14 +12,15 @@ pub use self::notifications::routes as notifications_routes;
pub use self::notifications::{start_notification_server, WebSocketUsers, UpdateType}; pub use self::notifications::{start_notification_server, WebSocketUsers, UpdateType};
use rocket::response::status::BadRequest; use rocket::response::status::BadRequest;
use rocket_contrib::Json; use rocket_contrib::json::Json;
use serde_json::Value;
// Type aliases for API methods results // Type aliases for API methods results
type ApiResult<T> = Result<T, BadRequest<Json>>; type ApiResult<T> = Result<T, BadRequest<Json<Value>>>;
type JsonResult = ApiResult<Json>; type JsonResult = ApiResult<Json<Value>>;
type EmptyResult = ApiResult<()>; type EmptyResult = ApiResult<()>;
use util; use crate::util;
type JsonUpcase<T> = Json<util::UpCase<T>>; type JsonUpcase<T> = Json<util::UpCase<T>>;
// Common structs representing JSON data received // Common structs representing JSON data received


@ -1,12 +1,12 @@
use rocket::Route; use rocket::Route;
use rocket_contrib::Json; use rocket_contrib::json::Json;
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use api::JsonResult; use crate::api::JsonResult;
use auth::Headers; use crate::auth::Headers;
use db::DbConn; use crate::db::DbConn;
use CONFIG; use crate::CONFIG;
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
routes![negotiate, websockets_err] routes![negotiate, websockets_err]
@ -19,7 +19,7 @@ fn websockets_err() -> JsonResult {
#[post("/hub/negotiate")] #[post("/hub/negotiate")]
fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult { fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
use crypto; use crate::crypto;
use data_encoding::BASE64URL; use data_encoding::BASE64URL;
let conn_id = BASE64URL.encode(&crypto::get_random(vec![0u8; 16])); let conn_id = BASE64URL.encode(&crypto::get_random(vec![0u8; 16]));
@ -52,7 +52,7 @@ use chashmap::CHashMap;
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use serde_json::from_str; use serde_json::from_str;
use db::models::{Cipher, Folder, User}; use crate::db::models::{Cipher, Folder, User};
use rmpv::Value; use rmpv::Value;
@ -139,7 +139,7 @@ impl Handler for WSHandler {
let _id = &query_split[1][3..]; let _id = &query_split[1][3..];
// Validate the user // Validate the user
use auth; use crate::auth;
let claims = match auth::decode_jwt(access_token) { let claims = match auth::decode_jwt(access_token) {
Ok(claims) => claims, Ok(claims) => claims,
Err(_) => { Err(_) => {
@ -169,7 +169,7 @@ impl Handler for WSHandler {
} }
fn on_message(&mut self, msg: Message) -> ws::Result<()> { fn on_message(&mut self, msg: Message) -> ws::Result<()> {
println!("Server got message '{}'. ", msg); info!("Server got message '{}'. ", msg);
if let Message::Text(text) = msg.clone() { if let Message::Text(text) = msg.clone() {
let json = &text[..text.len() - 1]; // Remove last char let json = &text[..text.len() - 1]; // Remove last char
@ -242,10 +242,10 @@ pub struct WebSocketUsers {
} }
impl WebSocketUsers { impl WebSocketUsers {
fn send_update(&self, user_uuid: &String, data: Vec<u8>) -> ws::Result<()> { fn send_update(&self, user_uuid: &String, data: &[u8]) -> ws::Result<()> {
if let Some(user) = self.map.get(user_uuid) { if let Some(user) = self.map.get(user_uuid) {
for sender in user.iter() { for sender in user.iter() {
sender.send(data.clone())?; sender.send(data)?;
} }
} }
Ok(()) Ok(())
@ -262,7 +262,7 @@ impl WebSocketUsers {
ut, ut,
); );
self.send_update(&user.uuid.clone(), data).ok(); self.send_update(&user.uuid.clone(), &data).ok();
} }
pub fn send_folder_update(&self, ut: UpdateType, folder: &Folder) { pub fn send_folder_update(&self, ut: UpdateType, folder: &Folder) {
@ -275,10 +275,10 @@ impl WebSocketUsers {
ut, ut,
); );
self.send_update(&folder.user_uuid, data).ok(); self.send_update(&folder.user_uuid, &data).ok();
} }
pub fn send_cipher_update(&self, ut: UpdateType, cipher: &Cipher, user_uuids: &Vec<String>) { pub fn send_cipher_update(&self, ut: UpdateType, cipher: &Cipher, user_uuids: &[String]) {
let user_uuid = convert_option(cipher.user_uuid.clone()); let user_uuid = convert_option(cipher.user_uuid.clone());
let org_uuid = convert_option(cipher.organization_uuid.clone()); let org_uuid = convert_option(cipher.organization_uuid.clone());
@ -294,7 +294,7 @@ impl WebSocketUsers {
); );
for uuid in user_uuids { for uuid in user_uuids {
self.send_update(&uuid, data.clone()).ok(); self.send_update(&uuid, &data).ok();
} }
} }
} }
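send_update above now borrows the payload as &[u8] instead of taking an owned Vec<u8>, so one serialized message can be delivered to several connected clients without cloning it up front (the real code sends it through ws::Sender handles stored in a CHashMap). A small stand-alone analogy using std::sync::mpsc channels:

use std::sync::mpsc::{channel, Sender};

// Broadcast one payload to many senders; an owned copy is made only per send.
fn send_update(senders: &[Sender<Vec<u8>>], data: &[u8]) {
    for sender in senders {
        // The byte slice is copied into an owned Vec only at this point.
        sender.send(data.to_vec()).ok();
    }
}

fn main() {
    let (tx1, rx1) = channel();
    let (tx2, rx2) = channel();

    let payload = b"folder updated".to_vec();
    send_update(&[tx1, tx2], &payload);

    assert_eq!(rx1.recv().unwrap(), payload);
    assert_eq!(rx2.recv().unwrap(), payload);
}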


@ -6,9 +6,10 @@ use rocket::response::{self, NamedFile, Responder};
use rocket::response::content::Content; use rocket::response::content::Content;
use rocket::http::{ContentType, Status}; use rocket::http::{ContentType, Status};
use rocket::Route; use rocket::Route;
use rocket_contrib::{Json, Value}; use rocket_contrib::json::Json;
use serde_json::Value;
use CONFIG; use crate::CONFIG;
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
if CONFIG.web_vault_enabled { if CONFIG.web_vault_enabled {
@ -73,7 +74,7 @@ fn attachments(uuid: String, file: PathBuf) -> io::Result<NamedFile> {
#[get("/alive")] #[get("/alive")]
fn alive() -> Json<String> { fn alive() -> Json<String> {
use util::format_date; use crate::util::format_date;
use chrono::Utc; use chrono::Utc;
Json(format_date(&Utc::now().naive_utc())) Json(format_date(&Utc::now().naive_utc()))


@ -1,22 +1,21 @@
/// ///
/// JWT Handling /// JWT Handling
/// ///
use crate::util::read_file;
use util::read_file;
use chrono::Duration; use chrono::Duration;
use jwt; use jsonwebtoken::{self, Algorithm, Header};
use serde::ser::Serialize; use serde::ser::Serialize;
use CONFIG; use crate::CONFIG;
const JWT_ALGORITHM: jwt::Algorithm = jwt::Algorithm::RS256; const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
lazy_static! { lazy_static! {
pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2); pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2);
pub static ref JWT_ISSUER: String = CONFIG.domain.clone(); pub static ref JWT_ISSUER: String = CONFIG.domain.clone();
static ref JWT_HEADER: jwt::Header = jwt::Header::new(JWT_ALGORITHM); static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM);
static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key) { static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key) {
Ok(key) => key, Ok(key) => key,
@ -30,17 +29,17 @@ lazy_static! {
} }
pub fn encode_jwt<T: Serialize>(claims: &T) -> String { pub fn encode_jwt<T: Serialize>(claims: &T) -> String {
match jwt::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) { match jsonwebtoken::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) {
Ok(token) => token, Ok(token) => token,
Err(e) => panic!("Error encoding jwt {}", e) Err(e) => panic!("Error encoding jwt {}", e)
} }
} }
pub fn decode_jwt(token: &str) -> Result<JWTClaims, String> { pub fn decode_jwt(token: &str) -> Result<JWTClaims, String> {
let validation = jwt::Validation { let validation = jsonwebtoken::Validation {
leeway: 30, // 30 seconds leeway: 30, // 30 seconds
validate_exp: true, validate_exp: true,
validate_iat: true, validate_iat: false, // IssuedAt is the same as NotBefore
validate_nbf: true, validate_nbf: true,
aud: None, aud: None,
iss: Some(JWT_ISSUER.clone()), iss: Some(JWT_ISSUER.clone()),
@ -48,10 +47,10 @@ pub fn decode_jwt(token: &str) -> Result<JWTClaims, String> {
algorithms: vec![JWT_ALGORITHM], algorithms: vec![JWT_ALGORITHM],
}; };
match jwt::decode(token, &PUBLIC_RSA_KEY, &validation) { match jsonwebtoken::decode(token, &PUBLIC_RSA_KEY, &validation) {
Ok(decoded) => Ok(decoded.claims), Ok(decoded) => Ok(decoded.claims),
Err(msg) => { Err(msg) => {
println!("Error validating jwt - {:#?}", msg); error!("Error validating jwt - {:#?}", msg);
Err(msg.to_string()) Err(msg.to_string())
} }
} }
@ -76,6 +75,7 @@ pub struct JWTClaims {
pub orgowner: Vec<String>, pub orgowner: Vec<String>,
pub orgadmin: Vec<String>, pub orgadmin: Vec<String>,
pub orguser: Vec<String>, pub orguser: Vec<String>,
pub orgmanager: Vec<String>,
// user security_stamp // user security_stamp
pub sstamp: String, pub sstamp: String,
@ -90,12 +90,11 @@ pub struct JWTClaims {
/// ///
/// Bearer token authentication /// Bearer token authentication
/// ///
use rocket::Outcome; use rocket::Outcome;
use rocket::request::{self, Request, FromRequest}; use rocket::request::{self, Request, FromRequest};
use db::DbConn; use crate::db::DbConn;
use db::models::{User, Organization, UserOrganization, UserOrgType, UserOrgStatus, Device}; use crate::db::models::{User, Organization, UserOrganization, UserOrgType, UserOrgStatus, Device};
pub struct Headers { pub struct Headers {
pub host: String, pub host: String,
@ -139,13 +138,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers {
// Get access_token // Get access_token
let access_token: &str = match request.headers().get_one("Authorization") { let access_token: &str = match request.headers().get_one("Authorization") {
Some(a) => { Some(a) => match a.rsplit("Bearer ").next() {
match a.rsplit("Bearer ").next() {
Some(split) => split, Some(split) => split,
None => err_handler!("No access token provided") None => err_handler!("No access token provided"),
} },
} None => err_handler!("No access token provided"),
None => err_handler!("No access token provided")
}; };
// Check JWT token is valid and get device and user from it // Check JWT token is valid and get device and user from it
@ -192,13 +189,12 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> { fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
match request.guard::<Headers>() { match request.guard::<Headers>() {
Outcome::Forward(f) => Outcome::Forward(f), Outcome::Forward(_) => Outcome::Forward(()),
Outcome::Failure(f) => Outcome::Failure(f), Outcome::Failure(f) => Outcome::Failure(f),
Outcome::Success(headers) => { Outcome::Success(headers) => {
// org_id is expected to be the first dynamic param // org_id is expected to be the second param ("/organizations/<org_id>")
match request.get_param::<String>(0) { match request.get_param::<String>(1) {
Err(_) => err_handler!("Error getting the organization id"), Some(Ok(org_id)) => {
Ok(org_id) => {
let conn = match request.guard::<DbConn>() { let conn = match request.guard::<DbConn>() {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB") _ => err_handler!("Error getting DB")
@ -226,14 +222,15 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
device: headers.device, device: headers.device,
user: headers.user, user: headers.user,
org_user_type: { org_user_type: {
if let Some(org_usr_type) = UserOrgType::from_i32(&org_user.type_) { if let Some(org_usr_type) = UserOrgType::from_i32(org_user.type_) {
org_usr_type org_usr_type
} else { // This should only happen if the DB is corrupted } else { // This should only happen if the DB is corrupted
err_handler!("Unknown user type in the database") err_handler!("Unknown user type in the database")
} }
}, },
}) })
} },
_ => err_handler!("Error getting the organization id"),
} }
} }
} }
@ -252,11 +249,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for AdminHeaders {
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> { fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
match request.guard::<OrgHeaders>() { match request.guard::<OrgHeaders>() {
Outcome::Forward(f) => Outcome::Forward(f), Outcome::Forward(_) => Outcome::Forward(()),
Outcome::Failure(f) => Outcome::Failure(f), Outcome::Failure(f) => Outcome::Failure(f),
Outcome::Success(headers) => { Outcome::Success(headers) => {
if headers.org_user_type >= UserOrgType::Admin { if headers.org_user_type >= UserOrgType::Admin {
Outcome::Success(Self{ Outcome::Success(Self {
host: headers.host, host: headers.host,
device: headers.device, device: headers.device,
user: headers.user, user: headers.user,
@ -281,11 +278,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders {
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> { fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
match request.guard::<OrgHeaders>() { match request.guard::<OrgHeaders>() {
Outcome::Forward(f) => Outcome::Forward(f), Outcome::Forward(_) => Outcome::Forward(()),
Outcome::Failure(f) => Outcome::Failure(f), Outcome::Failure(f) => Outcome::Failure(f),
Outcome::Success(headers) => { Outcome::Success(headers) => {
if headers.org_user_type == UserOrgType::Owner { if headers.org_user_type == UserOrgType::Owner {
Outcome::Success(Self{ Outcome::Success(Self {
host: headers.host, host: headers.host,
device: headers.device, device: headers.device,
user: headers.user, user: headers.user,
@ -297,3 +294,25 @@ impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders {
} }
} }
} }
///
/// Client IP address detection
///
use std::net::IpAddr;
pub struct ClientIp {
pub ip: IpAddr,
}
impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
type Error = ();
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
let ip = match request.client_ip() {
Some(addr) => addr,
None => "0.0.0.0".parse().unwrap(),
};
Outcome::Success(ClientIp { ip })
}
}
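The new ClientIp request guard wraps Rocket's request.client_ip() with a 0.0.0.0 fallback so handlers can take the caller's address as an ordinary argument. A minimal sketch of a guard of this shape wired into a Rocket 0.4 handler (the /whoami route is invented for illustration):

#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;

use std::net::IpAddr;

use rocket::request::{self, FromRequest, Request};
use rocket::Outcome;

pub struct ClientIp {
    pub ip: IpAddr,
}

impl<'a, 'r> FromRequest<'a, 'r> for ClientIp {
    type Error = ();

    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        // Fall back to 0.0.0.0 when Rocket cannot determine the peer address.
        let ip = request
            .client_ip()
            .unwrap_or_else(|| "0.0.0.0".parse().unwrap());

        Outcome::Success(ClientIp { ip })
    }
}

// Any handler can now ask for the client address just by naming the guard.
#[get("/whoami")]
fn whoami(ip: ClientIp) -> String {
    format!("request came from {}", ip.ip)
}

fn main() {
    rocket::ignite().mount("/", routes![whoami]).launch();
}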


@ -9,7 +9,7 @@ use rocket::http::Status;
use rocket::request::{self, FromRequest}; use rocket::request::{self, FromRequest};
use rocket::{Outcome, Request, State}; use rocket::{Outcome, Request, State};
use CONFIG; use crate::CONFIG;
/// An alias to the database connection used /// An alias to the database connection used
type Connection = SqliteConnection; type Connection = SqliteConnection;


@ -1,7 +1,7 @@
use serde_json::Value as JsonValue; use serde_json::Value;
use super::Cipher; use super::Cipher;
use CONFIG; use crate::CONFIG;
#[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[table_name = "attachments"] #[table_name = "attachments"]
@ -12,6 +12,7 @@ pub struct Attachment {
pub cipher_uuid: String, pub cipher_uuid: String,
pub file_name: String, pub file_name: String,
pub file_size: i32, pub file_size: i32,
pub key: Option<String>
} }
/// Local methods /// Local methods
@ -22,6 +23,7 @@ impl Attachment {
cipher_uuid, cipher_uuid,
file_name, file_name,
file_size, file_size,
key: None
} }
} }
@ -29,8 +31,8 @@ impl Attachment {
format!("{}/{}/{}", CONFIG.attachments_folder, self.cipher_uuid, self.id) format!("{}/{}/{}", CONFIG.attachments_folder, self.cipher_uuid, self.id)
} }
pub fn to_json(&self, host: &str) -> JsonValue { pub fn to_json(&self, host: &str) -> Value {
use util::get_display_size; use crate::util::get_display_size;
let web_path = format!("{}/attachments/{}/{}", host, self.cipher_uuid, self.id); let web_path = format!("{}/attachments/{}/{}", host, self.cipher_uuid, self.id);
let display_size = get_display_size(self.file_size); let display_size = get_display_size(self.file_size);
@ -41,6 +43,7 @@ impl Attachment {
"FileName": self.file_name, "FileName": self.file_name,
"Size": self.file_size.to_string(), "Size": self.file_size.to_string(),
"SizeName": display_size, "SizeName": display_size,
"Key": self.key,
"Object": "attachment" "Object": "attachment"
}) })
} }
@ -48,8 +51,8 @@ impl Attachment {
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use db::DbConn; use crate::db::DbConn;
use db::schema::attachments; use crate::db::schema::attachments;
/// Database methods /// Database methods
impl Attachment { impl Attachment {
@ -61,39 +64,21 @@ impl Attachment {
} }
pub fn delete(self, conn: &DbConn) -> QueryResult<()> { pub fn delete(self, conn: &DbConn) -> QueryResult<()> {
use util; crate::util::retry(
use std::{thread, time}; || {
diesel::delete(attachments::table.filter(attachments::id.eq(&self.id)))
.execute(&**conn)
},
10,
)?;
let mut retries = 10; crate::util::delete_file(&self.get_file_path());
loop {
match diesel::delete(
attachments::table.filter(
attachments::id.eq(&self.id)
)
).execute(&**conn) {
Ok(_) => break,
Err(err) => {
if retries < 1 {
println!("ERROR: Failed with 10 retries");
return Err(err)
} else {
retries -= 1;
println!("Had to retry! Retries left: {}", retries);
thread::sleep(time::Duration::from_millis(500));
continue
}
}
}
}
util::delete_file(&self.get_file_path());
Ok(()) Ok(())
} }
pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> QueryResult<()> { pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> QueryResult<()> {
for attachement in Attachment::find_by_cipher(&cipher_uuid, &conn) { for attachment in Attachment::find_by_cipher(&cipher_uuid, &conn) {
attachement.delete(&conn)?; attachment.delete(&conn)?;
} }
Ok(()) Ok(())
} }
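Attachment::delete above (and Device::save further down) replace the hand-rolled retry loop with crate::util::retry, whose body is not part of this diff. A plausible sketch of such a helper, retrying a fallible closure a fixed number of times with a short pause between attempts (an assumption about the utility, not its actual implementation):

use std::{thread, time};

// Run `func` up to `max_tries` times, sleeping briefly after each failure.
fn retry<F, T, E>(func: F, max_tries: i32) -> Result<T, E>
where
    F: Fn() -> Result<T, E>,
{
    let mut tries = 0;
    loop {
        match func() {
            Ok(value) => return Ok(value),
            Err(error) => {
                tries += 1;
                if tries >= max_tries {
                    return Err(error);
                }
                thread::sleep(time::Duration::from_millis(500));
            }
        }
    }
}

fn main() {
    // Simulate a statement that fails twice (e.g. a locked SQLite database)
    // before succeeding on the third attempt.
    let attempts = std::cell::Cell::new(0);
    let result: Result<&str, &str> = retry(
        || {
            attempts.set(attempts.get() + 1);
            if attempts.get() < 3 {
                Err("database is locked")
            } else {
                Ok("deleted")
            }
        },
        10,
    );
    println!("attempts: {}, result: {:?}", attempts.get(), result);
}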


@ -1,7 +1,5 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value as JsonValue; use serde_json::Value;
use uuid::Uuid;
use super::{User, Organization, Attachment, FolderCipher, CollectionCipher, UserOrganization, UserOrgType, UserOrgStatus}; use super::{User, Organization, Attachment, FolderCipher, CollectionCipher, UserOrganization, UserOrgType, UserOrgStatus};
@ -41,7 +39,7 @@ impl Cipher {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
Self { Self {
uuid: Uuid::new_v4().to_string(), uuid: crate::util::get_uuid(),
created_at: now, created_at: now,
updated_at: now, updated_at: now,
@ -63,28 +61,28 @@ impl Cipher {
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use db::DbConn; use crate::db::DbConn;
use db::schema::*; use crate::db::schema::*;
/// Database methods /// Database methods
impl Cipher { impl Cipher {
pub fn to_json(&self, host: &str, user_uuid: &str, conn: &DbConn) -> JsonValue { pub fn to_json(&self, host: &str, user_uuid: &str, conn: &DbConn) -> Value {
use serde_json; use serde_json;
use util::format_date; use crate::util::format_date;
use super::Attachment; use super::Attachment;
let attachments = Attachment::find_by_cipher(&self.uuid, conn); let attachments = Attachment::find_by_cipher(&self.uuid, conn);
let attachments_json: Vec<JsonValue> = attachments.iter().map(|c| c.to_json(host)).collect(); let attachments_json: Vec<Value> = attachments.iter().map(|c| c.to_json(host)).collect();
let fields_json: JsonValue = if let Some(ref fields) = self.fields { let fields_json: Value = if let Some(ref fields) = self.fields {
serde_json::from_str(fields).unwrap() serde_json::from_str(fields).unwrap()
} else { JsonValue::Null }; } else { Value::Null };
let password_history_json: JsonValue = if let Some(ref password_history) = self.password_history { let password_history_json: Value = if let Some(ref password_history) = self.password_history {
serde_json::from_str(password_history).unwrap() serde_json::from_str(password_history).unwrap()
} else { JsonValue::Null }; } else { Value::Null };
let mut data_json: JsonValue = serde_json::from_str(&self.data).unwrap(); let mut data_json: Value = serde_json::from_str(&self.data).unwrap();
// TODO: ******* Backwards compat start ********** // TODO: ******* Backwards compat start **********
// To remove backwards compatibility, just remove this entire section // To remove backwards compatibility, just remove this entire section
@ -234,7 +232,7 @@ impl Cipher {
} }
pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
match ciphers::table ciphers::table
.filter(ciphers::uuid.eq(&self.uuid)) .filter(ciphers::uuid.eq(&self.uuid))
.left_join(users_organizations::table.on( .left_join(users_organizations::table.on(
ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and(
@ -255,14 +253,11 @@ impl Cipher {
) )
)) ))
.select(ciphers::all_columns) .select(ciphers::all_columns)
.first::<Self>(&**conn).ok() { .first::<Self>(&**conn).ok().is_some()
Some(_) => true,
None => false
}
} }
pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
match ciphers::table ciphers::table
.filter(ciphers::uuid.eq(&self.uuid)) .filter(ciphers::uuid.eq(&self.uuid))
.left_join(users_organizations::table.on( .left_join(users_organizations::table.on(
ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and(
@ -281,10 +276,7 @@ impl Cipher {
) )
)) ))
.select(ciphers::all_columns) .select(ciphers::all_columns)
.first::<Self>(&**conn).ok() { .first::<Self>(&**conn).ok().is_some()
Some(_) => true,
None => false
}
} }
pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> { pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> {

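Across the models, Uuid::new_v4().to_string() calls are replaced by crate::util::get_uuid(). The helper's definition is not in this diff; it is presumably a thin wrapper along these lines (an assumption):

use uuid::Uuid;

// One place that decides how new record identifiers are generated.
pub fn get_uuid() -> String {
    Uuid::new_v4().to_string()
}

fn main() {
    println!("new record uuid: {}", get_uuid());
}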

@ -1,6 +1,4 @@
use serde_json::Value as JsonValue; use serde_json::Value;
use uuid::Uuid;
use super::{Organization, UserOrganization, UserOrgType, UserOrgStatus}; use super::{Organization, UserOrganization, UserOrgType, UserOrgStatus};
@ -18,14 +16,14 @@ pub struct Collection {
impl Collection { impl Collection {
pub fn new(org_uuid: String, name: String) -> Self { pub fn new(org_uuid: String, name: String) -> Self {
Self { Self {
uuid: Uuid::new_v4().to_string(), uuid: crate::util::get_uuid(),
org_uuid, org_uuid,
name, name,
} }
} }
pub fn to_json(&self) -> JsonValue { pub fn to_json(&self) -> Value {
json!({ json!({
"Id": self.uuid, "Id": self.uuid,
"OrganizationId": self.org_uuid, "OrganizationId": self.org_uuid,
@ -37,8 +35,8 @@ impl Collection {
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use db::DbConn; use crate::db::DbConn;
use db::schema::*; use crate::db::schema::*;
/// Database methods /// Database methods
impl Collection { impl Collection {
@ -150,15 +148,12 @@ impl Collection {
if user_org.access_all { if user_org.access_all {
true true
} else { } else {
match users_collections::table.inner_join(collections::table) users_collections::table.inner_join(collections::table)
.filter(users_collections::collection_uuid.eq(&self.uuid)) .filter(users_collections::collection_uuid.eq(&self.uuid))
.filter(users_collections::user_uuid.eq(&user_uuid)) .filter(users_collections::user_uuid.eq(&user_uuid))
.filter(users_collections::read_only.eq(false)) .filter(users_collections::read_only.eq(false))
.select(collections::all_columns) .select(collections::all_columns)
.first::<Self>(&**conn).ok() { .first::<Self>(&**conn).ok().is_some() // Read only or no access to collection
None => false, // Read only or no access to collection
Some(_) => true,
}
} }
} }
} }


@ -45,7 +45,7 @@ impl Device {
pub fn refresh_twofactor_remember(&mut self) -> String { pub fn refresh_twofactor_remember(&mut self) -> String {
use data_encoding::BASE64; use data_encoding::BASE64;
use crypto; use crate::crypto;
let twofactor_remember = BASE64.encode(&crypto::get_random(vec![0u8; 180])); let twofactor_remember = BASE64.encode(&crypto::get_random(vec![0u8; 180]));
self.twofactor_remember = Some(twofactor_remember.clone()); self.twofactor_remember = Some(twofactor_remember.clone());
@ -62,7 +62,7 @@ impl Device {
// If there is no refresh token, we create one // If there is no refresh token, we create one
if self.refresh_token.is_empty() { if self.refresh_token.is_empty() {
use data_encoding::BASE64URL; use data_encoding::BASE64URL;
use crypto; use crate::crypto;
self.refresh_token = BASE64URL.encode(&crypto::get_random_64()); self.refresh_token = BASE64URL.encode(&crypto::get_random_64());
} }
@ -71,14 +71,14 @@ impl Device {
let time_now = Utc::now().naive_utc(); let time_now = Utc::now().naive_utc();
self.updated_at = time_now; self.updated_at = time_now;
let orgowner: Vec<_> = orgs.iter().filter(|o| o.type_ == 0).map(|o| o.org_uuid.clone()).collect(); let orgowner: Vec<_> = orgs.iter().filter(|o| o.type_ == 0).map(|o| o.org_uuid.clone()).collect();
let orgadmin: Vec<_> = orgs.iter().filter(|o| o.type_ == 1).map(|o| o.org_uuid.clone()).collect(); let orgadmin: Vec<_> = orgs.iter().filter(|o| o.type_ == 1).map(|o| o.org_uuid.clone()).collect();
let orguser: Vec<_> = orgs.iter().filter(|o| o.type_ == 2).map(|o| o.org_uuid.clone()).collect(); let orguser: Vec<_> = orgs.iter().filter(|o| o.type_ == 2).map(|o| o.org_uuid.clone()).collect();
let orgmanager: Vec<_> = orgs.iter().filter(|o| o.type_ == 3).map(|o| o.org_uuid.clone()).collect();
// Create the JWT claims struct, to send to the client // Create the JWT claims struct, to send to the client
use auth::{encode_jwt, JWTClaims, DEFAULT_VALIDITY, JWT_ISSUER}; use crate::auth::{encode_jwt, JWTClaims, DEFAULT_VALIDITY, JWT_ISSUER};
let claims = JWTClaims { let claims = JWTClaims {
nbf: time_now.timestamp(), nbf: time_now.timestamp(),
exp: (time_now + *DEFAULT_VALIDITY).timestamp(), exp: (time_now + *DEFAULT_VALIDITY).timestamp(),
@ -93,6 +93,7 @@ impl Device {
orgowner, orgowner,
orgadmin, orgadmin,
orguser, orguser,
orgmanager,
sstamp: user.security_stamp.to_string(), sstamp: user.security_stamp.to_string(),
device: self.uuid.to_string(), device: self.uuid.to_string(),
@ -100,23 +101,29 @@ impl Device {
amr: vec!["Application".into()], amr: vec!["Application".into()],
}; };
(encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds()) (encode_jwt(&claims), DEFAULT_VALIDITY.num_seconds())
} }
} }
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use db::DbConn; use crate::db::DbConn;
use db::schema::devices; use crate::db::schema::devices;
/// Database methods /// Database methods
impl Device { impl Device {
pub fn save(&mut self, conn: &DbConn) -> QueryResult<()> { pub fn save(&mut self, conn: &DbConn) -> QueryResult<()> {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
crate::util::retry(
|| {
diesel::replace_into(devices::table) diesel::replace_into(devices::table)
.values(&*self).execute(&**conn).and(Ok(())) .values(&*self)
.execute(&**conn)
},
10,
)
.and(Ok(()))
} }
pub fn delete(self, conn: &DbConn) -> QueryResult<()> { pub fn delete(self, conn: &DbConn) -> QueryResult<()> {


@ -1,7 +1,5 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value as JsonValue; use serde_json::Value;
use uuid::Uuid;
use super::{User, Cipher}; use super::{User, Cipher};
@ -33,7 +31,7 @@ impl Folder {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
Self { Self {
uuid: Uuid::new_v4().to_string(), uuid: crate::util::get_uuid(),
created_at: now, created_at: now,
updated_at: now, updated_at: now,
@ -42,8 +40,8 @@ impl Folder {
} }
} }
pub fn to_json(&self) -> JsonValue { pub fn to_json(&self) -> Value {
use util::format_date; use crate::util::format_date;
json!({ json!({
"Id": self.uuid, "Id": self.uuid,
@ -65,8 +63,8 @@ impl FolderCipher {
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use db::DbConn; use crate::db::DbConn;
use db::schema::{folders, folders_ciphers}; use crate::db::schema::{folders, folders_ciphers};
/// Database methods /// Database methods
impl Folder { impl Folder {


@ -1,7 +1,6 @@
use std::cmp::Ordering; use std::cmp::Ordering;
use serde_json::Value as JsonValue; use serde_json::Value;
use uuid::Uuid;
use super::{User, CollectionUser, Invitation}; use super::{User, CollectionUser, Invitation};
#[derive(Debug, Identifiable, Queryable, Insertable)] #[derive(Debug, Identifiable, Queryable, Insertable)]
@ -78,10 +77,10 @@ impl PartialEq<i32> for UserOrgType {
impl PartialOrd<i32> for UserOrgType { impl PartialOrd<i32> for UserOrgType {
fn partial_cmp(&self, other: &i32) -> Option<Ordering> { fn partial_cmp(&self, other: &i32) -> Option<Ordering> {
-        if let Some(other) = Self::from_i32(other) {
+        if let Some(other) = Self::from_i32(*other) {
             return Some(self.cmp(&other))
         }
-        return None
+        None
     }
     fn gt(&self, other: &i32) -> bool {
@@ -108,10 +107,10 @@ impl PartialEq<UserOrgType> for i32 {
 impl PartialOrd<UserOrgType> for i32 {
     fn partial_cmp(&self, other: &UserOrgType) -> Option<Ordering> {
-        if let Some(self_type) = UserOrgType::from_i32(self) {
+        if let Some(self_type) = UserOrgType::from_i32(*self) {
             return Some(self_type.cmp(other))
         }
-        return None
+        None
     }
     fn lt(&self, other: &UserOrgType) -> bool {
@@ -141,7 +140,7 @@ impl UserOrgType {
         }
     }
-    pub fn from_i32(i: &i32) -> Option<Self> {
+    pub fn from_i32(i: i32) -> Option<Self> {
         match i {
             0 => Some(UserOrgType::Owner),
             1 => Some(UserOrgType::Admin),
@@ -159,7 +158,7 @@ impl Organization {
     pub fn new(name: String, billing_email: String) -> Self {
         Self {
-            uuid: Uuid::new_v4().to_string(),
+            uuid: crate::util::get_uuid(),
            name,
            billing_email,
@@ -174,7 +173,7 @@ impl Organization {
         }
     }
-    pub fn to_json(&self) -> JsonValue {
+    pub fn to_json(&self) -> Value {
         json!({
             "Id": self.uuid,
             "Name": self.name,
@@ -206,7 +205,7 @@ impl Organization {
 impl UserOrganization {
     pub fn new(user_uuid: String, org_uuid: String) -> Self {
         Self {
-            uuid: Uuid::new_v4().to_string(),
+            uuid: crate::util::get_uuid(),
            user_uuid,
            org_uuid,
@@ -236,8 +235,8 @@ impl UserOrganization {
 use diesel;
 use diesel::prelude::*;
-use db::DbConn;
-use db::schema::{organizations, users_organizations, users_collections, ciphers_collections};
+use crate::db::DbConn;
+use crate::db::schema::{organizations, users_organizations, users_collections, ciphers_collections};
 /// Database methods
 impl Organization {
@@ -285,7 +284,7 @@ impl Organization {
 }
 impl UserOrganization {
-    pub fn to_json(&self, conn: &DbConn) -> JsonValue {
+    pub fn to_json(&self, conn: &DbConn) -> Value {
         let org = Organization::find_by_uuid(&self.org_uuid, conn).unwrap();
         json!({
@@ -313,7 +312,7 @@ impl UserOrganization {
         })
     }
-    pub fn to_json_user_details(&self, conn: &DbConn) -> JsonValue {
+    pub fn to_json_user_details(&self, conn: &DbConn) -> Value {
         let user = User::find_by_uuid(&self.user_uuid, conn).unwrap();
         json!({
@@ -330,7 +329,7 @@ impl UserOrganization {
         })
     }
-    pub fn to_json_collection_user_details(&self, read_only: bool, conn: &DbConn) -> JsonValue {
+    pub fn to_json_collection_user_details(&self, read_only: bool, conn: &DbConn) -> Value {
         let user = User::find_by_uuid(&self.user_uuid, conn).unwrap();
         json!({
@@ -345,7 +344,7 @@ impl UserOrganization {
         })
     }
-    pub fn to_json_details(&self, conn: &DbConn) -> JsonValue {
+    pub fn to_json_details(&self, conn: &DbConn) -> Value {
         let coll_uuids = if self.access_all {
             vec![] // If we have complete access, no need to fill the array
         } else {
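
A note on the hunks above for readers skimming the diff: `from_i32` now takes the integer by value, and the `PartialEq`/`PartialOrd` impls between `i32` and `UserOrgType` are what let callers compare a raw role value from the database against the enum directly. A minimal sketch of the intended call pattern (the helper below is illustrative, not part of this commit):

    fn is_admin_or_owner(raw_type: i32) -> bool {
        // Relies on the PartialEq<UserOrgType> impl for i32 shown above;
        // UserOrgType::from_i32(raw_type) likewise no longer needs a borrow.
        raw_type == UserOrgType::Owner || raw_type == UserOrgType::Admin
    }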


@@ -1,6 +1,4 @@
-use serde_json::Value as JsonValue;
-use uuid::Uuid;
+use serde_json::Value;
 use super::User;
@@ -36,7 +34,7 @@ pub enum TwoFactorType {
 impl TwoFactor {
     pub fn new(user_uuid: String, type_: TwoFactorType, data: String) -> Self {
         Self {
-            uuid: Uuid::new_v4().to_string(),
+            uuid: crate::util::get_uuid(),
            user_uuid,
            type_: type_ as i32,
            enabled: true,
@@ -59,7 +57,7 @@ impl TwoFactor {
         generated == totp_code
     }
-    pub fn to_json(&self) -> JsonValue {
+    pub fn to_json(&self) -> Value {
         json!({
             "Enabled": self.enabled,
             "Key": "", // This key and value vary
@@ -67,7 +65,7 @@ impl TwoFactor {
         })
     }
-    pub fn to_json_list(&self) -> JsonValue {
+    pub fn to_json_list(&self) -> Value {
         json!({
             "Enabled": self.enabled,
             "Type": self.type_,
@@ -78,8 +76,8 @@ impl TwoFactor {
 use diesel;
 use diesel::prelude::*;
-use db::DbConn;
-use db::schema::twofactor;
+use crate::db::DbConn;
+use crate::db::schema::twofactor;
 /// Database methods
 impl TwoFactor {
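
For context on the `generated == totp_code` line kept above: the check compares a freshly computed TOTP value against the code the client submitted. A rough sketch of such a check using the `oath` and `data-encoding` crates already in the dependency tree; the constants, the secret decoding and the helper itself are illustrative, not the exact code of this file:

    use data_encoding::BASE32;
    use oath::{totp_raw_now, HashType};

    fn check_totp(secret_b32: &str, candidate: u64) -> bool {
        // Decode the shared secret, then compute the current 6-digit, 30-second TOTP.
        let key = match BASE32.decode(secret_b32.as_bytes()) {
            Ok(k) => k,
            Err(_) => return false,
        };
        totp_raw_now(&key, 6, 0, 30, &HashType::SHA1) == candidate
    }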


@@ -1,10 +1,8 @@
 use chrono::{NaiveDateTime, Utc};
-use serde_json::Value as JsonValue;
-use uuid::Uuid;
-use crypto;
-use CONFIG;
+use serde_json::Value;
+use crate::crypto;
+use crate::CONFIG;
 #[derive(Debug, Identifiable, Queryable, Insertable)]
@@ -50,7 +48,7 @@ impl User {
         let email = mail.to_lowercase();
         Self {
-            uuid: Uuid::new_v4().to_string(),
+            uuid: crate::util::get_uuid(),
            created_at: now,
            updated_at: now,
            name: email.clone(),
@@ -61,7 +59,7 @@ impl User {
            salt: crypto::get_random_64(),
            password_iterations: CONFIG.password_iterations,
-            security_stamp: Uuid::new_v4().to_string(),
+            security_stamp: crate::util::get_uuid(),
            password_hint: None,
            private_key: None,
@@ -97,11 +95,10 @@ impl User {
         self.password_hash = crypto::hash_password(password.as_bytes(),
                                                    &self.salt,
                                                    self.password_iterations as u32);
-        self.reset_security_stamp();
     }
     pub fn reset_security_stamp(&mut self) {
-        self.security_stamp = Uuid::new_v4().to_string();
+        self.security_stamp = crate::util::get_uuid();
     }
     pub fn is_server_admin(&self) -> bool {
@@ -114,20 +111,20 @@ impl User {
 use diesel;
 use diesel::prelude::*;
-use db::DbConn;
-use db::schema::{users, invitations};
+use crate::db::DbConn;
+use crate::db::schema::{users, invitations};
 use super::{Cipher, Folder, Device, UserOrganization, UserOrgType};
 /// Database methods
 impl User {
-    pub fn to_json(&self, conn: &DbConn) -> JsonValue {
+    pub fn to_json(&self, conn: &DbConn) -> Value {
         use super::{UserOrganization, UserOrgType, UserOrgStatus, TwoFactor};
         let mut orgs = UserOrganization::find_by_user(&self.uuid, conn);
         if self.is_server_admin() {
             orgs.push(UserOrganization::new_virtual(self.uuid.clone(), UserOrgType::Owner, UserOrgStatus::Confirmed));
         }
-        let orgs_json: Vec<JsonValue> = orgs.iter().map(|c| c.to_json(&conn)).collect();
+        let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(&conn)).collect();
         let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).is_empty();
         json!({
@@ -181,7 +178,7 @@ impl User {
     pub fn update_uuid_revision(uuid: &str, conn: &DbConn) {
         if let Some(mut user) = User::find_by_uuid(&uuid, conn) {
             if user.update_revision(conn).is_err(){
-                println!("Warning: Failed to update revision for {}", user.email);
+                warn!("Failed to update revision for {}", user.email);
             };
         };
     }
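
One behavioural change worth calling out in the hunk above: `set_password` no longer resets the security stamp as a side effect, so call sites that need to invalidate existing sessions must now do so explicitly. A minimal sketch of the expected call sequence (the wrapper function is illustrative, not taken from this commit):

    fn change_password(user: &mut User, new_password: &str) {
        // set_password only re-hashes the password now; rotating the stamp
        // (which invalidates other sessions) is the caller's responsibility.
        user.set_password(new_password);
        user.reset_security_stamp();
    }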


@@ -4,6 +4,7 @@ table! {
         cipher_uuid -> Text,
         file_name -> Text,
         file_size -> Integer,
+        key -> Nullable<Text>,
     }
 }
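
The new column is nullable, so the matching Diesel model (presumably the attachments table, judging by the surrounding columns) carries an `Option<String>` field. A sketch of what the corresponding struct could look like; apart from `key`, the fields are assumed from context rather than copied from this commit:

    #[derive(Debug, Identifiable, Queryable, Insertable)]
    #[table_name = "attachments"]
    pub struct Attachment {
        pub id: String,
        pub cipher_uuid: String,
        pub file_name: String,
        pub file_size: i32,
        pub key: Option<String>, // maps to the new `key -> Nullable<Text>` column
    }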


@@ -4,7 +4,7 @@ use lettre::smtp::ConnectionReuseParameters;
 use lettre::smtp::authentication::Credentials;
 use lettre_email::EmailBuilder;
-use MailConfig;
+use crate::MailConfig;
 fn mailer(config: &MailConfig) -> SmtpTransport {
     let client_security = if config.smtp_ssl {


@@ -1,41 +1,15 @@
-#![feature(plugin, custom_derive, vec_remove_item, try_trait)]
-#![plugin(rocket_codegen)]
-#![recursion_limit="128"]
+#![feature(proc_macro_hygiene, decl_macro, vec_remove_item, try_trait)]
+#![recursion_limit = "128"]
 #![allow(proc_macro_derive_resolution_fallback)] // TODO: Remove this when diesel update fixes warnings
-extern crate rocket;
-extern crate rocket_contrib;
-extern crate reqwest;
-extern crate multipart;
-extern crate ws;
-extern crate rmpv;
-extern crate chashmap;
-extern crate serde;
-#[macro_use]
-extern crate serde_derive;
-#[macro_use]
-extern crate serde_json;
-#[macro_use]
-extern crate diesel;
-#[macro_use]
-extern crate diesel_migrations;
-extern crate ring;
-extern crate uuid;
-extern crate chrono;
-extern crate oath;
-extern crate data_encoding;
-extern crate jsonwebtoken as jwt;
-extern crate u2f;
-extern crate yubico;
-extern crate dotenv;
-#[macro_use]
-extern crate lazy_static;
-#[macro_use]
-extern crate num_derive;
-extern crate num_traits;
-extern crate lettre;
-extern crate lettre_email;
-extern crate native_tls;
-extern crate byteorder;
+#[macro_use] extern crate rocket;
+#[macro_use] extern crate serde_derive;
+#[macro_use] extern crate serde_json;
+#[macro_use] extern crate log;
+#[macro_use] extern crate diesel;
+#[macro_use] extern crate diesel_migrations;
+#[macro_use] extern crate lazy_static;
+#[macro_use] extern crate num_derive;
 use std::{path::Path, process::{exit, Command}};
 use rocket::Rocket;
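
Most of this hunk is the switch to the Rust 2018 edition: `extern crate` declarations become implicit, and only the crates whose macros are pulled in with `#[macro_use]` stay listed (now including `log`, which provides the `info!`/`warn!`/`error!` calls used further down). This is also why paths throughout the diff gain the `crate::` prefix, for example:

    // Under the 2018 edition the crate root is named explicitly, so the old
    // `use util::{get_env, get_env_or};` becomes:
    use crate::util::{get_env, get_env_or};
    use crate::db::DbConn;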
@@ -50,6 +24,9 @@ mod auth;
 mod mail;
 fn init_rocket() -> Rocket {
+    // TODO: TO HIDE MOUNTING LOG, call ignite, set logging to disabled, call all the mounts, and then enable it again
     rocket::ignite()
         .mount("/", api::web_routes())
         .mount("/api", api::core_routes())
@@ -69,7 +46,7 @@ mod migrations {
     pub fn run_migrations() {
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let connection = ::db::get_connection().expect("Can't conect to DB");
+        let connection = crate::db::get_connection().expect("Can't conect to DB");
         use std::io::stdout;
         embedded_migrations::run_with_output(&connection, &mut stdout()).expect("Can't run migrations");
@@ -77,6 +54,10 @@ }
 }
 fn main() {
+    if CONFIG.extended_logging {
+        init_logging().ok();
+    }
     check_db();
     check_rsa_keys();
     check_web_vault();
@@ -85,13 +66,61 @@ fn main() {
     init_rocket().launch();
 }
+fn init_logging() -> Result<(), fern::InitError> {
+    let mut logger = fern::Dispatch::new()
+        .format(|out, message, record| {
+            out.finish(format_args!(
+                "{}[{}][{}] {}",
+                chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
+                record.target(),
+                record.level(),
+                message
+            ))
+        })
+        .level(log::LevelFilter::Debug)
+        .level_for("hyper", log::LevelFilter::Warn)
+        .level_for("ws", log::LevelFilter::Info)
+        .level_for("multipart", log::LevelFilter::Info)
+        .chain(std::io::stdout());
+    if let Some(log_file) = CONFIG.log_file.as_ref() {
+        logger = logger.chain(fern::log_file(log_file)?);
+    }
+    logger = chain_syslog(logger);
+    logger.apply()?;
+    Ok(())
+}
+#[cfg(not(feature = "enable_syslog"))]
+fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch { logger }
+#[cfg(feature = "enable_syslog")]
+fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
+    let syslog_fmt = syslog::Formatter3164 {
+        facility: syslog::Facility::LOG_USER,
+        hostname: None,
+        process: "bitwarden_rs".into(),
+        pid: 0,
+    };
+    match syslog::unix(syslog_fmt) {
+        Ok(sl) => logger.chain(sl),
+        Err(e) => {
+            error!("Unable to connect to syslog: {:?}", e);
+            logger
+        }
+    }
+}
 fn check_db() {
     let path = Path::new(&CONFIG.database_url);
     if let Some(parent) = path.parent() {
         use std::fs;
         if fs::create_dir_all(parent).is_err() {
-            println!("Error creating database directory");
+            error!("Error creating database directory");
             exit(1);
         }
     }
@@ -106,16 +135,16 @@ fn check_rsa_keys() {
     // If the RSA keys don't exist, try to create them
     if !util::file_exists(&CONFIG.private_rsa_key)
         || !util::file_exists(&CONFIG.public_rsa_key) {
-        println!("JWT keys don't exist, checking if OpenSSL is available...");
+        info!("JWT keys don't exist, checking if OpenSSL is available...");
         Command::new("openssl")
             .arg("version")
             .output().unwrap_or_else(|_| {
-            println!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
+            info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
             exit(1);
         });
-        println!("OpenSSL detected, creating keys...");
+        info!("OpenSSL detected, creating keys...");
         let mut success = Command::new("openssl").arg("genrsa")
             .arg("-out").arg(&CONFIG.private_rsa_key_pem)
@@ -139,9 +168,9 @@ fn check_rsa_keys() {
             .status.success();
         if success {
-            println!("Keys created correctly.");
+            info!("Keys created correctly.");
         } else {
-            println!("Error creating keys, exiting...");
+            error!("Error creating keys, exiting...");
             exit(1);
         }
     }
@@ -155,7 +184,7 @@ fn check_web_vault() {
     let index_path = Path::new(&CONFIG.web_vault_folder).join("index.html");
     if !index_path.exists() {
-        println!("Web vault is not found. Please follow the steps in the README to install it");
+        error!("Web vault is not found. Please follow the steps in the README to install it");
         exit(1);
     }
 }
@@ -177,7 +206,7 @@ pub struct MailConfig {
 impl MailConfig {
     fn load() -> Option<Self> {
-        use util::{get_env, get_env_or};
+        use crate::util::{get_env, get_env_or};
         // When SMTP_HOST is absent, we assume the user does not want to enable it.
         let smtp_host = match get_env("SMTP_HOST") {
@@ -186,7 +215,7 @@ impl MailConfig {
         };
         let smtp_from = get_env("SMTP_FROM").unwrap_or_else(|| {
-            println!("Please specify SMTP_FROM to enable SMTP support.");
+            error!("Please specify SMTP_FROM to enable SMTP support.");
             exit(1);
         });
@@ -202,7 +231,7 @@ impl MailConfig {
         let smtp_username = get_env("SMTP_USERNAME");
         let smtp_password = get_env("SMTP_PASSWORD").or_else(|| {
             if smtp_username.as_ref().is_some() {
-                println!("SMTP_PASSWORD is mandatory when specifying SMTP_USERNAME.");
+                error!("SMTP_PASSWORD is mandatory when specifying SMTP_USERNAME.");
                 exit(1);
             } else {
                 None
@@ -236,6 +265,9 @@ pub struct Config {
     websocket_enabled: bool,
     websocket_url: String,
+    extended_logging: bool,
+    log_file: Option<String>,
     local_icon_extractor: bool,
     signups_allowed: bool,
     invitations_allowed: bool,
@@ -256,7 +288,7 @@ pub struct Config {
 impl Config {
     fn load() -> Self {
-        use util::{get_env, get_env_or};
+        use crate::util::{get_env, get_env_or};
         dotenv::dotenv().ok();
         let df = get_env_or("DATA_FOLDER", "data".to_string());
@@ -282,6 +314,9 @@ impl Config {
             websocket_enabled: get_env_or("WEBSOCKET_ENABLED", false),
             websocket_url: format!("{}:{}", get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()), get_env_or("WEBSOCKET_PORT", 3012)),
+            extended_logging: get_env_or("EXTENDED_LOGGING", true),
+            log_file: get_env("LOG_FILE"),
             local_icon_extractor: get_env_or("LOCAL_ICON_EXTRACTOR", false),
             signups_allowed: get_env_or("SIGNUPS_ALLOWED", true),
             server_admin_email: get_env("SERVER_ADMIN_EMAIL"),
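
The two new Config fields reuse the existing environment helpers. For orientation, a simplified sketch of what `get_env`/`get_env_or` do; the real definitions live in util.rs and may differ in detail:

    use std::env;
    use std::str::FromStr;

    fn get_env<V: FromStr>(key: &str) -> Option<V> {
        // Read the variable and try to parse it into the requested type (bool, u32, String, ...).
        env::var(key).ok().and_then(|v| v.parse().ok())
    }

    fn get_env_or<V: FromStr>(key: &str, default: V) -> V {
        get_env(key).unwrap_or(default)
    }

Under that reading, `EXTENDED_LOGGING` defaults to true when unset, while `LOG_FILE` stays `None` unless a path is provided.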


@@ -2,36 +2,46 @@
 /// Macros
 ///
 #[macro_export]
-macro_rules! err {
-    ($err:expr, $msg:expr) => {{
-        println!("ERROR: {}", $msg);
+macro_rules! _err_object {
+    ($msg:expr) => {{
         err_json!(json!({
-            "error": $err,
-            "error_description": $err,
+            "Message": "",
+            "error": "",
+            "error_description": "",
+            "ValidationErrors": {"": [ $msg ]},
             "ErrorModel": {
                 "Message": $msg,
+                "ValidationErrors": null,
+                "ExceptionMessage": null,
+                "ExceptionStackTrace": null,
+                "InnerExceptionMessage": null,
                 "Object": "error"
-        }}))
+            },
+            "Object": "error"
+        }))
     }};
-    ($msg:expr) => { err!("unknown_error", $msg) }
+}
+#[macro_export]
+macro_rules! err {
+    ($msg:expr) => {{
+        error!("{}", $msg);
+        _err_object!($msg)
+    }};
+    ($usr_msg:expr, $log_value:expr) => {{
+        error!("{}: {:#?}", $usr_msg, $log_value);
+        _err_object!($usr_msg)
+    }}
 }
 #[macro_export]
 macro_rules! err_json {
     ($expr:expr) => {{
-        return Err($crate::rocket::response::status::BadRequest(Some($crate::rocket_contrib::Json($expr))));
+        return Err(rocket::response::status::BadRequest(Some(rocket_contrib::json::Json($expr))));
     }}
 }
 #[macro_export]
 macro_rules! err_handler {
     ($expr:expr) => {{
-        println!("ERROR: {}", $expr);
-        return $crate::rocket::Outcome::Failure(($crate::rocket::http::Status::Unauthorized, $expr));
+        error!("{}", $expr);
+        return rocket::Outcome::Failure((rocket::http::Status::Unauthorized, $expr));
     }}
 }
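
Since `err!` now takes a user-facing message plus an optional value to log, a typical call site looks roughly like this; the handler, its result alias and the organization-name check are illustrative, not taken from this diff (the `json!` and `error!` macros come from the crate-level `#[macro_use]` imports shown earlier):

    use rocket::response::status::BadRequest;
    use rocket_contrib::json::Json;
    use serde_json::Value;

    type JsonResult = Result<Json<Value>, BadRequest<Json<Value>>>;

    fn rename_org(new_name: &str) -> JsonResult {
        if new_name.is_empty() {
            // Logs via error!() and returns the Bitwarden-style error object
            // built by _err_object!/err_json! above.
            err!("Organization name can't be empty")
        }
        Ok(Json(json!({ "Name": new_name })))
    }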
@@ -92,6 +102,10 @@ pub fn get_display_size(size: i32) -> String {
     format!("{} {}", size, UNITS[unit_counter])
 }
+pub fn get_uuid() -> String {
+    uuid::Uuid::new_v4().to_string()
+}
 ///
 /// String util methods
@@ -238,6 +252,33 @@ fn upcase_value(value: &Value) -> Value {
 fn _process_key(key: &str) -> String {
     match key.to_lowercase().as_ref() {
         "ssn" => "SSN".into(),
-        _ => self::upcase_first(key)
+        _ => self::upcase_first(key),
     }
 }
+//
+// Retry methods
+//
+pub fn retry<F, T, E>(func: F, max_tries: i32) -> Result<T, E>
+where
+    F: Fn() -> Result<T, E>,
+{
+    use std::{thread::sleep, time::Duration};
+    let mut tries = 0;
+    loop {
+        match func() {
+            ok @ Ok(_) => return ok,
+            err @ Err(_) => {
+                tries += 1;
+                if tries >= max_tries {
+                    return err;
+                }
+                sleep(Duration::from_millis(500));
+            }
+        }
+    }
+}
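
A short usage sketch of the new `retry` helper; the fallible operation below is a stand-in for whatever a caller needs to retry, not code from this commit:

    fn connect_once() -> Result<String, String> {
        // Stand-in for any fallible operation worth retrying.
        Err("connection refused".to_string())
    }

    fn connect_with_retries() -> Result<String, String> {
        // Try up to 10 times, sleeping 500 ms between attempts (see retry() above).
        retry(connect_once, 10)
    }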