
Start using rustfmt and some style changes to make some lines shorter

This commit is contained in:
Daniel García 2018-12-30 23:34:31 +01:00
Parent 72ed05c4a4
Commit 30e768613b
No GPG key could be found for this signature
GPG key ID: FC8A7D14C3CD543A
26 changed files with 1172 additions and 898 deletions

rustfmt.toml Normal file
View File

@ -0,0 +1 @@
max_width = 120
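The new rustfmt.toml sets a single non-default option: a 120-column maximum line width, so any item longer than that is wrapped when the formatter runs (for example via `cargo fmt`), which is where most of the churn in this diff comes from. A minimal before/after of that effect, excerpted from the update_cipher_from_data signature changed later in this commit:

// Before: a single line well over 120 columns
pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &Headers, shared_to_collection: bool, conn: &DbConn, ws: &State<WebSocketUsers>, ut: UpdateType) -> EmptyResult {

// After formatting with max_width = 120 (plus the ws -> nt rename also made in this commit):
pub fn update_cipher_from_data(
    cipher: &mut Cipher,
    data: CipherData,
    headers: &Headers,
    shared_to_collection: bool,
    conn: &DbConn,
    nt: &Notify,
    ut: UpdateType,
) -> EmptyResult {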

View File

@ -3,13 +3,13 @@ use rocket_contrib::json::Json;
use crate::db::models::*; use crate::db::models::*;
use crate::db::DbConn; use crate::db::DbConn;
use crate::api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString, PasswordData, UpdateType, WebSocketUsers}; use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
use crate::auth::{Headers, decode_invite_jwt, InviteJWTClaims}; use crate::auth::{decode_invite_jwt, Headers, InviteJWTClaims};
use crate::mail; use crate::mail;
use crate::CONFIG; use crate::CONFIG;
use rocket::{Route, State}; use rocket::Route;
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
routes![ routes![
@ -74,7 +74,7 @@ fn register(data: JsonUpcase<RegisterData>, conn: DbConn) -> EmptyResult {
} else { } else {
let token = match &data.Token { let token = match &data.Token {
Some(token) => token, Some(token) => token,
None => err!("No valid invite token") None => err!("No valid invite token"),
}; };
let claims: InviteJWTClaims = decode_invite_jwt(&token)?; let claims: InviteJWTClaims = decode_invite_jwt(&token)?;
@ -257,7 +257,7 @@ struct KeyData {
} }
#[post("/accounts/key", data = "<data>")] #[post("/accounts/key", data = "<data>")]
fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let data: KeyData = data.into_inner().data; let data: KeyData = data.into_inner().data;
if !headers.user.check_valid_password(&data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.MasterPasswordHash) {
@ -294,7 +294,15 @@ fn post_rotatekey(data: JsonUpcase<KeyData>, headers: Headers, conn: DbConn, ws:
err!("The cipher is not owned by the user") err!("The cipher is not owned by the user")
} }
update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherUpdate)? update_cipher_from_data(
&mut saved_cipher,
cipher_data,
&headers,
false,
&conn,
&nt,
UpdateType::CipherUpdate,
)?
} }
// Update user data // Update user data
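Note that the handler changes above are not pure formatting: every affected route swaps its ws: State<WebSocketUsers> parameter for nt: Notify, and the UpdateType variants lose their Sync prefix. The swap compiles because Rocket injects any handler argument whose type implements FromRequest. The real Notify type lives elsewhere in this crate and is not part of this diff; the stand-in below is only an assumed sketch of that mechanism in Rocket 0.4 style:

use rocket::request::{self, FromRequest, Request};

// Hypothetical stand-in for the crate's real Notify guard, shown only to
// illustrate why `nt: Notify` can replace `ws: State<WebSocketUsers>` in a
// handler signature: any type implementing FromRequest can be a route argument.
pub struct Notify;

impl<'a, 'r> FromRequest<'a, 'r> for Notify {
    type Error = ();

    fn from_request(_request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        request::Outcome::Success(Notify)
    }
}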

View File

@ -1,8 +1,8 @@
use std::collections::{HashSet, HashMap}; use std::collections::{HashMap, HashSet};
use std::path::Path; use std::path::Path;
use rocket::http::ContentType; use rocket::http::ContentType;
use rocket::{request::Form, Data, Route, State}; use rocket::{request::Form, Data, Route};
use rocket_contrib::json::Json; use rocket_contrib::json::Json;
use serde_json::Value; use serde_json::Value;
@ -17,7 +17,7 @@ use crate::db::DbConn;
use crate::crypto; use crate::crypto;
use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, PasswordData, UpdateType, WebSocketUsers}; use crate::api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType};
use crate::auth::Headers; use crate::auth::Headers;
use crate::CONFIG; use crate::CONFIG;
@ -56,7 +56,6 @@ pub fn routes() -> Vec<Route> {
delete_all, delete_all,
move_cipher_selected, move_cipher_selected,
move_cipher_selected_put, move_cipher_selected_put,
post_collections_update, post_collections_update,
post_collections_admin, post_collections_admin,
put_collections_admin, put_collections_admin,
@ -80,9 +79,16 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
let collections_json: Vec<Value> = collections.iter().map(|c| c.to_json()).collect(); let collections_json: Vec<Value> = collections.iter().map(|c| c.to_json()).collect();
let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn); let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
let domains_json = if data.exclude_domains { Value::Null } else { api::core::get_eq_domains(headers).unwrap().into_inner() }; let domains_json = if data.exclude_domains {
Value::Null
} else {
api::core::get_eq_domains(headers).unwrap().into_inner()
};
Ok(Json(json!({ Ok(Json(json!({
"Profile": user_json, "Profile": user_json,
@ -98,7 +104,10 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult { fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult {
let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn); let ciphers = Cipher::find_by_user(&headers.user.uuid, &conn);
let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
Ok(Json(json!({ Ok(Json(json!({
"Data": ciphers_json, "Data": ciphers_json,
@ -111,7 +120,7 @@ fn get_ciphers(headers: Headers, conn: DbConn) -> JsonResult {
fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult {
let cipher = match Cipher::find_by_uuid(&uuid, &conn) { let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) {
@ -166,7 +175,7 @@ pub struct CipherData {
// These are used during key rotation // These are used during key rotation
#[serde(rename = "Attachments")] #[serde(rename = "Attachments")]
_Attachments: Option<Value>, // Unused, contains map of {id: filename} _Attachments: Option<Value>, // Unused, contains map of {id: filename}
Attachments2: Option<HashMap<String, Attachments2Data>> Attachments2: Option<HashMap<String, Attachments2Data>>,
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
@ -177,44 +186,55 @@ pub struct Attachments2Data {
} }
#[post("/ciphers/admin", data = "<data>")] #[post("/ciphers/admin", data = "<data>")]
fn post_ciphers_admin(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_ciphers_admin(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner().data;
let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone());
cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.user_uuid = Some(headers.user.uuid.clone());
cipher.save(&conn)?; cipher.save(&conn)?;
share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &ws) share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt)
} }
#[post("/ciphers/create", data = "<data>")] #[post("/ciphers/create", data = "<data>")]
fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
post_ciphers_admin(data, headers, conn, ws) post_ciphers_admin(data, headers, conn, nt)
} }
#[post("/ciphers", data = "<data>")] #[post("/ciphers", data = "<data>")]
fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
let data: CipherData = data.into_inner().data; let data: CipherData = data.into_inner().data;
let mut cipher = Cipher::new(data.Type, data.Name.clone()); let mut cipher = Cipher::new(data.Type, data.Name.clone());
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate)?; update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate)?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
} }
pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &Headers, shared_to_collection: bool, conn: &DbConn, ws: &State<WebSocketUsers>, ut: UpdateType) -> EmptyResult { pub fn update_cipher_from_data(
cipher: &mut Cipher,
data: CipherData,
headers: &Headers,
shared_to_collection: bool,
conn: &DbConn,
nt: &Notify,
ut: UpdateType,
) -> EmptyResult {
if let Some(org_id) = data.OrganizationId { if let Some(org_id) = data.OrganizationId {
match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
None => err!("You don't have permission to add item to organization"), None => err!("You don't have permission to add item to organization"),
Some(org_user) => if shared_to_collection Some(org_user) => {
if shared_to_collection
|| org_user.has_full_access() || org_user.has_full_access()
|| cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { || cipher.is_write_accessible_to_user(&headers.user.uuid, &conn)
{
cipher.organization_uuid = Some(org_id); cipher.organization_uuid = Some(org_id);
cipher.user_uuid = None; cipher.user_uuid = None;
} else { } else {
err!("You don't have permission to add cipher directly to organization") err!("You don't have permission to add cipher directly to organization")
} }
} }
}
} else { } else {
cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.user_uuid = Some(headers.user.uuid.clone());
} }
@ -226,7 +246,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
err!("Folder is not owned by user") err!("Folder is not owned by user")
} }
} }
None => err!("Folder doesn't exist") None => err!("Folder doesn't exist"),
} }
} }
@ -235,7 +255,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
for (id, attachment) in attachments { for (id, attachment) in attachments {
let mut saved_att = match Attachment::find_by_id(&id, &conn) { let mut saved_att = match Attachment::find_by_id(&id, &conn) {
Some(att) => att, Some(att) => att,
None => err!("Attachment doesn't exist") None => err!("Attachment doesn't exist"),
}; };
if saved_att.cipher_uuid != cipher.uuid { if saved_att.cipher_uuid != cipher.uuid {
@ -254,12 +274,12 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
2 => data.SecureNote, 2 => data.SecureNote,
3 => data.Card, 3 => data.Card,
4 => data.Identity, 4 => data.Identity,
_ => err!("Invalid type") _ => err!("Invalid type"),
}; };
let mut type_data = match type_data_opt { let mut type_data = match type_data_opt {
Some(data) => data, Some(data) => data,
None => err!("Data missing") None => err!("Data missing"),
}; };
// TODO: ******* Backwards compat start ********** // TODO: ******* Backwards compat start **********
@ -280,7 +300,7 @@ pub fn update_cipher_from_data(cipher: &mut Cipher, data: CipherData, headers: &
cipher.save(&conn)?; cipher.save(&conn)?;
ws.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn)); nt.send_cipher_update(ut, &cipher, &cipher.update_users_revision(&conn));
cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn) cipher.move_to_folder(data.FolderId, &headers.user.uuid, &conn)
} }
@ -304,9 +324,8 @@ struct RelationsData {
Value: usize, Value: usize,
} }
#[post("/ciphers/import", data = "<data>")] #[post("/ciphers/import", data = "<data>")]
fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
// Read and create the folders // Read and create the folders
@ -327,49 +346,67 @@ fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbC
// Read and create the ciphers // Read and create the ciphers
for (index, cipher_data) in data.Ciphers.into_iter().enumerate() { for (index, cipher_data) in data.Ciphers.into_iter().enumerate() {
let folder_uuid = relations_map.get(&index) let folder_uuid = relations_map.get(&index).map(|i| folders[*i].uuid.clone());
.map(|i| folders[*i].uuid.clone());
let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate)?; update_cipher_from_data(
&mut cipher,
cipher_data,
&headers,
false,
&conn,
&nt,
UpdateType::CipherCreate,
)?;
cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn).ok(); cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn)?;
} }
let mut user = headers.user; let mut user = headers.user;
user.update_revision(&conn) user.update_revision(&conn)
} }
#[put("/ciphers/<uuid>/admin", data = "<data>")] #[put("/ciphers/<uuid>/admin", data = "<data>")]
fn put_cipher_admin(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn put_cipher_admin(
put_cipher(uuid, data, headers, conn, ws) uuid: String,
data: JsonUpcase<CipherData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> JsonResult {
put_cipher(uuid, data, headers, conn, nt)
} }
#[post("/ciphers/<uuid>/admin", data = "<data>")] #[post("/ciphers/<uuid>/admin", data = "<data>")]
fn post_cipher_admin(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_cipher_admin(
post_cipher(uuid, data, headers, conn, ws) uuid: String,
data: JsonUpcase<CipherData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> JsonResult {
post_cipher(uuid, data, headers, conn, nt)
} }
#[post("/ciphers/<uuid>", data = "<data>")] #[post("/ciphers/<uuid>", data = "<data>")]
fn post_cipher(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_cipher(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
put_cipher(uuid, data, headers, conn, ws) put_cipher(uuid, data, headers, conn, nt)
} }
#[put("/ciphers/<uuid>", data = "<data>")] #[put("/ciphers/<uuid>", data = "<data>")]
fn put_cipher(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn put_cipher(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
let data: CipherData = data.into_inner().data; let data: CipherData = data.into_inner().data;
let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) { let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &ws, UpdateType::SyncCipherUpdate)?; update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
} }
@ -381,22 +418,37 @@ struct CollectionsAdminData {
} }
#[post("/ciphers/<uuid>/collections", data = "<data>")] #[post("/ciphers/<uuid>/collections", data = "<data>")]
fn post_collections_update(uuid: String, data: JsonUpcase<CollectionsAdminData>, headers: Headers, conn: DbConn) -> EmptyResult { fn post_collections_update(
uuid: String,
data: JsonUpcase<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
) -> EmptyResult {
post_collections_admin(uuid, data, headers, conn) post_collections_admin(uuid, data, headers, conn)
} }
#[put("/ciphers/<uuid>/collections-admin", data = "<data>")] #[put("/ciphers/<uuid>/collections-admin", data = "<data>")]
fn put_collections_admin(uuid: String, data: JsonUpcase<CollectionsAdminData>, headers: Headers, conn: DbConn) -> EmptyResult { fn put_collections_admin(
uuid: String,
data: JsonUpcase<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
) -> EmptyResult {
post_collections_admin(uuid, data, headers, conn) post_collections_admin(uuid, data, headers, conn)
} }
#[post("/ciphers/<uuid>/collections-admin", data = "<data>")] #[post("/ciphers/<uuid>/collections-admin", data = "<data>")]
fn post_collections_admin(uuid: String, data: JsonUpcase<CollectionsAdminData>, headers: Headers, conn: DbConn) -> EmptyResult { fn post_collections_admin(
uuid: String,
data: JsonUpcase<CollectionsAdminData>,
headers: Headers,
conn: DbConn,
) -> EmptyResult {
let data: CollectionsAdminData = data.into_inner().data; let data: CollectionsAdminData = data.into_inner().data;
let cipher = match Cipher::find_by_uuid(&uuid, &conn) { let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
@ -404,16 +456,22 @@ fn post_collections_admin(uuid: String, data: JsonUpcase<CollectionsAdminData>,
} }
let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect(); let posted_collections: HashSet<String> = data.CollectionIds.iter().cloned().collect();
let current_collections: HashSet<String> = cipher.get_collections(&headers.user.uuid ,&conn).iter().cloned().collect(); let current_collections: HashSet<String> = cipher
.get_collections(&headers.user.uuid, &conn)
.iter()
.cloned()
.collect();
for collection in posted_collections.symmetric_difference(&current_collections) { for collection in posted_collections.symmetric_difference(&current_collections) {
match Collection::find_by_uuid(&collection, &conn) { match Collection::find_by_uuid(&collection, &conn) {
None => err!("Invalid collection ID provided"), None => err!("Invalid collection ID provided"),
Some(collection) => { Some(collection) => {
if collection.is_writable_by_user(&headers.user.uuid, &conn) { if collection.is_writable_by_user(&headers.user.uuid, &conn) {
if posted_collections.contains(&collection.uuid) { // Add to collection if posted_collections.contains(&collection.uuid) {
// Add to collection
CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?; CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?;
} else { // Remove from collection } else {
// Remove from collection
CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn)?; CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn)?;
} }
} else { } else {
@ -434,28 +492,45 @@ struct ShareCipherData {
} }
#[post("/ciphers/<uuid>/share", data = "<data>")] #[post("/ciphers/<uuid>/share", data = "<data>")]
fn post_cipher_share(uuid: String, data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_cipher_share(
uuid: String,
data: JsonUpcase<ShareCipherData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner().data;
share_cipher_by_uuid(&uuid, data, &headers, &conn, &ws) share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt)
} }
#[put("/ciphers/<uuid>/share", data = "<data>")] #[put("/ciphers/<uuid>/share", data = "<data>")]
fn put_cipher_share(uuid: String, data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn put_cipher_share(
uuid: String,
data: JsonUpcase<ShareCipherData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> JsonResult {
let data: ShareCipherData = data.into_inner().data; let data: ShareCipherData = data.into_inner().data;
share_cipher_by_uuid(&uuid, data, &headers, &conn, &ws) share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt)
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct ShareSelectedCipherData { struct ShareSelectedCipherData {
Ciphers: Vec<CipherData>, Ciphers: Vec<CipherData>,
CollectionIds: Vec<String> CollectionIds: Vec<String>,
} }
#[put("/ciphers/share", data = "<data>")] #[put("/ciphers/share", data = "<data>")]
fn put_cipher_share_seleted(data: JsonUpcase<ShareSelectedCipherData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn put_cipher_share_seleted(
data: JsonUpcase<ShareSelectedCipherData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
let mut data: ShareSelectedCipherData = data.into_inner().data; let mut data: ShareSelectedCipherData = data.into_inner().data;
let mut cipher_ids: Vec<String> = Vec::new(); let mut cipher_ids: Vec<String> = Vec::new();
@ -470,7 +545,7 @@ fn put_cipher_share_seleted(data: JsonUpcase<ShareSelectedCipherData>, headers:
for cipher in data.Ciphers.iter() { for cipher in data.Ciphers.iter() {
match cipher.Id { match cipher.Id {
Some(ref id) => cipher_ids.push(id.to_string()), Some(ref id) => cipher_ids.push(id.to_string()),
None => err!("Request missing ids field") None => err!("Request missing ids field"),
}; };
} }
@ -483,20 +558,25 @@ fn put_cipher_share_seleted(data: JsonUpcase<ShareSelectedCipherData>, headers:
while let Some(cipher) = data.Ciphers.pop() { while let Some(cipher) = data.Ciphers.pop() {
let mut shared_cipher_data = ShareCipherData { let mut shared_cipher_data = ShareCipherData {
Cipher: cipher, Cipher: cipher,
CollectionIds: data.CollectionIds.clone() CollectionIds: data.CollectionIds.clone(),
}; };
match shared_cipher_data.Cipher.Id.take() { match shared_cipher_data.Cipher.Id.take() {
Some(id) => share_cipher_by_uuid(&id, shared_cipher_data , &headers, &conn, &ws)?, Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &conn, &nt)?,
None => err!("Request missing ids field") None => err!("Request missing ids field"),
}; };
} }
Ok(()) Ok(())
} }
fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, conn: &DbConn, ws: &State<WebSocketUsers>) -> JsonResult { fn share_cipher_by_uuid(
uuid: &str,
data: ShareCipherData,
headers: &Headers,
conn: &DbConn,
nt: &Notify,
) -> JsonResult {
let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) { let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => { Some(cipher) => {
if cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { if cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
@ -504,8 +584,8 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co
} else { } else {
err!("Cipher is not write accessible") err!("Cipher is not write accessible")
} }
}, }
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
match data.Cipher.OrganizationId.clone() { match data.Cipher.OrganizationId.clone() {
@ -525,7 +605,15 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co
} }
} }
} }
update_cipher_from_data(&mut cipher, data.Cipher, &headers, shared_to_collection, &conn, &ws, UpdateType::SyncCipherUpdate)?; update_cipher_from_data(
&mut cipher,
data.Cipher,
&headers,
shared_to_collection,
&conn,
&nt,
UpdateType::CipherUpdate,
)?;
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
} }
@ -536,7 +624,7 @@ fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, co
fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn) -> JsonResult { fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn) -> JsonResult {
let cipher = match Cipher::find_by_uuid(&uuid, &conn) { let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
@ -551,7 +639,8 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
let mut attachment_key = None; let mut attachment_key = None;
Multipart::with_body(data.open(), boundary).foreach_entry(|mut field| { Multipart::with_body(data.open(), boundary)
.foreach_entry(|mut field| {
match field.headers.name.as_str() { match field.headers.name.as_str() {
"key" => { "key" => {
use std::io::Read; use std::io::Read;
@ -559,7 +648,7 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
if field.data.read_to_string(&mut key_buffer).is_ok() { if field.data.read_to_string(&mut key_buffer).is_ok() {
attachment_key = Some(key_buffer); attachment_key = Some(key_buffer);
} }
}, }
"data" => { "data" => {
// This is provided by the client, don't trust it // This is provided by the client, don't trust it
let name = field.headers.filename.expect("No filename provided"); let name = field.headers.filename.expect("No filename provided");
@ -567,19 +656,16 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
let path = base_path.join(&file_name); let path = base_path.join(&file_name);
let size = match field.data.save() let size = match field.data.save().memory_threshold(0).size_limit(None).with_path(path) {
.memory_threshold(0)
.size_limit(None)
.with_path(path) {
SaveResult::Full(SavedData::File(_, size)) => size as i32, SaveResult::Full(SavedData::File(_, size)) => size as i32,
SaveResult::Full(other) => { SaveResult::Full(other) => {
error!("Attachment is not a file: {:?}", other); error!("Attachment is not a file: {:?}", other);
return; return;
}, }
SaveResult::Partial(_, reason) => { SaveResult::Partial(_, reason) => {
error!("Partial result: {:?}", reason); error!("Partial result: {:?}", reason);
return; return;
}, }
SaveResult::Error(e) => { SaveResult::Error(e) => {
error!("Error: {:?}", e); error!("Error: {:?}", e);
return; return;
@ -589,79 +675,116 @@ fn post_attachment(uuid: String, data: Data, content_type: &ContentType, headers
let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size); let mut attachment = Attachment::new(file_name, cipher.uuid.clone(), name, size);
attachment.key = attachment_key.clone(); attachment.key = attachment_key.clone();
attachment.save(&conn).expect("Error saving attachment"); attachment.save(&conn).expect("Error saving attachment");
},
_ => error!("Invalid multipart name")
} }
}).expect("Error processing multipart data"); _ => error!("Invalid multipart name"),
}
})
.expect("Error processing multipart data");
Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn)))
} }
#[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")] #[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")]
fn post_attachment_admin(uuid: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn) -> JsonResult { fn post_attachment_admin(
uuid: String,
data: Data,
content_type: &ContentType,
headers: Headers,
conn: DbConn,
) -> JsonResult {
post_attachment(uuid, data, content_type, headers, conn) post_attachment(uuid, data, content_type, headers, conn)
} }
#[post("/ciphers/<uuid>/attachment/<attachment_id>/share", format = "multipart/form-data", data = "<data>")] #[post(
fn post_attachment_share(uuid: String, attachment_id: String, data: Data, content_type: &ContentType, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { "/ciphers/<uuid>/attachment/<attachment_id>/share",
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws)?; format = "multipart/form-data",
data = "<data>"
)]
fn post_attachment_share(
uuid: String,
attachment_id: String,
data: Data,
content_type: &ContentType,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> JsonResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt)?;
post_attachment(uuid, data, content_type, headers, conn) post_attachment(uuid, data, content_type, headers, conn)
} }
#[post("/ciphers/<uuid>/attachment/<attachment_id>/delete-admin")] #[post("/ciphers/<uuid>/attachment/<attachment_id>/delete-admin")]
fn delete_attachment_post_admin(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_attachment_post_admin(
delete_attachment(uuid, attachment_id, headers, conn, ws) uuid: String,
attachment_id: String,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
delete_attachment(uuid, attachment_id, headers, conn, nt)
} }
#[post("/ciphers/<uuid>/attachment/<attachment_id>/delete")] #[post("/ciphers/<uuid>/attachment/<attachment_id>/delete")]
fn delete_attachment_post(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_attachment_post(
delete_attachment(uuid, attachment_id, headers, conn, ws) uuid: String,
attachment_id: String,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
delete_attachment(uuid, attachment_id, headers, conn, nt)
} }
#[delete("/ciphers/<uuid>/attachment/<attachment_id>")] #[delete("/ciphers/<uuid>/attachment/<attachment_id>")]
fn delete_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws) _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt)
} }
#[delete("/ciphers/<uuid>/attachment/<attachment_id>/admin")] #[delete("/ciphers/<uuid>/attachment/<attachment_id>/admin")]
fn delete_attachment_admin(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_attachment_admin(
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &ws) uuid: String,
attachment_id: String,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
_delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt)
} }
#[post("/ciphers/<uuid>/delete")] #[post("/ciphers/<uuid>/delete")]
fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
} }
#[post("/ciphers/<uuid>/delete-admin")] #[post("/ciphers/<uuid>/delete-admin")]
fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
} }
#[delete("/ciphers/<uuid>")] #[delete("/ciphers/<uuid>")]
fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
} }
#[delete("/ciphers/<uuid>/admin")] #[delete("/ciphers/<uuid>/admin")]
fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
_delete_cipher_by_uuid(&uuid, &headers, &conn, &ws) _delete_cipher_by_uuid(&uuid, &headers, &conn, &nt)
} }
#[delete("/ciphers", data = "<data>")] #[delete("/ciphers", data = "<data>")]
fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let data: Value = data.into_inner().data; let data: Value = data.into_inner().data;
let uuids = match data.get("Ids") { let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() { Some(ids) => match ids.as_array() {
Some(ids) => ids.iter().filter_map(Value::as_str), Some(ids) => ids.iter().filter_map(Value::as_str),
None => err!("Posted ids field is not an array") None => err!("Posted ids field is not an array"),
}, },
None => err!("Request missing ids field") None => err!("Request missing ids field"),
}; };
for uuid in uuids { for uuid in uuids {
if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, &ws) { if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, &nt) {
return error; return error;
}; };
} }
@ -670,46 +793,42 @@ fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbCon
} }
#[post("/ciphers/delete", data = "<data>")] #[post("/ciphers/delete", data = "<data>")]
fn delete_cipher_selected_post(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_cipher_selected_post(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
delete_cipher_selected(data, headers, conn, ws) delete_cipher_selected(data, headers, conn, nt)
} }
#[post("/ciphers/move", data = "<data>")] #[post("/ciphers/move", data = "<data>")]
fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let data = data.into_inner().data; let data = data.into_inner().data;
let folder_id = match data.get("FolderId") { let folder_id = match data.get("FolderId") {
Some(folder_id) => { Some(folder_id) => match folder_id.as_str() {
match folder_id.as_str() { Some(folder_id) => match Folder::find_by_uuid(folder_id, &conn) {
Some(folder_id) => {
match Folder::find_by_uuid(folder_id, &conn) {
Some(folder) => { Some(folder) => {
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
err!("Folder is not owned by user") err!("Folder is not owned by user")
} }
Some(folder.uuid) Some(folder.uuid)
} }
None => err!("Folder doesn't exist") None => err!("Folder doesn't exist"),
} },
} None => err!("Folder id provided in wrong format"),
None => err!("Folder id provided in wrong format") },
} None => None,
}
None => None
}; };
let uuids = match data.get("Ids") { let uuids = match data.get("Ids") {
Some(ids) => match ids.as_array() { Some(ids) => match ids.as_array() {
Some(ids) => ids.iter().filter_map(Value::as_str), Some(ids) => ids.iter().filter_map(Value::as_str),
None => err!("Posted ids field is not an array") None => err!("Posted ids field is not an array"),
}, },
None => err!("Request missing ids field") None => err!("Request missing ids field"),
}; };
for uuid in uuids { for uuid in uuids {
let mut cipher = match Cipher::find_by_uuid(uuid, &conn) { let mut cipher = match Cipher::find_by_uuid(uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) {
@ -720,19 +839,19 @@ fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn,
cipher.move_to_folder(folder_id.clone(), &headers.user.uuid, &conn)?; cipher.move_to_folder(folder_id.clone(), &headers.user.uuid, &conn)?;
cipher.save(&conn)?; cipher.save(&conn)?;
ws.send_cipher_update(UpdateType::SyncCipherUpdate, &cipher, &cipher.update_users_revision(&conn)); nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn));
} }
Ok(()) Ok(())
} }
#[put("/ciphers/move", data = "<data>")] #[put("/ciphers/move", data = "<data>")]
fn move_cipher_selected_put(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn move_cipher_selected_put(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
move_cipher_selected(data, headers, conn, ws) move_cipher_selected(data, headers, conn, nt)
} }
#[post("/ciphers/purge", data = "<data>")] #[post("/ciphers/purge", data = "<data>")]
fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
@ -745,19 +864,19 @@ fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn, ws
// Delete ciphers and their attachments // Delete ciphers and their attachments
for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) { for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) {
cipher.delete(&conn)?; cipher.delete(&conn)?;
ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn)); nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
} }
// Delete folders // Delete folders
for f in Folder::find_by_user(&user.uuid, &conn) { for f in Folder::find_by_user(&user.uuid, &conn) {
f.delete(&conn)?; f.delete(&conn)?;
ws.send_folder_update(UpdateType::SyncFolderCreate, &f); nt.send_folder_update(UpdateType::FolderCreate, &f);
} }
Ok(()) Ok(())
} }
fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, ws: &State<WebSocketUsers>) -> EmptyResult { fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> EmptyResult {
let cipher = match Cipher::find_by_uuid(&uuid, &conn) { let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist"), None => err!("Cipher doesn't exist"),
@ -768,14 +887,20 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, ws: &Sta
} }
cipher.delete(&conn)?; cipher.delete(&conn)?;
ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn)); nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
Ok(()) Ok(())
} }
fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &Headers, conn: &DbConn, ws: &State<WebSocketUsers>) -> EmptyResult { fn _delete_cipher_attachment_by_id(
uuid: &str,
attachment_id: &str,
headers: &Headers,
conn: &DbConn,
nt: &Notify,
) -> EmptyResult {
let attachment = match Attachment::find_by_id(&attachment_id, &conn) { let attachment = match Attachment::find_by_id(&attachment_id, &conn) {
Some(attachment) => attachment, Some(attachment) => attachment,
None => err!("Attachment doesn't exist") None => err!("Attachment doesn't exist"),
}; };
if attachment.cipher_uuid != uuid { if attachment.cipher_uuid != uuid {
@ -784,7 +909,7 @@ fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &He
let cipher = match Cipher::find_by_uuid(&uuid, &conn) { let cipher = match Cipher::find_by_uuid(&uuid, &conn) {
Some(cipher) => cipher, Some(cipher) => cipher,
None => err!("Cipher doesn't exist") None => err!("Cipher doesn't exist"),
}; };
if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
@ -793,6 +918,6 @@ fn _delete_cipher_attachment_by_id(uuid: &str, attachment_id: &str, headers: &He
// Delete attachment // Delete attachment
attachment.delete(&conn)?; attachment.delete(&conn)?;
ws.send_cipher_update(UpdateType::SyncCipherDelete, &cipher, &cipher.update_users_revision(&conn)); nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(&conn));
Ok(()) Ok(())
} }

View File

@ -1,11 +1,10 @@
use rocket::State;
use rocket_contrib::json::Json; use rocket_contrib::json::Json;
use serde_json::Value; use serde_json::Value;
use crate::db::DbConn;
use crate::db::models::*; use crate::db::models::*;
use crate::db::DbConn;
use crate::api::{JsonResult, EmptyResult, JsonUpcase, WebSocketUsers, UpdateType}; use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType};
use crate::auth::Headers; use crate::auth::Headers;
use rocket::Route; use rocket::Route;
@ -39,7 +38,7 @@ fn get_folders(headers: Headers, conn: DbConn) -> JsonResult {
fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult {
let folder = match Folder::find_by_uuid(&uuid, &conn) { let folder = match Folder::find_by_uuid(&uuid, &conn) {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder") _ => err!("Invalid folder"),
}; };
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
@ -53,33 +52,33 @@ fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult {
#[allow(non_snake_case)] #[allow(non_snake_case)]
pub struct FolderData { pub struct FolderData {
pub Name: String pub Name: String,
} }
#[post("/folders", data = "<data>")] #[post("/folders", data = "<data>")]
fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_folders(data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner().data;
let mut folder = Folder::new(headers.user.uuid.clone(), data.Name); let mut folder = Folder::new(headers.user.uuid.clone(), data.Name);
folder.save(&conn)?; folder.save(&conn)?;
ws.send_folder_update(UpdateType::SyncFolderCreate, &folder); nt.send_folder_update(UpdateType::FolderCreate, &folder);
Ok(Json(folder.to_json())) Ok(Json(folder.to_json()))
} }
#[post("/folders/<uuid>", data = "<data>")] #[post("/folders/<uuid>", data = "<data>")]
fn post_folder(uuid: String, data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn post_folder(uuid: String, data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
put_folder(uuid, data, headers, conn, ws) put_folder(uuid, data, headers, conn, nt)
} }
#[put("/folders/<uuid>", data = "<data>")] #[put("/folders/<uuid>", data = "<data>")]
fn put_folder(uuid: String, data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> JsonResult { fn put_folder(uuid: String, data: JsonUpcase<FolderData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
let data: FolderData = data.into_inner().data; let data: FolderData = data.into_inner().data;
let mut folder = match Folder::find_by_uuid(&uuid, &conn) { let mut folder = match Folder::find_by_uuid(&uuid, &conn) {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder") _ => err!("Invalid folder"),
}; };
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
@ -89,21 +88,21 @@ fn put_folder(uuid: String, data: JsonUpcase<FolderData>, headers: Headers, conn
folder.name = data.Name; folder.name = data.Name;
folder.save(&conn)?; folder.save(&conn)?;
ws.send_folder_update(UpdateType::SyncFolderUpdate, &folder); nt.send_folder_update(UpdateType::FolderUpdate, &folder);
Ok(Json(folder.to_json())) Ok(Json(folder.to_json()))
} }
#[post("/folders/<uuid>/delete")] #[post("/folders/<uuid>/delete")]
fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
delete_folder(uuid, headers, conn, ws) delete_folder(uuid, headers, conn, nt)
} }
#[delete("/folders/<uuid>")] #[delete("/folders/<uuid>")]
fn delete_folder(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
let folder = match Folder::find_by_uuid(&uuid, &conn) { let folder = match Folder::find_by_uuid(&uuid, &conn) {
Some(folder) => folder, Some(folder) => folder,
_ => err!("Invalid folder") _ => err!("Invalid folder"),
}; };
if folder.user_uuid != headers.user.uuid { if folder.user_uuid != headers.user.uuid {
@ -113,6 +112,6 @@ fn delete_folder(uuid: String, headers: Headers, conn: DbConn, ws: State<WebSock
// Delete the actual folder entry // Delete the actual folder entry
folder.delete(&conn)?; folder.delete(&conn)?;
ws.send_folder_update(UpdateType::SyncFolderDelete, &folder); nt.send_folder_update(UpdateType::FolderDelete, &folder);
Ok(()) Ok(())
} }

View File

@ -8,7 +8,6 @@ pub fn routes() -> Vec<Route> {
let mut mod_routes = routes![ let mut mod_routes = routes![
clear_device_token, clear_device_token,
put_device_token, put_device_token,
get_eq_domains, get_eq_domains,
post_eq_domains, post_eq_domains,
put_eq_domains, put_eq_domains,
@ -25,9 +24,9 @@ pub fn routes() -> Vec<Route> {
routes routes
} }
/// //
/// Move this somewhere else // Move this somewhere else
/// //
use rocket::Route; use rocket::Route;
use rocket_contrib::json::Json; use rocket_contrib::json::Json;

View File

@ -1,14 +1,13 @@
use rocket::State;
use rocket::request::Form; use rocket::request::Form;
use rocket_contrib::json::Json; use rocket_contrib::json::Json;
use serde_json::Value; use serde_json::Value;
use crate::CONFIG;
use crate::db::DbConn;
use crate::db::models::*; use crate::db::models::*;
use crate::db::DbConn;
use crate::CONFIG;
use crate::api::{PasswordData, JsonResult, EmptyResult, NumberOrString, JsonUpcase, WebSocketUsers, UpdateType}; use crate::api::{EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, PasswordData, UpdateType};
use crate::auth::{Headers, AdminHeaders, OwnerHeaders, encode_jwt, decode_invite_jwt, InviteJWTClaims, JWT_ISSUER}; use crate::auth::{decode_invite_jwt, encode_jwt, AdminHeaders, Headers, InviteJWTClaims, OwnerHeaders, JWT_ISSUER};
use crate::mail; use crate::mail;
@ -53,7 +52,6 @@ pub fn routes() -> Vec<Route> {
] ]
} }
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct OrgData { struct OrgData {
@ -83,10 +81,8 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
let data: OrgData = data.into_inner().data; let data: OrgData = data.into_inner().data;
let mut org = Organization::new(data.Name, data.BillingEmail); let mut org = Organization::new(data.Name, data.BillingEmail);
let mut user_org = UserOrganization::new( let mut user_org = UserOrganization::new(headers.user.uuid.clone(), org.uuid.clone());
headers.user.uuid.clone(), org.uuid.clone()); let mut collection = Collection::new(org.uuid.clone(), data.CollectionName);
let mut collection = Collection::new(
org.uuid.clone(), data.CollectionName);
user_org.key = data.Key; user_org.key = data.Key;
user_org.access_all = true; user_org.access_all = true;
@ -101,7 +97,12 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
} }
#[delete("/organizations/<org_id>", data = "<data>")] #[delete("/organizations/<org_id>", data = "<data>")]
fn delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers: OwnerHeaders, conn: DbConn) -> EmptyResult { fn delete_organization(
org_id: String,
data: JsonUpcase<PasswordData>,
headers: OwnerHeaders,
conn: DbConn,
) -> EmptyResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
@ -111,12 +112,17 @@ fn delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers:
match Organization::find_by_uuid(&org_id, &conn) { match Organization::find_by_uuid(&org_id, &conn) {
None => err!("Organization not found"), None => err!("Organization not found"),
Some(org) => org.delete(&conn) Some(org) => org.delete(&conn),
} }
} }
#[post("/organizations/<org_id>/delete", data = "<data>")] #[post("/organizations/<org_id>/delete", data = "<data>")]
fn post_delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers: OwnerHeaders, conn: DbConn) -> EmptyResult { fn post_delete_organization(
org_id: String,
data: JsonUpcase<PasswordData>,
headers: OwnerHeaders,
conn: DbConn,
) -> EmptyResult {
delete_organization(org_id, data, headers, conn) delete_organization(org_id, data, headers, conn)
} }
@ -126,9 +132,8 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe
None => err!("User not part of organization"), None => err!("User not part of organization"),
Some(user_org) => { Some(user_org) => {
if user_org.type_ == UserOrgType::Owner { if user_org.type_ == UserOrgType::Owner {
let num_owners = UserOrganization::find_by_org_and_type( let num_owners =
&org_id, UserOrgType::Owner as i32, &conn) UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
.len();
if num_owners <= 1 { if num_owners <= 1 {
err!("The last owner can't leave") err!("The last owner can't leave")
@ -144,22 +149,32 @@ fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyRe
fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> JsonResult { fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> JsonResult {
match Organization::find_by_uuid(&org_id, &conn) { match Organization::find_by_uuid(&org_id, &conn) {
Some(organization) => Ok(Json(organization.to_json())), Some(organization) => Ok(Json(organization.to_json())),
None => err!("Can't find organization details") None => err!("Can't find organization details"),
} }
} }
#[put("/organizations/<org_id>", data = "<data>")] #[put("/organizations/<org_id>", data = "<data>")]
fn put_organization(org_id: String, headers: OwnerHeaders, data: JsonUpcase<OrganizationUpdateData>, conn: DbConn) -> JsonResult { fn put_organization(
org_id: String,
headers: OwnerHeaders,
data: JsonUpcase<OrganizationUpdateData>,
conn: DbConn,
) -> JsonResult {
post_organization(org_id, headers, data, conn) post_organization(org_id, headers, data, conn)
} }
#[post("/organizations/<org_id>", data = "<data>")] #[post("/organizations/<org_id>", data = "<data>")]
fn post_organization(org_id: String, _headers: OwnerHeaders, data: JsonUpcase<OrganizationUpdateData>, conn: DbConn) -> JsonResult { fn post_organization(
org_id: String,
_headers: OwnerHeaders,
data: JsonUpcase<OrganizationUpdateData>,
conn: DbConn,
) -> JsonResult {
let data: OrganizationUpdateData = data.into_inner().data; let data: OrganizationUpdateData = data.into_inner().data;
let mut org = match Organization::find_by_uuid(&org_id, &conn) { let mut org = match Organization::find_by_uuid(&org_id, &conn) {
Some(organization) => organization, Some(organization) => organization,
None => err!("Can't find organization details") None => err!("Can't find organization details"),
}; };
org.name = data.Name; org.name = data.Name;
@ -172,7 +187,6 @@ fn post_organization(org_id: String, _headers: OwnerHeaders, data: JsonUpcase<Or
// GET /api/collections?writeOnly=false // GET /api/collections?writeOnly=false
#[get("/collections")] #[get("/collections")]
fn get_user_collections(headers: Headers, conn: DbConn) -> JsonResult { fn get_user_collections(headers: Headers, conn: DbConn) -> JsonResult {
Ok(Json(json!({ Ok(Json(json!({
"Data": "Data":
Collection::find_by_user_uuid(&headers.user.uuid, &conn) Collection::find_by_user_uuid(&headers.user.uuid, &conn)
@ -198,12 +212,17 @@ fn get_org_collections(org_id: String, _headers: AdminHeaders, conn: DbConn) ->
} }
#[post("/organizations/<org_id>/collections", data = "<data>")] #[post("/organizations/<org_id>/collections", data = "<data>")]
fn post_organization_collections(org_id: String, _headers: AdminHeaders, data: JsonUpcase<NewCollectionData>, conn: DbConn) -> JsonResult { fn post_organization_collections(
org_id: String,
_headers: AdminHeaders,
data: JsonUpcase<NewCollectionData>,
conn: DbConn,
) -> JsonResult {
let data: NewCollectionData = data.into_inner().data; let data: NewCollectionData = data.into_inner().data;
let org = match Organization::find_by_uuid(&org_id, &conn) { let org = match Organization::find_by_uuid(&org_id, &conn) {
Some(organization) => organization, Some(organization) => organization,
None => err!("Can't find organization details") None => err!("Can't find organization details"),
}; };
let mut collection = Collection::new(org.uuid.clone(), data.Name); let mut collection = Collection::new(org.uuid.clone(), data.Name);
@ -213,22 +232,34 @@ fn post_organization_collections(org_id: String, _headers: AdminHeaders, data: J
} }
#[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")] #[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
fn put_organization_collection_update(org_id: String, col_id: String, headers: AdminHeaders, data: JsonUpcase<NewCollectionData>, conn: DbConn) -> JsonResult { fn put_organization_collection_update(
org_id: String,
col_id: String,
headers: AdminHeaders,
data: JsonUpcase<NewCollectionData>,
conn: DbConn,
) -> JsonResult {
post_organization_collection_update(org_id, col_id, headers, data, conn) post_organization_collection_update(org_id, col_id, headers, data, conn)
} }
#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")] #[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
fn post_organization_collection_update(org_id: String, col_id: String, _headers: AdminHeaders, data: JsonUpcase<NewCollectionData>, conn: DbConn) -> JsonResult { fn post_organization_collection_update(
org_id: String,
col_id: String,
_headers: AdminHeaders,
data: JsonUpcase<NewCollectionData>,
conn: DbConn,
) -> JsonResult {
let data: NewCollectionData = data.into_inner().data; let data: NewCollectionData = data.into_inner().data;
let org = match Organization::find_by_uuid(&org_id, &conn) { let org = match Organization::find_by_uuid(&org_id, &conn) {
Some(organization) => organization, Some(organization) => organization,
None => err!("Can't find organization details") None => err!("Can't find organization details"),
}; };
let mut collection = match Collection::find_by_uuid(&col_id, &conn) { let mut collection = match Collection::find_by_uuid(&col_id, &conn) {
Some(collection) => collection, Some(collection) => collection,
None => err!("Collection not found") None => err!("Collection not found"),
}; };
if collection.org_uuid != org.uuid { if collection.org_uuid != org.uuid {
@ -241,16 +272,23 @@ fn post_organization_collection_update(org_id: String, col_id: String, _headers:
Ok(Json(collection.to_json())) Ok(Json(collection.to_json()))
} }
#[delete("/organizations/<org_id>/collections/<col_id>/user/<org_user_id>")] #[delete("/organizations/<org_id>/collections/<col_id>/user/<org_user_id>")]
fn delete_organization_collection_user(org_id: String, col_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn delete_organization_collection_user(
org_id: String,
col_id: String,
org_user_id: String,
_headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
let collection = match Collection::find_by_uuid(&col_id, &conn) { let collection = match Collection::find_by_uuid(&col_id, &conn) {
None => err!("Collection not found"), None => err!("Collection not found"),
Some(collection) => if collection.org_uuid == org_id { Some(collection) => {
if collection.org_uuid == org_id {
collection collection
} else { } else {
err!("Collection and Organization id do not match") err!("Collection and Organization id do not match")
} }
}
}; };
match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) {
@ -258,16 +296,20 @@ fn delete_organization_collection_user(org_id: String, col_id: String, org_user_
Some(user_org) => { Some(user_org) => {
match CollectionUser::find_by_collection_and_user(&collection.uuid, &user_org.user_uuid, &conn) { match CollectionUser::find_by_collection_and_user(&collection.uuid, &user_org.user_uuid, &conn) {
None => err!("User not assigned to collection"), None => err!("User not assigned to collection"),
Some(col_user) => { Some(col_user) => col_user.delete(&conn),
col_user.delete(&conn)
}
} }
} }
} }
} }
#[post("/organizations/<org_id>/collections/<col_id>/delete-user/<org_user_id>")] #[post("/organizations/<org_id>/collections/<col_id>/delete-user/<org_user_id>")]
fn post_organization_collection_delete_user(org_id: String, col_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn post_organization_collection_delete_user(
org_id: String,
col_id: String,
org_user_id: String,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn) delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn)
} }
@ -275,13 +317,15 @@ fn post_organization_collection_delete_user(org_id: String, col_id: String, org_
fn delete_organization_collection(org_id: String, col_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn delete_organization_collection(org_id: String, col_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult {
match Collection::find_by_uuid(&col_id, &conn) { match Collection::find_by_uuid(&col_id, &conn) {
None => err!("Collection not found"), None => err!("Collection not found"),
Some(collection) => if collection.org_uuid == org_id { Some(collection) => {
if collection.org_uuid == org_id {
collection.delete(&conn) collection.delete(&conn)
} else { } else {
err!("Collection and Organization id do not match") err!("Collection and Organization id do not match")
} }
} }
} }
}
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
@ -291,7 +335,13 @@ struct DeleteCollectionData {
} }
#[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<_data>")] #[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<_data>")]
fn post_organization_collection_delete(org_id: String, col_id: String, headers: AdminHeaders, _data: JsonUpcase<DeleteCollectionData>, conn: DbConn) -> EmptyResult { fn post_organization_collection_delete(
org_id: String,
col_id: String,
headers: AdminHeaders,
_data: JsonUpcase<DeleteCollectionData>,
conn: DbConn,
) -> EmptyResult {
delete_organization_collection(org_id, col_id, headers, conn) delete_organization_collection(org_id, col_id, headers, conn)
} }
@ -314,16 +364,18 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders,
// Get org and collection, check that collection is from org // Get org and collection, check that collection is from org
let collection = match Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn) { let collection = match Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn) {
None => err!("Collection not found in Organization"), None => err!("Collection not found in Organization"),
Some(collection) => collection Some(collection) => collection,
}; };
// Get the users from collection // Get the users from collection
let user_list: Vec<Value> = CollectionUser::find_by_collection(&collection.uuid, &conn) let user_list: Vec<Value> = CollectionUser::find_by_collection(&collection.uuid, &conn)
.iter().map(|col_user| { .iter()
.map(|col_user| {
UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn) UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn)
.unwrap() .unwrap()
.to_json_collection_user_details(col_user.read_only, &conn) .to_json_collection_user_details(col_user.read_only, &conn)
}).collect(); })
.collect();
Ok(Json(json!({ Ok(Json(json!({
"Data": user_list, "Data": user_list,
@ -335,13 +387,16 @@ fn get_collection_users(org_id: String, coll_id: String, _headers: AdminHeaders,
#[derive(FromForm)] #[derive(FromForm)]
struct OrgIdData { struct OrgIdData {
#[form(field = "organizationId")] #[form(field = "organizationId")]
organization_id: String organization_id: String,
} }
#[get("/ciphers/organization-details?<data..>")] #[get("/ciphers/organization-details?<data..>")]
fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> JsonResult { fn get_org_details(data: Form<OrgIdData>, headers: Headers, conn: DbConn) -> JsonResult {
let ciphers = Cipher::find_by_org(&data.organization_id, &conn); let ciphers = Cipher::find_by_org(&data.organization_id, &conn);
let ciphers_json: Vec<Value> = ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); let ciphers_json: Vec<Value> = ciphers
.iter()
.map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn))
.collect();
Ok(Json(json!({ Ok(Json(json!({
"Data": ciphers_json, "Data": ciphers_json,
@ -393,11 +448,10 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
let new_type = match UserOrgType::from_str(&data.Type.into_string()) { let new_type = match UserOrgType::from_str(&data.Type.into_string()) {
Some(new_type) => new_type as i32, Some(new_type) => new_type as i32,
None => err!("Invalid type") None => err!("Invalid type"),
}; };
if new_type != UserOrgType::User && if new_type != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
headers.org_user_type != UserOrgType::Owner {
err!("Only Owners can invite Managers, Admins or Owners") err!("Only Owners can invite Managers, Admins or Owners")
} }
@ -407,23 +461,26 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
None => UserOrgStatus::Accepted as i32, // Automatically mark user as accepted if no email invites None => UserOrgStatus::Accepted as i32, // Automatically mark user as accepted if no email invites
}; };
let user = match User::find_by_mail(&email, &conn) { let user = match User::find_by_mail(&email, &conn) {
None => if CONFIG.invitations_allowed { // Invite user if that's enabled None => {
if CONFIG.invitations_allowed {
// Invite user if that's enabled
let mut invitation = Invitation::new(email.clone()); let mut invitation = Invitation::new(email.clone());
invitation.save(&conn)?; invitation.save(&conn)?;
let mut user = User::new(email.clone()); let mut user = User::new(email.clone());
user.save(&conn)?; user.save(&conn)?;
user_org_status = UserOrgStatus::Invited as i32; user_org_status = UserOrgStatus::Invited as i32;
user user
} else { } else {
err!(format!("User email does not exist: {}", email)) err!(format!("User email does not exist: {}", email))
}, }
Some(user) => if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).is_some() { }
Some(user) => {
if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).is_some() {
err!(format!("User already in organization: {}", email)) err!(format!("User already in organization: {}", email))
} else { } else {
user user
} }
}
}; };
let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); let mut new_user = UserOrganization::new(user.uuid.clone(), org_id.clone());
@ -449,9 +506,14 @@ fn send_invite(org_id: String, data: JsonUpcase<InviteData>, headers: AdminHeade
if let Some(ref mail_config) = CONFIG.mail { if let Some(ref mail_config) = CONFIG.mail {
let org_name = match Organization::find_by_uuid(&org_id, &conn) { let org_name = match Organization::find_by_uuid(&org_id, &conn) {
Some(org) => org.name, Some(org) => org.name,
None => err!("Error looking up organization") None => err!("Error looking up organization"),
}; };
let claims = generate_invite_claims(user.uuid.to_string(), user.email.clone(), org_id.clone(), Some(new_user.uuid.clone())); let claims = generate_invite_claims(
user.uuid.to_string(),
user.email.clone(),
org_id.clone(),
Some(new_user.uuid.clone()),
);
let invite_token = encode_jwt(&claims); let invite_token = encode_jwt(&claims);
mail::send_invite(&email, &org_id, &new_user.uuid, &invite_token, &org_name, mail_config)?; mail::send_invite(&email, &org_id, &new_user.uuid, &invite_token, &org_name, mail_config)?;
} }
@ -486,13 +548,25 @@ fn reinvite_user(org_id: String, user_org: String, _headers: AdminHeaders, conn:
let org_name = match Organization::find_by_uuid(&org_id, &conn) { let org_name = match Organization::find_by_uuid(&org_id, &conn) {
Some(org) => org.name, Some(org) => org.name,
None => err!("Error looking up organization.") None => err!("Error looking up organization."),
}; };
let claims = generate_invite_claims(user.uuid.to_string(), user.email.clone(), org_id.clone(), Some(user_org.uuid.clone())); let claims = generate_invite_claims(
user.uuid.to_string(),
user.email.clone(),
org_id.clone(),
Some(user_org.uuid.clone()),
);
let invite_token = encode_jwt(&claims); let invite_token = encode_jwt(&claims);
if let Some(ref mail_config) = CONFIG.mail { if let Some(ref mail_config) = CONFIG.mail {
mail::send_invite(&user.email, &org_id, &user_org.uuid, &invite_token, &org_name, mail_config)?; mail::send_invite(
&user.email,
&org_id,
&user_org.uuid,
&invite_token,
&org_name,
mail_config,
)?;
} }
Ok(()) Ok(())
@ -529,35 +603,39 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase<AcceptD
Invitation::take(&claims.email, &conn); Invitation::take(&claims.email, &conn);
if claims.user_org_id.is_some() { if claims.user_org_id.is_some() {
// If this isn't the virtual_org, mark userorg as accepted // If this isn't the virtual_org, mark userorg as accepted
let mut user_org = match UserOrganization::find_by_uuid_and_org(&claims.user_org_id.unwrap(), &claims.org_id, &conn) { let mut user_org =
match UserOrganization::find_by_uuid_and_org(&claims.user_org_id.unwrap(), &claims.org_id, &conn) {
Some(user_org) => user_org, Some(user_org) => user_org,
None => err!("Error accepting the invitation") None => err!("Error accepting the invitation"),
}; };
user_org.status = UserOrgStatus::Accepted as i32; user_org.status = UserOrgStatus::Accepted as i32;
if user_org.save(&conn).is_err() { if user_org.save(&conn).is_err() {
err!("Failed to accept user to organization") err!("Failed to accept user to organization")
} }
} }
}, }
None => { None => err!("Invited user not found"),
err!("Invited user not found")
},
} }
Ok(()) Ok(())
} }
#[post("/organizations/<org_id>/users/<org_user_id>/confirm", data = "<data>")] #[post("/organizations/<org_id>/users/<org_user_id>/confirm", data = "<data>")]
fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase<Value>, headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn confirm_invite(
org_id: String,
org_user_id: String,
data: JsonUpcase<Value>,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
let data = data.into_inner().data; let data = data.into_inner().data;
let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) {
Some(user) => user, Some(user) => user,
None => err!("The specified user isn't a member of the organization") None => err!("The specified user isn't a member of the organization"),
}; };
if user_to_confirm.type_ != UserOrgType::User && if user_to_confirm.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
headers.org_user_type != UserOrgType::Owner {
err!("Only Owners can confirm Managers, Admins or Owners") err!("Only Owners can confirm Managers, Admins or Owners")
} }
@ -568,7 +646,7 @@ fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase<Value>,
user_to_confirm.status = UserOrgStatus::Confirmed as i32; user_to_confirm.status = UserOrgStatus::Confirmed as i32;
user_to_confirm.key = match data["Key"].as_str() { user_to_confirm.key = match data["Key"].as_str() {
Some(key) => key.to_string(), Some(key) => key.to_string(),
None => err!("Invalid key provided") None => err!("Invalid key provided"),
}; };
user_to_confirm.save(&conn) user_to_confirm.save(&conn)
@ -578,7 +656,7 @@ fn confirm_invite(org_id: String, org_user_id: String, data: JsonUpcase<Value>,
fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult { fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
let user = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { let user = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) {
Some(user) => user, Some(user) => user,
None => err!("The specified user isn't a member of the organization") None => err!("The specified user isn't a member of the organization"),
}; };
Ok(Json(user.to_json_details(&conn))) Ok(Json(user.to_json_details(&conn)))
@ -594,44 +672,50 @@ struct EditUserData {
} }
#[put("/organizations/<org_id>/users/<org_user_id>", data = "<data>", rank = 1)] #[put("/organizations/<org_id>/users/<org_user_id>", data = "<data>", rank = 1)]
fn put_organization_user(org_id: String, org_user_id: String, data: JsonUpcase<EditUserData>, headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn put_organization_user(
org_id: String,
org_user_id: String,
data: JsonUpcase<EditUserData>,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
edit_user(org_id, org_user_id, data, headers, conn) edit_user(org_id, org_user_id, data, headers, conn)
} }
#[post("/organizations/<org_id>/users/<org_user_id>", data = "<data>", rank = 1)] #[post("/organizations/<org_id>/users/<org_user_id>", data = "<data>", rank = 1)]
fn edit_user(org_id: String, org_user_id: String, data: JsonUpcase<EditUserData>, headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn edit_user(
org_id: String,
org_user_id: String,
data: JsonUpcase<EditUserData>,
headers: AdminHeaders,
conn: DbConn,
) -> EmptyResult {
let data: EditUserData = data.into_inner().data; let data: EditUserData = data.into_inner().data;
let new_type = match UserOrgType::from_str(&data.Type.into_string()) { let new_type = match UserOrgType::from_str(&data.Type.into_string()) {
Some(new_type) => new_type, Some(new_type) => new_type,
None => err!("Invalid type") None => err!("Invalid type"),
}; };
let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) {
Some(user) => user, Some(user) => user,
None => err!("The specified user isn't member of the organization") None => err!("The specified user isn't member of the organization"),
}; };
if new_type != user_to_edit.type_ && ( if new_type != user_to_edit.type_
user_to_edit.type_ >= UserOrgType::Admin || && (user_to_edit.type_ >= UserOrgType::Admin || new_type >= UserOrgType::Admin)
new_type >= UserOrgType::Admin && headers.org_user_type != UserOrgType::Owner
) && {
headers.org_user_type != UserOrgType::Owner {
err!("Only Owners can grant and remove Admin or Owner privileges") err!("Only Owners can grant and remove Admin or Owner privileges")
} }
if user_to_edit.type_ == UserOrgType::Owner && if user_to_edit.type_ == UserOrgType::Owner && headers.org_user_type != UserOrgType::Owner {
headers.org_user_type != UserOrgType::Owner {
err!("Only Owners can edit Owner users") err!("Only Owners can edit Owner users")
} }
if user_to_edit.type_ == UserOrgType::Owner && if user_to_edit.type_ == UserOrgType::Owner && new_type != UserOrgType::Owner {
new_type != UserOrgType::Owner {
// Removing owner permission, check that there is at least one other owner
let num_owners = UserOrganization::find_by_org_and_type( let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
&org_id, UserOrgType::Owner as i32, &conn)
.len();
if num_owners <= 1 { if num_owners <= 1 {
err!("Can't delete the last owner") err!("Can't delete the last owner")
@ -665,19 +749,16 @@ fn edit_user(org_id: String, org_user_id: String, data: JsonUpcase<EditUserData>
fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
let user_to_delete = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { let user_to_delete = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) {
Some(user) => user, Some(user) => user,
None => err!("User to delete isn't member of the organization") None => err!("User to delete isn't member of the organization"),
}; };
if user_to_delete.type_ != UserOrgType::User && if user_to_delete.type_ != UserOrgType::User && headers.org_user_type != UserOrgType::Owner {
headers.org_user_type != UserOrgType::Owner {
err!("Only Owners can delete Admins or Owners") err!("Only Owners can delete Admins or Owners")
} }
if user_to_delete.type_ == UserOrgType::Owner { if user_to_delete.type_ == UserOrgType::Owner {
// Removing owner, check that there is at least one other owner
let num_owners = UserOrganization::find_by_org_and_type( let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len();
&org_id, UserOrgType::Owner as i32, &conn)
.len();
if num_owners <= 1 { if num_owners <= 1 {
err!("Can't delete the last owner") err!("Can't delete the last owner")
@ -692,8 +773,8 @@ fn post_delete_user(org_id: String, org_user_id: String, headers: AdminHeaders,
delete_user(org_id, org_user_id, headers, conn) delete_user(org_id, org_user_id, headers, conn)
} }
use super::ciphers::CipherData;
use super::ciphers::update_cipher_from_data; use super::ciphers::update_cipher_from_data;
use super::ciphers::CipherData;
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
@ -713,13 +794,19 @@ struct RelationsData {
} }
#[post("/ciphers/import-organization?<query..>", data = "<data>")] #[post("/ciphers/import-organization?<query..>", data = "<data>")]
fn post_org_import(query: Form<OrgIdData>, data: JsonUpcase<ImportData>, headers: Headers, conn: DbConn, ws: State<WebSocketUsers>) -> EmptyResult { fn post_org_import(
query: Form<OrgIdData>,
data: JsonUpcase<ImportData>,
headers: Headers,
conn: DbConn,
nt: Notify,
) -> EmptyResult {
let data: ImportData = data.into_inner().data; let data: ImportData = data.into_inner().data;
let org_id = query.into_inner().organization_id; let org_id = query.into_inner().organization_id;
let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
Some(user) => user, Some(user) => user,
None => err!("User is not part of the organization") None => err!("User is not part of the organization"),
}; };
if org_user.type_ < UserOrgType::Admin { if org_user.type_ < UserOrgType::Admin {
@ -727,14 +814,18 @@ fn post_org_import(query: Form<OrgIdData>, data: JsonUpcase<ImportData>, headers
} }
// Read and create the collections // Read and create the collections
let collections: Vec<_> = data.Collections.into_iter().map(|coll| { let collections: Vec<_> = data
.Collections
.into_iter()
.map(|coll| {
let mut collection = Collection::new(org_id.clone(), coll.Name); let mut collection = Collection::new(org_id.clone(), coll.Name);
if collection.save(&conn).is_err() { if collection.save(&conn).is_err() {
err!("Failed to create Collection"); err!("Failed to create Collection");
} }
Ok(collection) Ok(collection)
}).collect(); })
.collect();
// Read the relations between collections and ciphers // Read the relations between collections and ciphers
let mut relations = Vec::new(); let mut relations = Vec::new();
@ -743,11 +834,24 @@ fn post_org_import(query: Form<OrgIdData>, data: JsonUpcase<ImportData>, headers
} }
// Read and create the ciphers // Read and create the ciphers
let ciphers: Vec<_> = data.Ciphers.into_iter().map(|cipher_data| { let ciphers: Vec<_> = data
.Ciphers
.into_iter()
.map(|cipher_data| {
let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone());
update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &ws, UpdateType::SyncCipherCreate).ok(); update_cipher_from_data(
&mut cipher,
cipher_data,
&headers,
false,
&conn,
&nt,
UpdateType::CipherCreate,
)
.ok();
cipher cipher
}).collect(); })
.collect();
// Assign the collections // Assign the collections
for (cipher_index, coll_index) in relations { for (cipher_index, coll_index) in relations {
@ -755,7 +859,7 @@ fn post_org_import(query: Form<OrgIdData>, data: JsonUpcase<ImportData>, headers
let coll = &collections[coll_index]; let coll = &collections[coll_index];
let coll_id = match coll { let coll_id = match coll {
Ok(coll) => coll.uuid.as_str(), Ok(coll) => coll.uuid.as_str(),
Err(_) => err!("Failed to assign to collection") Err(_) => err!("Failed to assign to collection"),
}; };
CollectionCipher::save(cipher_id, coll_id, &conn)?; CollectionCipher::save(cipher_id, coll_id, &conn)?;
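For readers skimming the import hunk above: the client sends collection/cipher relations as index pairs into the two lists created earlier in the same request. A std-only illustration (string ids stand in for the real DB models and for `CollectionCipher::save`):

```rust
fn main() {
    let cipher_ids = vec!["cipher-a", "cipher-b", "cipher-c"];
    let collection_ids = vec!["coll-1", "coll-2"];
    // Relations as sent by the client: ciphers 0 and 2 go into collection 0,
    // cipher 1 into collection 1.
    let relations = vec![(0usize, 0usize), (1, 1), (2, 0)];

    for (cipher_index, coll_index) in relations {
        let cipher_id = cipher_ids[cipher_index];
        let coll_id = collection_ids[coll_index];
        // The real handler calls CollectionCipher::save(cipher_id, coll_id, &conn)? here.
        println!("link {} -> {}", cipher_id, coll_id);
    }
}
```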
@ -3,7 +3,6 @@ use rocket_contrib::json::Json;
use serde_json; use serde_json;
use serde_json::Value; use serde_json::Value;
use crate::db::{ use crate::db::{
models::{TwoFactor, TwoFactorType, User}, models::{TwoFactor, TwoFactorType, User},
DbConn, DbConn,
@ -111,11 +110,7 @@ struct DisableTwoFactorData {
} }
#[post("/two-factor/disable", data = "<data>")] #[post("/two-factor/disable", data = "<data>")]
fn disable_twofactor( fn disable_twofactor(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<DisableTwoFactorData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
let data: DisableTwoFactorData = data.into_inner().data; let data: DisableTwoFactorData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
@ -137,20 +132,12 @@ fn disable_twofactor(
} }
#[put("/two-factor/disable", data = "<data>")] #[put("/two-factor/disable", data = "<data>")]
fn disable_twofactor_put( fn disable_twofactor_put(data: JsonUpcase<DisableTwoFactorData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<DisableTwoFactorData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
disable_twofactor(data, headers, conn) disable_twofactor(data, headers, conn)
} }
#[post("/two-factor/get-authenticator", data = "<data>")] #[post("/two-factor/get-authenticator", data = "<data>")]
fn generate_authenticator( fn generate_authenticator(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<PasswordData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
if !headers.user.check_valid_password(&data.MasterPasswordHash) { if !headers.user.check_valid_password(&data.MasterPasswordHash) {
@ -181,11 +168,7 @@ struct EnableAuthenticatorData {
} }
#[post("/two-factor/authenticator", data = "<data>")] #[post("/two-factor/authenticator", data = "<data>")]
fn activate_authenticator( fn activate_authenticator(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<EnableAuthenticatorData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
let data: EnableAuthenticatorData = data.into_inner().data; let data: EnableAuthenticatorData = data.into_inner().data;
let password_hash = data.MasterPasswordHash; let password_hash = data.MasterPasswordHash;
let key = data.Key; let key = data.Key;
@ -228,11 +211,7 @@ fn activate_authenticator(
} }
#[put("/two-factor/authenticator", data = "<data>")] #[put("/two-factor/authenticator", data = "<data>")]
fn activate_authenticator_put( fn activate_authenticator_put(data: JsonUpcase<EnableAuthenticatorData>, headers: Headers, conn: DbConn) -> JsonResult {
data: JsonUpcase<EnableAuthenticatorData>,
headers: Headers,
conn: DbConn,
) -> JsonResult {
activate_authenticator(data, headers, conn) activate_authenticator(data, headers, conn)
} }
@ -338,11 +317,8 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
err!("Invalid password"); err!("Invalid password");
} }
let tf_challenge = TwoFactor::find_by_user_and_type( let tf_challenge =
&headers.user.uuid, TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2fRegisterChallenge as i32, &conn);
TwoFactorType::U2fRegisterChallenge as i32,
&conn,
);
if let Some(tf_challenge) = tf_challenge { if let Some(tf_challenge) = tf_challenge {
let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?; let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?;
@ -400,11 +376,8 @@ fn activate_u2f_put(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbC
fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge { fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge {
let challenge = U2F.generate_challenge().unwrap(); let challenge = U2F.generate_challenge().unwrap();
TwoFactor::new( TwoFactor::new(user_uuid.into(), type_, serde_json::to_string(&challenge).unwrap())
user_uuid.into(), .save(conn)
type_,
serde_json::to_string(&challenge).unwrap(),
).save(conn)
.expect("Error saving challenge"); .expect("Error saving challenge");
challenge challenge
@ -478,8 +451,7 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Emp
let mut _counter: u32 = 0; let mut _counter: u32 = 0;
for registration in registrations { for registration in registrations {
let response = let response = U2F.sign_response(challenge.clone(), registration, response.clone(), _counter);
U2F.sign_response(challenge.clone(), registration, response.clone(), _counter);
match response { match response {
Ok(new_counter) => { Ok(new_counter) => {
_counter = new_counter; _counter = new_counter;
@ -495,7 +467,6 @@ pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> Emp
err!("error verifying response") err!("error verifying response")
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct EnableYubikeyData { struct EnableYubikeyData {
@ -515,8 +486,8 @@ pub struct YubikeyMetadata {
pub Nfc: bool, pub Nfc: bool,
} }
use yubico::Yubico;
use yubico::config::Config; use yubico::config::Config;
use yubico::Yubico;
fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> { fn parse_yubikeys(data: &EnableYubikeyData) -> Vec<String> {
let mut yubikeys: Vec<String> = Vec::new(); let mut yubikeys: Vec<String> = Vec::new();
@ -556,16 +527,17 @@ fn jsonify_yubikeys(yubikeys: Vec<String>) -> serde_json::Value {
fn verify_yubikey_otp(otp: String) -> JsonResult { fn verify_yubikey_otp(otp: String) -> JsonResult {
if !CONFIG.yubico_cred_set { if !CONFIG.yubico_cred_set {
err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. \ err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. Yubikey OTP Disabled")
Yubikey OTP Disabled")
} }
let yubico = Yubico::new(); let yubico = Yubico::new();
let config = Config::default().set_client_id(CONFIG.yubico_client_id.to_owned()).set_key(CONFIG.yubico_secret_key.to_owned()); let config = Config::default()
.set_client_id(CONFIG.yubico_client_id.to_owned())
.set_key(CONFIG.yubico_secret_key.to_owned());
let result = match CONFIG.yubico_server { let result = match CONFIG.yubico_server {
Some(ref server) => yubico.verify(otp, config.set_api_hosts(vec![server.to_owned()])), Some(ref server) => yubico.verify(otp, config.set_api_hosts(vec![server.to_owned()])),
None => yubico.verify(otp, config) None => yubico.verify(otp, config),
}; };
match result { match result {
@ -577,8 +549,7 @@ fn verify_yubikey_otp(otp: String) -> JsonResult {
#[post("/two-factor/get-yubikey", data = "<data>")] #[post("/two-factor/get-yubikey", data = "<data>")]
fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult { fn generate_yubikey(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> JsonResult {
if !CONFIG.yubico_cred_set { if !CONFIG.yubico_cred_set {
err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. \ err!("`YUBICO_CLIENT_ID` or `YUBICO_SECRET_KEY` environment variable is not set. Yubikey OTP Disabled")
Yubikey OTP Disabled")
} }
let data: PasswordData = data.into_inner().data; let data: PasswordData = data.into_inner().data;
@ -619,11 +590,7 @@ fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn:
} }
// Check if we already have some data // Check if we already have some data
let yubikey_data = TwoFactor::find_by_user_and_type( let yubikey_data = TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::YubiKey as i32, &conn);
&headers.user.uuid,
TwoFactorType::YubiKey as i32,
&conn,
);
if let Some(yubikey_data) = yubikey_data { if let Some(yubikey_data) = yubikey_data {
yubikey_data.delete(&conn)?; yubikey_data.delete(&conn)?;
@ -642,7 +609,7 @@ fn activate_yubikey(data: JsonUpcase<EnableYubikeyData>, headers: Headers, conn:
for yubikey in &yubikeys { for yubikey in &yubikeys {
if yubikey.len() == 12 { if yubikey.len() == 12 {
// YubiKey ID // YubiKey ID
continue continue;
} }
let result = verify_yubikey_otp(yubikey.to_owned()); let result = verify_yubikey_otp(yubikey.to_owned());
@ -692,7 +659,8 @@ pub fn validate_yubikey_login(user_uuid: &str, response: &str, conn: &DbConn) ->
None => err!("No YubiKey devices registered"), None => err!("No YubiKey devices registered"),
}; };
let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); let yubikey_metadata: YubikeyMetadata =
serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata");
let response_id = &response[..12]; let response_id = &response[..12];
if !yubikey_metadata.Keys.contains(&response_id.to_owned()) { if !yubikey_metadata.Keys.contains(&response_id.to_owned()) {
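Context for the `&response[..12]` check above: a YubiKey OTP is 44 modhex characters, and the first 12 are the device's public id, so matching against registered keys only needs that prefix. A tiny sketch with a made-up OTP value:

```rust
fn main() {
    // 44 modhex characters: 12-char public id + 32-char encrypted part.
    let response = "ccccccbcdefghijklnrtuvcbdefghijklnrtuvcbdefg"; // made-up OTP
    assert_eq!(response.len(), 44);

    let response_id = &response[..12];
    let registered_keys = vec!["ccccccbcdefg".to_string()];
    assert!(registered_keys.contains(&response_id.to_owned()));
}
```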
@ -145,7 +145,12 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: ClientIp) -> JsonResult
Ok(Json(result)) Ok(Json(result))
} }
fn twofactor_auth(user_uuid: &str, data: &ConnectData, device: &mut Device, conn: &DbConn) -> ApiResult<Option<String>> { fn twofactor_auth(
user_uuid: &str,
data: &ConnectData,
device: &mut Device,
conn: &DbConn,
) -> ApiResult<Option<String>> {
let twofactors_raw = TwoFactor::find_by_user(user_uuid, conn); let twofactors_raw = TwoFactor::find_by_user(user_uuid, conn);
// Remove u2f challenge twofactors (impl detail) // Remove u2f challenge twofactors (impl detail)
let twofactors: Vec<_> = twofactors_raw.iter().filter(|tf| tf.type_ < 1000).collect(); let twofactors: Vec<_> = twofactors_raw.iter().filter(|tf| tf.type_ < 1000).collect();
@ -252,13 +257,14 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api
result["TwoFactorProviders2"][provider.to_string()] = Value::Object(map); result["TwoFactorProviders2"][provider.to_string()] = Value::Object(map);
} }
Some(TwoFactorType::YubiKey) => { Some(tf_type @ TwoFactorType::YubiKey) => {
let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::YubiKey as i32, &conn) { let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, &conn) {
Some(tf) => tf, Some(tf) => tf,
None => err!("No YubiKey devices registered"), None => err!("No YubiKey devices registered"),
}; };
let yubikey_metadata: two_factor::YubikeyMetadata = serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata"); let yubikey_metadata: two_factor::YubikeyMetadata =
serde_json::from_str(&twofactor.data).expect("Can't parse Yubikey Metadata");
let mut map = JsonMap::new(); let mut map = JsonMap::new();
map.insert("Nfc".into(), Value::Bool(yubikey_metadata.Nfc)); map.insert("Nfc".into(), Value::Bool(yubikey_metadata.Nfc));
@ -1,17 +1,17 @@
pub(crate) mod core;
mod admin; mod admin;
pub(crate) mod core;
mod icons; mod icons;
mod identity; mod identity;
mod web;
mod notifications; mod notifications;
mod web;
pub use self::core::routes as core_routes;
pub use self::admin::routes as admin_routes; pub use self::admin::routes as admin_routes;
pub use self::core::routes as core_routes;
pub use self::icons::routes as icons_routes; pub use self::icons::routes as icons_routes;
pub use self::identity::routes as identity_routes; pub use self::identity::routes as identity_routes;
pub use self::web::routes as web_routes;
pub use self::notifications::routes as notifications_routes; pub use self::notifications::routes as notifications_routes;
pub use self::notifications::{start_notification_server, WebSocketUsers, UpdateType}; pub use self::notifications::{start_notification_server, Notify, UpdateType};
pub use self::web::routes as web_routes;
use rocket_contrib::json::Json; use rocket_contrib::json::Json;
use serde_json::Value; use serde_json::Value;
@ -28,7 +28,7 @@ type JsonUpcase<T> = Json<util::UpCase<T>>;
#[derive(Deserialize)] #[derive(Deserialize)]
#[allow(non_snake_case)] #[allow(non_snake_case)]
struct PasswordData { struct PasswordData {
MasterPasswordHash: String MasterPasswordHash: String,
} }
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
@ -42,14 +42,14 @@ impl NumberOrString {
fn into_string(self) -> String { fn into_string(self) -> String {
match self { match self {
NumberOrString::Number(n) => n.to_string(), NumberOrString::Number(n) => n.to_string(),
NumberOrString::String(s) => s NumberOrString::String(s) => s,
} }
} }
fn into_i32(self) -> Option<i32> { fn into_i32(self) -> Option<i32> {
match self { match self {
NumberOrString::Number(n) => Some(n), NumberOrString::Number(n) => Some(n),
NumberOrString::String(s) => s.parse().ok() NumberOrString::String(s) => s.parse().ok(),
} }
} }
} }
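The `NumberOrString` arms reformatted above belong to an untagged serde enum; the derive attributes are not part of this hunk, so the `#[serde(untagged)]` below is an assumption, but it shows how the same field accepts both JSON forms:

```rust
use serde::Deserialize;

// Minimal sketch, not the crate's exact definition.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum NumberOrString {
    Number(i32),
    String(String),
}

impl NumberOrString {
    fn into_i32(self) -> Option<i32> {
        match self {
            NumberOrString::Number(n) => Some(n),
            NumberOrString::String(s) => s.parse().ok(),
        }
    }
}

fn main() {
    // Both JSON forms map onto the same enum.
    let a: NumberOrString = serde_json::from_str("2").unwrap();
    let b: NumberOrString = serde_json::from_str("\"2\"").unwrap();
    assert_eq!(a.into_i32(), b.into_i32());
}
```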
@ -14,7 +14,7 @@ pub fn routes() -> Vec<Route> {
#[get("/hub")] #[get("/hub")]
fn websockets_err() -> JsonResult { fn websockets_err() -> JsonResult {
err!("'/notifications/hub' should be proxied towards the websocket server, otherwise notifications will not work. Go to the README for more info.") err!("'/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the README for more info.")
} }
#[post("/hub/negotiate")] #[post("/hub/negotiate")]
@ -40,9 +40,9 @@ fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
}))) })))
} }
/// //
/// Websockets server // Websockets server
/// //
use std::sync::Arc; use std::sync::Arc;
use std::thread; use std::thread;
@ -94,9 +94,7 @@ fn serialize_date(date: NaiveDateTime) -> Value {
use byteorder::{BigEndian, WriteBytesExt}; use byteorder::{BigEndian, WriteBytesExt};
let mut bs = [0u8; 8]; let mut bs = [0u8; 8];
bs.as_mut() bs.as_mut().write_i64::<BigEndian>(timestamp).expect("Unable to write");
.write_i64::<BigEndian>(timestamp)
.expect("Unable to write");
// -1 is Timestamp // -1 is Timestamp
// https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type // https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type
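The timestamp packing above is easier to see in isolation: the i64 Unix time is written big-endian into an 8-byte buffer that becomes the payload of msgpack's `-1` (Timestamp) extension. A self-contained check (the date value is arbitrary):

```rust
use byteorder::{BigEndian, WriteBytesExt};

fn timestamp_bytes(timestamp: i64) -> [u8; 8] {
    let mut bs = [0u8; 8];
    bs.as_mut()
        .write_i64::<BigEndian>(timestamp)
        .expect("an 8-byte buffer always fits an i64");
    bs
}

fn main() {
    let bs = timestamp_bytes(1_546_300_800); // 2019-01-01T00:00:00Z
    assert_eq!(bs, [0, 0, 0, 0, 0x5c, 0x2a, 0xad, 0x80]);
}
```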
@ -142,12 +140,7 @@ impl Handler for WSHandler {
use crate::auth; use crate::auth;
let claims = match auth::decode_jwt(access_token) { let claims = match auth::decode_jwt(access_token) {
Ok(claims) => claims, Ok(claims) => claims,
Err(_) => { Err(_) => return Err(ws::Error::new(ws::ErrorKind::Internal, "Invalid access token provided")),
return Err(ws::Error::new(
ws::ErrorKind::Internal,
"Invalid access token provided",
))
}
}; };
// Assign the user to the handler // Assign the user to the handler
@ -158,11 +151,9 @@ impl Handler for WSHandler {
let handler_insert = self.out.clone(); let handler_insert = self.out.clone();
let handler_update = self.out.clone(); let handler_update = self.out.clone();
self.users.map.upsert( self.users
user_uuid, .map
|| vec![handler_insert], .upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update));
|ref mut v| v.push(handler_update),
);
// Schedule a ping to keep the connection alive // Schedule a ping to keep the connection alive
self.out.timeout(PING_MS, PING) self.out.timeout(PING_MS, PING)
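The `upsert` call above keeps one entry per user uuid holding every open socket for that user. A std-only analogue of that bookkeeping (the real code uses `chashmap::CHashMap` so the map can be shared across threads; plain integers stand in for the ws `Sender` handles):

```rust
use std::collections::HashMap;

fn main() {
    let mut map: HashMap<String, Vec<i32>> = HashMap::new();
    for conn_id in 0..3 {
        // New user id: start a Vec; existing id: push the extra connection.
        map.entry("user-uuid".to_string())
            .or_insert_with(Vec::new)
            .push(conn_id);
    }
    assert_eq!(map["user-uuid"].len(), 3);
}
```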
@ -238,7 +229,7 @@ impl Factory for WSFactory {
#[derive(Clone)] #[derive(Clone)]
pub struct WebSocketUsers { pub struct WebSocketUsers {
pub map: Arc<CHashMap<String, Vec<Sender>>>, map: Arc<CHashMap<String, Vec<Sender>>>,
} }
impl WebSocketUsers { impl WebSocketUsers {
@ -338,32 +329,32 @@ fn create_ping() -> Vec<u8> {
#[allow(dead_code)] #[allow(dead_code)]
pub enum UpdateType { pub enum UpdateType {
SyncCipherUpdate = 0, CipherUpdate = 0,
SyncCipherCreate = 1, CipherCreate = 1,
SyncLoginDelete = 2, LoginDelete = 2,
SyncFolderDelete = 3, FolderDelete = 3,
SyncCiphers = 4, Ciphers = 4,
SyncVault = 5, Vault = 5,
SyncOrgKeys = 6, OrgKeys = 6,
SyncFolderCreate = 7, FolderCreate = 7,
SyncFolderUpdate = 8, FolderUpdate = 8,
SyncCipherDelete = 9, CipherDelete = 9,
SyncSettings = 10, SyncSettings = 10,
LogOut = 11, LogOut = 11,
} }
use rocket::State;
pub type Notify<'a> = State<'a, WebSocketUsers>;
pub fn start_notification_server() -> WebSocketUsers { pub fn start_notification_server() -> WebSocketUsers {
let factory = WSFactory::init(); let factory = WSFactory::init();
let users = factory.users.clone(); let users = factory.users.clone();
if CONFIG.websocket_enabled { if CONFIG.websocket_enabled {
thread::spawn(move || { thread::spawn(move || {
WebSocket::new(factory) WebSocket::new(factory).unwrap().listen(&CONFIG.websocket_url).unwrap();
.unwrap()
.listen(&CONFIG.websocket_url)
.unwrap();
}); });
} }
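On the `UpdateType` rename above: as far as this hunk shows, the explicit discriminants are unchanged, so most variants simply lose their `Sync` prefix on the Rust side while the numeric values sent to clients stay the same. A compressed sketch:

```rust
// Abbreviated: a few variants copied from the hunk above, discriminants unchanged.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
pub enum UpdateType {
    CipherUpdate = 0,
    CipherCreate = 1,
    LoginDelete = 2,
    // ... FolderDelete = 3 through CipherDelete = 9 elided ...
    SyncSettings = 10,
    LogOut = 11,
}

fn main() {
    // Clients keep receiving the same numeric codes after the rename.
    assert_eq!(UpdateType::CipherCreate as i32, 1);
    assert_eq!(UpdateType::LogOut as i32, 11);
}
```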
@ -1,6 +1,6 @@
/// //
/// JWT Handling // JWT Handling
/// //
use crate::util::read_file; use crate::util::read_file;
use chrono::Duration; use chrono::Duration;
@ -15,17 +15,20 @@ const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
lazy_static! { lazy_static! {
pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2); pub static ref DEFAULT_VALIDITY: Duration = Duration::hours(2);
pub static ref JWT_ISSUER: String = CONFIG.domain.clone(); pub static ref JWT_ISSUER: String = CONFIG.domain.clone();
static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM); static ref JWT_HEADER: Header = Header::new(JWT_ALGORITHM);
static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key) { static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file(&CONFIG.private_rsa_key) {
Ok(key) => key, Ok(key) => key,
Err(e) => panic!("Error loading private RSA Key from {}\n Error: {}", CONFIG.private_rsa_key, e) Err(e) => panic!(
"Error loading private RSA Key from {}\n Error: {}",
CONFIG.private_rsa_key, e
),
}; };
static ref PUBLIC_RSA_KEY: Vec<u8> = match read_file(&CONFIG.public_rsa_key) { static ref PUBLIC_RSA_KEY: Vec<u8> = match read_file(&CONFIG.public_rsa_key) {
Ok(key) => key, Ok(key) => key,
Err(e) => panic!("Error loading public RSA Key from {}\n Error: {}", CONFIG.public_rsa_key, e) Err(e) => panic!(
"Error loading public RSA Key from {}\n Error: {}",
CONFIG.public_rsa_key, e
),
}; };
} }
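The reformatted `lazy_static` block above keeps the same behaviour: the key file is read once on first use, and a bad path aborts with a message naming it. A minimal stand-alone sketch (the path and the `read_file` helper here are placeholders, not the project's config):

```rust
use lazy_static::lazy_static;
use std::fs;

fn read_file(path: &str) -> std::io::Result<Vec<u8>> {
    fs::read(path)
}

lazy_static! {
    // Loaded on first dereference; a missing file panics with the path, as above.
    static ref PRIVATE_RSA_KEY: Vec<u8> = match read_file("data/private_rsa_key.der") {
        Ok(key) => key,
        Err(e) => panic!(
            "Error loading private RSA Key from {}\n Error: {}",
            "data/private_rsa_key.der", e
        ),
    };
}

fn main() {
    // With no key file at the placeholder path this panics, which is the
    // behaviour the real code wants for a misconfigured instance.
    println!("key length: {}", PRIVATE_RSA_KEY.len());
}
```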
@ -117,14 +120,14 @@ pub struct InviteJWTClaims {
pub user_org_id: Option<String>, pub user_org_id: Option<String>,
} }
/// //
/// Bearer token authentication // Bearer token authentication
/// //
use rocket::request::{self, FromRequest, Request};
use rocket::Outcome; use rocket::Outcome;
use rocket::request::{self, Request, FromRequest};
use crate::db::models::{Device, User, UserOrgStatus, UserOrgType, UserOrganization};
use crate::db::DbConn; use crate::db::DbConn;
use crate::db::models::{User, UserOrganization, UserOrgType, UserOrgStatus, Device};
pub struct Headers { pub struct Headers {
pub host: String, pub host: String,
@ -227,10 +230,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
Some(Ok(org_id)) => { Some(Ok(org_id)) => {
let conn = match request.guard::<DbConn>() { let conn = match request.guard::<DbConn>() {
Outcome::Success(conn) => conn, Outcome::Success(conn) => conn,
_ => err_handler!("Error getting DB") _ => err_handler!("Error getting DB"),
}; };
let org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { let user = headers.user;
let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn) {
Some(user) => { Some(user) => {
if user.status == UserOrgStatus::Confirmed as i32 { if user.status == UserOrgStatus::Confirmed as i32 {
user user
@ -238,17 +242,18 @@ impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders {
err_handler!("The current user isn't confirmed member of the organization") err_handler!("The current user isn't confirmed member of the organization")
} }
} }
None => err_handler!("The current user isn't member of the organization") None => err_handler!("The current user isn't member of the organization"),
}; };
Outcome::Success(Self { Outcome::Success(Self {
host: headers.host, host: headers.host,
device: headers.device, device: headers.device,
user: headers.user, user,
org_user_type: { org_user_type: {
if let Some(org_usr_type) = UserOrgType::from_i32(org_user.type_) { if let Some(org_usr_type) = UserOrgType::from_i32(org_user.type_) {
org_usr_type org_usr_type
} else { // This should only happen if the DB is corrupted } else {
// This should only happen if the DB is corrupted
err_handler!("Unknown user type in the database") err_handler!("Unknown user type in the database")
} }
}, },
@ -319,9 +324,9 @@ impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders {
} }
} }
/// //
/// Client IP address detection // Client IP address detection
/// //
use std::net::IpAddr; use std::net::IpAddr;
pub struct ClientIp { pub struct ClientIp {
@ -1,6 +1,6 @@
/// //
/// PBKDF2 derivation // PBKDF2 derivation
/// //
use ring::{digest, pbkdf2}; use ring::{digest, pbkdf2};
@ -19,9 +19,9 @@ pub fn verify_password_hash(secret: &[u8], salt: &[u8], previous: &[u8], iterati
pbkdf2::verify(DIGEST_ALG, iterations, salt, secret, previous).is_ok() pbkdf2::verify(DIGEST_ALG, iterations, salt, secret, previous).is_ok()
} }
/// //
/// Random values // Random values
/// //
pub fn get_random_64() -> Vec<u8> { pub fn get_random_64() -> Vec<u8> {
get_random(vec![0u8; 64]) get_random(vec![0u8; 64])
@ -30,7 +30,9 @@ pub fn get_random_64() -> Vec<u8> {
pub fn get_random(mut array: Vec<u8>) -> Vec<u8> { pub fn get_random(mut array: Vec<u8>) -> Vec<u8> {
use ring::rand::{SecureRandom, SystemRandom}; use ring::rand::{SecureRandom, SystemRandom};
SystemRandom::new().fill(&mut array).expect("Error generating random values"); SystemRandom::new()
.fill(&mut array)
.expect("Error generating random values");
array array
} }
@ -1,9 +1,9 @@
use std::ops::Deref; use std::ops::Deref;
use diesel::{Connection as DieselConnection, ConnectionError};
use diesel::sqlite::SqliteConnection;
use diesel::r2d2; use diesel::r2d2;
use diesel::r2d2::ConnectionManager; use diesel::r2d2::ConnectionManager;
use diesel::sqlite::SqliteConnection;
use diesel::{Connection as DieselConnection, ConnectionError};
use rocket::http::Status; use rocket::http::Status;
use rocket::request::{self, FromRequest}; use rocket::request::{self, FromRequest};
@ -20,16 +20,14 @@ type Pool = r2d2::Pool<ConnectionManager<Connection>>;
/// Connection request guard type: a wrapper around an r2d2 pooled connection. /// Connection request guard type: a wrapper around an r2d2 pooled connection.
pub struct DbConn(pub r2d2::PooledConnection<ConnectionManager<Connection>>); pub struct DbConn(pub r2d2::PooledConnection<ConnectionManager<Connection>>);
pub mod schema;
pub mod models; pub mod models;
pub mod schema;
/// Initializes a database pool. /// Initializes a database pool.
pub fn init_pool() -> Pool { pub fn init_pool() -> Pool {
let manager = ConnectionManager::new(&*CONFIG.database_url); let manager = ConnectionManager::new(&*CONFIG.database_url);
r2d2::Pool::builder() r2d2::Pool::builder().build(manager).expect("Failed to create pool")
.build(manager)
.expect("Failed to create pool")
} }
pub fn get_connection() -> Result<Connection, ConnectionError> { pub fn get_connection() -> Result<Connection, ConnectionError> {
@ -46,7 +44,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for DbConn {
let pool = request.guard::<State<Pool>>()?; let pool = request.guard::<State<Pool>>()?;
match pool.get() { match pool.get() {
Ok(conn) => Outcome::Success(DbConn(conn)), Ok(conn) => Outcome::Success(DbConn(conn)),
Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())) Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())),
} }
} }
} }
@ -49,10 +49,10 @@ impl Attachment {
} }
} }
use crate::db::schema::attachments;
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::attachments;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -68,8 +68,7 @@ impl Attachment {
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
crate::util::retry( crate::util::retry(
|| diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))) || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(&**conn),
.execute(&**conn),
10, 10,
) )
.map_res("Error deleting attachment")?; .map_res("Error deleting attachment")?;
@ -86,7 +85,10 @@ impl Attachment {
} }
pub fn find_by_id(id: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_id(id: &str, conn: &DbConn) -> Option<Self> {
attachments::table.filter(attachments::id.eq(id)).first::<Self>(&**conn).ok() attachments::table
.filter(attachments::id.eq(id))
.first::<Self>(&**conn)
.ok()
} }
pub fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec<Self> {
@ -1,7 +1,9 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use super::{Attachment, CollectionCipher, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; use super::{
Attachment, CollectionCipher, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization,
};
#[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[table_name = "ciphers"] #[table_name = "ciphers"]
@ -79,11 +81,15 @@ impl Cipher {
let fields_json: Value = if let Some(ref fields) = self.fields { let fields_json: Value = if let Some(ref fields) = self.fields {
serde_json::from_str(fields).unwrap() serde_json::from_str(fields).unwrap()
} else { Value::Null }; } else {
Value::Null
};
let password_history_json: Value = if let Some(ref password_history) = self.password_history { let password_history_json: Value = if let Some(ref password_history) = self.password_history {
serde_json::from_str(password_history).unwrap() serde_json::from_str(password_history).unwrap()
} else { Value::Null }; } else {
Value::Null
};
let mut data_json: Value = serde_json::from_str(&self.data).unwrap(); let mut data_json: Value = serde_json::from_str(&self.data).unwrap();
@ -137,8 +143,9 @@ impl Cipher {
Some(ref user_uuid) => { Some(ref user_uuid) => {
User::update_uuid_revision(&user_uuid, conn); User::update_uuid_revision(&user_uuid, conn);
user_uuids.push(user_uuid.clone()) user_uuids.push(user_uuid.clone())
}, }
None => { // Belongs to Organization, need to update affected users None => {
// Belongs to Organization, need to update affected users
if let Some(ref org_uuid) = self.organization_uuid { if let Some(ref org_uuid) = self.organization_uuid {
UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn) UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn)
.iter() .iter()
@ -207,7 +214,9 @@ impl Cipher {
Ok(()) //nothing to do Ok(()) //nothing to do
} else { } else {
self.update_users_revision(conn); self.update_users_revision(conn);
if let Some(current_folder) = FolderCipher::find_by_folder_and_cipher(&current_folder, &self.uuid, &conn) { if let Some(current_folder) =
FolderCipher::find_by_folder_and_cipher(&current_folder, &self.uuid, &conn)
{
current_folder.delete(&conn)?; current_folder.delete(&conn)?;
} }
FolderCipher::new(&new_folder, &self.uuid).save(&conn) FolderCipher::new(&new_folder, &self.uuid).save(&conn)
@ -228,63 +237,78 @@ impl Cipher {
pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
ciphers::table ciphers::table
.filter(ciphers::uuid.eq(&self.uuid)) .filter(ciphers::uuid.eq(&self.uuid))
.left_join(users_organizations::table.on( .left_join(
ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( users_organizations::table.on(ciphers::organization_uuid
users_organizations::user_uuid.eq(user_uuid) .eq(users_organizations::org_uuid.nullable())
.and(users_organizations::user_uuid.eq(user_uuid))),
) )
))
.left_join(ciphers_collections::table) .left_join(ciphers_collections::table)
.left_join(users_collections::table.on( .left_join(
ciphers_collections::collection_uuid.eq(users_collections::collection_uuid) users_collections::table
)) .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)),
.filter(ciphers::user_uuid.eq(user_uuid).or( // Cipher owner )
users_organizations::access_all.eq(true).or( // access_all in Organization .filter(ciphers::user_uuid.eq(user_uuid).or(
users_organizations::type_.le(UserOrgType::Admin as i32).or( // Org admin or owner // Cipher owner
users_organizations::access_all.eq(true).or(
// access_all in Organization
users_organizations::type_.le(UserOrgType::Admin as i32).or(
// Org admin or owner
users_collections::user_uuid.eq(user_uuid).and( users_collections::user_uuid.eq(user_uuid).and(
users_collections::read_only.eq(false) //R/W access to collection users_collections::read_only.eq(false), //R/W access to collection
) ),
) ),
) ),
)) ))
.select(ciphers::all_columns) .select(ciphers::all_columns)
.first::<Self>(&**conn).ok().is_some() .first::<Self>(&**conn)
.ok()
.is_some()
} }
pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
ciphers::table ciphers::table
.filter(ciphers::uuid.eq(&self.uuid)) .filter(ciphers::uuid.eq(&self.uuid))
.left_join(users_organizations::table.on( .left_join(
ciphers::organization_uuid.eq(users_organizations::org_uuid.nullable()).and( users_organizations::table.on(ciphers::organization_uuid
users_organizations::user_uuid.eq(user_uuid) .eq(users_organizations::org_uuid.nullable())
.and(users_organizations::user_uuid.eq(user_uuid))),
) )
))
.left_join(ciphers_collections::table) .left_join(ciphers_collections::table)
.left_join(users_collections::table.on( .left_join(
ciphers_collections::collection_uuid.eq(users_collections::collection_uuid) users_collections::table
)) .on(ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)),
.filter(ciphers::user_uuid.eq(user_uuid).or( // Cipher owner
users_organizations::access_all.eq(true).or( // access_all in Organization
users_organizations::type_.le(UserOrgType::Admin as i32).or( // Org admin or owner
users_collections::user_uuid.eq(user_uuid) // Access to Collection
)
) )
.filter(ciphers::user_uuid.eq(user_uuid).or(
// Cipher owner
users_organizations::access_all.eq(true).or(
// access_all in Organization
users_organizations::type_.le(UserOrgType::Admin as i32).or(
// Org admin or owner
users_collections::user_uuid.eq(user_uuid), // Access to Collection
),
),
)) ))
.select(ciphers::all_columns) .select(ciphers::all_columns)
.first::<Self>(&**conn).ok().is_some() .first::<Self>(&**conn)
.ok()
.is_some()
} }
pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> { pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option<String> {
folders_ciphers::table.inner_join(folders::table) folders_ciphers::table
.inner_join(folders::table)
.filter(folders::user_uuid.eq(&user_uuid)) .filter(folders::user_uuid.eq(&user_uuid))
.filter(folders_ciphers::cipher_uuid.eq(&self.uuid)) .filter(folders_ciphers::cipher_uuid.eq(&self.uuid))
.select(folders_ciphers::folder_uuid) .select(folders_ciphers::folder_uuid)
.first::<String>(&**conn).ok() .first::<String>(&**conn)
.ok()
} }
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
ciphers::table ciphers::table
.filter(ciphers::uuid.eq(uuid)) .filter(ciphers::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
// Find all ciphers accessible to user // Find all ciphers accessible to user
@ -1,6 +1,6 @@
use serde_json::Value; use serde_json::Value;
use super::{Organization, UserOrganization, UserOrgType, UserOrgStatus}; use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization};
#[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[table_name = "collections"] #[table_name = "collections"]
@ -33,10 +33,10 @@ impl Collection {
} }
} }
use crate::db::schema::*;
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::*;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -61,11 +61,8 @@ impl Collection {
CollectionCipher::delete_all_by_collection(&self.uuid, &conn)?; CollectionCipher::delete_all_by_collection(&self.uuid, &conn)?;
CollectionUser::delete_all_by_collection(&self.uuid, &conn)?; CollectionUser::delete_all_by_collection(&self.uuid, &conn)?;
diesel::delete( diesel::delete(collections::table.filter(collections::uuid.eq(self.uuid)))
collections::table.filter( .execute(&**conn)
collections::uuid.eq(self.uuid)
)
).execute(&**conn)
.map_res("Error deleting collection") .map_res("Error deleting collection")
} }
@ -79,7 +76,8 @@ impl Collection {
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
collections::table collections::table
.filter(collections::uuid.eq(uuid)) .filter(collections::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
@ -106,13 +104,17 @@ impl Collection {
} }
pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec<Self> {
Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect() Self::find_by_user_uuid(user_uuid, conn)
.into_iter()
.filter(|c| c.org_uuid == org_uuid)
.collect()
} }
pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
collections::table collections::table
.filter(collections::org_uuid.eq(org_uuid)) .filter(collections::org_uuid.eq(org_uuid))
.load::<Self>(&**conn).expect("Error loading collections") .load::<Self>(&**conn)
.expect("Error loading collections")
} }
pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> {
@ -120,7 +122,8 @@ impl Collection {
.filter(collections::uuid.eq(uuid)) .filter(collections::uuid.eq(uuid))
.filter(collections::org_uuid.eq(org_uuid)) .filter(collections::org_uuid.eq(org_uuid))
.select(collections::all_columns) .select(collections::all_columns)
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> {
@ -153,12 +156,15 @@ impl Collection {
if user_org.access_all { if user_org.access_all {
true true
} else { } else {
users_collections::table.inner_join(collections::table) users_collections::table
.inner_join(collections::table)
.filter(users_collections::collection_uuid.eq(&self.uuid)) .filter(users_collections::collection_uuid.eq(&self.uuid))
.filter(users_collections::user_uuid.eq(&user_uuid)) .filter(users_collections::user_uuid.eq(&user_uuid))
.filter(users_collections::read_only.eq(false)) .filter(users_collections::read_only.eq(false))
.select(collections::all_columns) .select(collections::all_columns)
.first::<Self>(&**conn).ok().is_some() // Read only or no access to collection .first::<Self>(&**conn)
.ok()
.is_some() // Read only or no access to collection
} }
} }
} }
@ -186,7 +192,8 @@ impl CollectionUser {
.inner_join(collections::table.on(collections::uuid.eq(users_collections::collection_uuid))) .inner_join(collections::table.on(collections::uuid.eq(users_collections::collection_uuid)))
.filter(collections::org_uuid.eq(org_uuid)) .filter(collections::org_uuid.eq(org_uuid))
.select(users_collections::all_columns) .select(users_collections::all_columns)
.load::<Self>(&**conn).expect("Error loading users_collections") .load::<Self>(&**conn)
.expect("Error loading users_collections")
} }
pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult { pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, conn: &DbConn) -> EmptyResult {
@ -197,16 +204,19 @@ impl CollectionUser {
users_collections::user_uuid.eq(user_uuid), users_collections::user_uuid.eq(user_uuid),
users_collections::collection_uuid.eq(collection_uuid), users_collections::collection_uuid.eq(collection_uuid),
users_collections::read_only.eq(read_only), users_collections::read_only.eq(read_only),
)).execute(&**conn) ))
.execute(&**conn)
.map_res("Error adding user to collection") .map_res("Error adding user to collection")
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
User::update_uuid_revision(&self.user_uuid, conn); User::update_uuid_revision(&self.user_uuid, conn);
diesel::delete(users_collections::table diesel::delete(
users_collections::table
.filter(users_collections::user_uuid.eq(&self.user_uuid)) .filter(users_collections::user_uuid.eq(&self.user_uuid))
.filter(users_collections::collection_uuid.eq(&self.collection_uuid))) .filter(users_collections::collection_uuid.eq(&self.collection_uuid)),
)
.execute(&**conn) .execute(&**conn)
.map_res("Error removing user from collection") .map_res("Error removing user from collection")
} }
@ -215,7 +225,8 @@ impl CollectionUser {
users_collections::table users_collections::table
.filter(users_collections::collection_uuid.eq(collection_uuid)) .filter(users_collections::collection_uuid.eq(collection_uuid))
.select(users_collections::all_columns) .select(users_collections::all_columns)
.load::<Self>(&**conn).expect("Error loading users_collections") .load::<Self>(&**conn)
.expect("Error loading users_collections")
} }
pub fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option<Self> {
@ -223,28 +234,25 @@ impl CollectionUser {
.filter(users_collections::collection_uuid.eq(collection_uuid)) .filter(users_collections::collection_uuid.eq(collection_uuid))
.filter(users_collections::user_uuid.eq(user_uuid)) .filter(users_collections::user_uuid.eq(user_uuid))
.select(users_collections::all_columns) .select(users_collections::all_columns)
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
CollectionUser::find_by_collection(&collection_uuid, conn) CollectionUser::find_by_collection(&collection_uuid, conn)
.iter() .iter()
.for_each(|collection| { .for_each(|collection| User::update_uuid_revision(&collection.user_uuid, conn));
User::update_uuid_revision(&collection.user_uuid, conn)
});
diesel::delete(users_collections::table diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid)))
.filter(users_collections::collection_uuid.eq(collection_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error deleting users from collection") .map_res("Error deleting users from collection")
} }
pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
User::update_uuid_revision(&user_uuid, conn); User::update_uuid_revision(&user_uuid, conn);
diesel::delete(users_collections::table diesel::delete(users_collections::table.filter(users_collections::user_uuid.eq(user_uuid)))
.filter(users_collections::user_uuid.eq(user_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error removing user from collections") .map_res("Error removing user from collections")
} }
} }
@ -268,29 +276,30 @@ impl CollectionCipher {
.values(( .values((
ciphers_collections::cipher_uuid.eq(cipher_uuid), ciphers_collections::cipher_uuid.eq(cipher_uuid),
ciphers_collections::collection_uuid.eq(collection_uuid), ciphers_collections::collection_uuid.eq(collection_uuid),
)).execute(&**conn) ))
.execute(&**conn)
.map_res("Error adding cipher to collection") .map_res("Error adding cipher to collection")
} }
pub fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete(ciphers_collections::table diesel::delete(
ciphers_collections::table
.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)) .filter(ciphers_collections::cipher_uuid.eq(cipher_uuid))
.filter(ciphers_collections::collection_uuid.eq(collection_uuid))) .filter(ciphers_collections::collection_uuid.eq(collection_uuid)),
)
.execute(&**conn) .execute(&**conn)
.map_res("Error deleting cipher from collection") .map_res("Error deleting cipher from collection")
} }
pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete(ciphers_collections::table diesel::delete(ciphers_collections::table.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)))
.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error removing cipher from collections") .map_res("Error removing cipher from collections")
} }
pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete(ciphers_collections::table diesel::delete(ciphers_collections::table.filter(ciphers_collections::collection_uuid.eq(collection_uuid)))
.filter(ciphers_collections::collection_uuid.eq(collection_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error removing ciphers from collection") .map_res("Error removing ciphers from collection")
} }
} }

View file

@ -44,8 +44,8 @@ impl Device {
} }
pub fn refresh_twofactor_remember(&mut self) -> String { pub fn refresh_twofactor_remember(&mut self) -> String {
use data_encoding::BASE64;
use crate::crypto; use crate::crypto;
use data_encoding::BASE64;
let twofactor_remember = BASE64.encode(&crypto::get_random(vec![0u8; 180])); let twofactor_remember = BASE64.encode(&crypto::get_random(vec![0u8; 180]));
self.twofactor_remember = Some(twofactor_remember.clone()); self.twofactor_remember = Some(twofactor_remember.clone());
@ -57,12 +57,11 @@ impl Device {
self.twofactor_remember = None; self.twofactor_remember = None;
} }
pub fn refresh_tokens(&mut self, user: &super::User, orgs: Vec<super::UserOrganization>) -> (String, i64) { pub fn refresh_tokens(&mut self, user: &super::User, orgs: Vec<super::UserOrganization>) -> (String, i64) {
// If there is no refresh token, we create one // If there is no refresh token, we create one
if self.refresh_token.is_empty() { if self.refresh_token.is_empty() {
use data_encoding::BASE64URL;
use crate::crypto; use crate::crypto;
use data_encoding::BASE64URL;
self.refresh_token = BASE64URL.encode(&crypto::get_random_64()); self.refresh_token = BASE64URL.encode(&crypto::get_random_64());
} }
@ -105,10 +104,10 @@ impl Device {
} }
} }
use crate::db::schema::devices;
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::devices;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -119,20 +118,15 @@ impl Device {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
crate::util::retry( crate::util::retry(
|| { || diesel::replace_into(devices::table).values(&*self).execute(&**conn),
diesel::replace_into(devices::table)
.values(&*self)
.execute(&**conn)
},
10, 10,
) )
.map_res("Error saving device") .map_res("Error saving device")
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
diesel::delete(devices::table.filter( diesel::delete(devices::table.filter(devices::uuid.eq(self.uuid)))
devices::uuid.eq(self.uuid) .execute(&**conn)
)).execute(&**conn)
.map_res("Error removing device") .map_res("Error removing device")
} }
@ -146,18 +140,21 @@ impl Device {
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
devices::table devices::table
.filter(devices::uuid.eq(uuid)) .filter(devices::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option<Self> {
devices::table devices::table
.filter(devices::refresh_token.eq(refresh_token)) .filter(devices::refresh_token.eq(refresh_token))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
devices::table devices::table
.filter(devices::user_uuid.eq(user_uuid)) .filter(devices::user_uuid.eq(user_uuid))
.load::<Self>(&**conn).expect("Error loading devices") .load::<Self>(&**conn)
.expect("Error loading devices")
} }
} }

View file

@ -1,7 +1,7 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use super::{User, Cipher}; use super::{Cipher, User};
#[derive(Debug, Identifiable, Queryable, Insertable, Associations)] #[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
#[table_name = "folders"] #[table_name = "folders"]
@ -61,10 +61,10 @@ impl FolderCipher {
} }
} }
use crate::db::schema::{folders, folders_ciphers};
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::{folders, folders_ciphers};
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -76,7 +76,8 @@ impl Folder {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
diesel::replace_into(folders::table) diesel::replace_into(folders::table)
.values(&*self).execute(&**conn) .values(&*self)
.execute(&**conn)
.map_res("Error saving folder") .map_res("Error saving folder")
} }
@ -84,11 +85,8 @@ impl Folder {
User::update_uuid_revision(&self.user_uuid, conn); User::update_uuid_revision(&self.user_uuid, conn);
FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;
diesel::delete( diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid)))
folders::table.filter( .execute(&**conn)
folders::uuid.eq(&self.uuid)
)
).execute(&**conn)
.map_res("Error deleting folder") .map_res("Error deleting folder")
} }
@ -102,13 +100,15 @@ impl Folder {
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
folders::table folders::table
.filter(folders::uuid.eq(uuid)) .filter(folders::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
folders::table folders::table
.filter(folders::user_uuid.eq(user_uuid)) .filter(folders::user_uuid.eq(user_uuid))
.load::<Self>(&**conn).expect("Error loading folders") .load::<Self>(&**conn)
.expect("Error loading folders")
} }
} }
@ -121,24 +121,24 @@ impl FolderCipher {
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
diesel::delete(folders_ciphers::table diesel::delete(
folders_ciphers::table
.filter(folders_ciphers::cipher_uuid.eq(self.cipher_uuid)) .filter(folders_ciphers::cipher_uuid.eq(self.cipher_uuid))
.filter(folders_ciphers::folder_uuid.eq(self.folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(self.folder_uuid)),
).execute(&**conn) )
.execute(&**conn)
.map_res("Error removing cipher from folder") .map_res("Error removing cipher from folder")
} }
pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete(folders_ciphers::table diesel::delete(folders_ciphers::table.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)))
.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error removing cipher from folders") .map_res("Error removing cipher from folders")
} }
pub fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete(folders_ciphers::table diesel::delete(folders_ciphers::table.filter(folders_ciphers::folder_uuid.eq(folder_uuid)))
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .execute(&**conn)
).execute(&**conn)
.map_res("Error removing ciphers from folder") .map_res("Error removing ciphers from folder")
} }
@ -146,12 +146,14 @@ impl FolderCipher {
folders_ciphers::table folders_ciphers::table
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid)) .filter(folders_ciphers::cipher_uuid.eq(cipher_uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec<Self> {
folders_ciphers::table folders_ciphers::table
.filter(folders_ciphers::folder_uuid.eq(folder_uuid)) .filter(folders_ciphers::folder_uuid.eq(folder_uuid))
.load::<Self>(&**conn).expect("Error loading folders") .load::<Self>(&**conn)
.expect("Error loading folders")
} }
} }

View file

@ -10,10 +10,10 @@ mod two_factor;
pub use self::attachment::Attachment; pub use self::attachment::Attachment;
pub use self::cipher::Cipher; pub use self::cipher::Cipher;
pub use self::collection::{Collection, CollectionCipher, CollectionUser};
pub use self::device::Device; pub use self::device::Device;
pub use self::folder::{Folder, FolderCipher}; pub use self::folder::{Folder, FolderCipher};
pub use self::user::{User, Invitation};
pub use self::organization::Organization; pub use self::organization::Organization;
pub use self::organization::{UserOrganization, UserOrgStatus, UserOrgType}; pub use self::organization::{UserOrgStatus, UserOrgType, UserOrganization};
pub use self::collection::{Collection, CollectionUser, CollectionCipher};
pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::user::{Invitation, User};

View file

@ -1,7 +1,7 @@
use std::cmp::Ordering;
use serde_json::Value; use serde_json::Value;
use std::cmp::Ordering;
use super::{User, CollectionUser}; use super::{CollectionUser, User};
#[derive(Debug, Identifiable, Queryable, Insertable)] #[derive(Debug, Identifiable, Queryable, Insertable)]
#[table_name = "organizations"] #[table_name = "organizations"]
@ -32,9 +32,7 @@ pub enum UserOrgStatus {
Confirmed = 2, Confirmed = 2,
} }
#[derive(Copy, Clone)] #[derive(Copy, Clone, PartialEq, Eq)]
#[derive(PartialEq)]
#[derive(Eq)]
pub enum UserOrgType { pub enum UserOrgType {
Owner = 0, Owner = 0,
Admin = 1, Admin = 1,
@ -51,13 +49,13 @@ impl Ord for UserOrgType {
UserOrgType::Owner => Ordering::Greater, UserOrgType::Owner => Ordering::Greater,
UserOrgType::Admin => match other { UserOrgType::Admin => match other {
UserOrgType::Owner => Ordering::Less, UserOrgType::Owner => Ordering::Less,
_ => Ordering::Greater _ => Ordering::Greater,
}, },
UserOrgType::Manager => match other { UserOrgType::Manager => match other {
UserOrgType::Owner | UserOrgType::Admin => Ordering::Less, UserOrgType::Owner | UserOrgType::Admin => Ordering::Less,
_ => Ordering::Greater _ => Ordering::Greater,
}, },
UserOrgType::User => Ordering::Less UserOrgType::User => Ordering::Less,
} }
} }
} }
@ -78,7 +76,7 @@ impl PartialEq<i32> for UserOrgType {
impl PartialOrd<i32> for UserOrgType { impl PartialOrd<i32> for UserOrgType {
fn partial_cmp(&self, other: &i32) -> Option<Ordering> { fn partial_cmp(&self, other: &i32) -> Option<Ordering> {
if let Some(other) = Self::from_i32(*other) { if let Some(other) = Self::from_i32(*other) {
return Some(self.cmp(&other)) return Some(self.cmp(&other));
} }
None None
} }
@ -96,7 +94,6 @@ impl PartialOrd<i32> for UserOrgType {
_ => true, _ => true,
} }
} }
} }
impl PartialEq<UserOrgType> for i32 { impl PartialEq<UserOrgType> for i32 {
@ -108,7 +105,7 @@ impl PartialEq<UserOrgType> for i32 {
impl PartialOrd<UserOrgType> for i32 { impl PartialOrd<UserOrgType> for i32 {
fn partial_cmp(&self, other: &UserOrgType) -> Option<Ordering> { fn partial_cmp(&self, other: &UserOrgType) -> Option<Ordering> {
if let Some(self_type) = UserOrgType::from_i32(*self) { if let Some(self_type) = UserOrgType::from_i32(*self) {
return Some(self_type.cmp(other)) return Some(self_type.cmp(other));
} }
None None
} }
@ -126,7 +123,6 @@ impl PartialOrd<UserOrgType> for i32 {
_ => false, _ => false,
} }
} }
} }
impl UserOrgType { impl UserOrgType {
@ -149,7 +145,6 @@ impl UserOrgType {
_ => None, _ => None,
} }
} }
} }
/// Local methods /// Local methods
@ -208,11 +203,10 @@ impl UserOrganization {
} }
} }
use crate::db::schema::{ciphers_collections, organizations, users_collections, users_organizations};
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::{organizations, users_organizations, users_collections, ciphers_collections};
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -227,7 +221,8 @@ impl Organization {
}); });
diesel::replace_into(organizations::table) diesel::replace_into(organizations::table)
.values(&*self).execute(&**conn) .values(&*self)
.execute(&**conn)
.map_res("Error saving organization") .map_res("Error saving organization")
} }
@ -238,18 +233,16 @@ impl Organization {
Collection::delete_all_by_organization(&self.uuid, &conn)?; Collection::delete_all_by_organization(&self.uuid, &conn)?;
UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; UserOrganization::delete_all_by_organization(&self.uuid, &conn)?;
diesel::delete( diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid)))
organizations::table.filter( .execute(&**conn)
organizations::uuid.eq(self.uuid)
)
).execute(&**conn)
.map_res("Error saving organization") .map_res("Error saving organization")
} }
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
organizations::table organizations::table
.filter(organizations::uuid.eq(uuid)) .filter(organizations::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
} }
@ -319,7 +312,10 @@ impl UserOrganization {
vec![] // If we have complete access, no need to fill the array vec![] // If we have complete access, no need to fill the array
} else { } else {
let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
collections.iter().map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only})).collect() collections
.iter()
.map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only}))
.collect()
}; };
json!({ json!({
@ -339,7 +335,8 @@ impl UserOrganization {
User::update_uuid_revision(&self.user_uuid, conn); User::update_uuid_revision(&self.user_uuid, conn);
diesel::replace_into(users_organizations::table) diesel::replace_into(users_organizations::table)
.values(&*self).execute(&**conn) .values(&*self)
.execute(&**conn)
.map_res("Error adding user to organization") .map_res("Error adding user to organization")
} }
@ -348,11 +345,8 @@ impl UserOrganization {
CollectionUser::delete_all_by_user(&self.user_uuid, &conn)?; CollectionUser::delete_all_by_user(&self.user_uuid, &conn)?;
diesel::delete( diesel::delete(users_organizations::table.filter(users_organizations::uuid.eq(self.uuid)))
users_organizations::table.filter( .execute(&**conn)
users_organizations::uuid.eq(self.uuid)
)
).execute(&**conn)
.map_res("Error removing user from organization") .map_res("Error removing user from organization")
} }
@ -377,54 +371,62 @@ impl UserOrganization {
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::uuid.eq(uuid)) .filter(users_organizations::uuid.eq(uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::uuid.eq(uuid)) .filter(users_organizations::uuid.eq(uuid))
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
.filter(users_organizations::status.eq(UserOrgStatus::Confirmed as i32)) .filter(users_organizations::status.eq(UserOrgStatus::Confirmed as i32))
.load::<Self>(&**conn).unwrap_or_default() .load::<Self>(&**conn)
.unwrap_or_default()
} }
pub fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
.filter(users_organizations::status.eq(UserOrgStatus::Invited as i32)) .filter(users_organizations::status.eq(UserOrgStatus::Invited as i32))
.load::<Self>(&**conn).unwrap_or_default() .load::<Self>(&**conn)
.unwrap_or_default()
} }
pub fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
.load::<Self>(&**conn).unwrap_or_default() .load::<Self>(&**conn)
.unwrap_or_default()
} }
pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
.load::<Self>(&**conn).expect("Error loading user organizations") .load::<Self>(&**conn)
.expect("Error loading user organizations")
} }
pub fn find_by_org_and_type(org_uuid: &str, type_: i32, conn: &DbConn) -> Vec<Self> { pub fn find_by_org_and_type(org_uuid: &str, type_: i32, conn: &DbConn) -> Vec<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
.filter(users_organizations::type_.eq(type_)) .filter(users_organizations::type_.eq(type_))
.load::<Self>(&**conn).expect("Error loading user organizations") .load::<Self>(&**conn)
.expect("Error loading user organizations")
} }
pub fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option<Self> {
users_organizations::table users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid)) .filter(users_organizations::user_uuid.eq(user_uuid))
.filter(users_organizations::org_uuid.eq(org_uuid)) .filter(users_organizations::org_uuid.eq(org_uuid))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> {
@ -461,7 +463,4 @@ impl UserOrganization {
.select(users_organizations::all_columns) .select(users_organizations::all_columns)
.load::<Self>(&**conn).expect("Error loading user organizations") .load::<Self>(&**conn).expect("Error loading user organizations")
} }
} }
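For reference, a minimal std-only sketch of the role ordering that the organization.rs hunks above reformat. Only the match arms and the Owner = 0 / Admin = 1 discriminants are visible in the diff; the Manager/User values and the equality short-circuit are assumptions, and the real module additionally implements the i32 comparisons shown above.

    use std::cmp::Ordering;

    // Sketch only: discriminants for Manager and User are assumed; the match
    // arms mirror the hunks above.
    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    enum UserOrgType {
        Owner = 0,
        Admin = 1,
        Manager = 2,
        User = 3,
    }

    impl Ord for UserOrgType {
        fn cmp(&self, other: &UserOrgType) -> Ordering {
            // Equal roles compare equal; otherwise Owner > Admin > Manager > User.
            if self == other {
                return Ordering::Equal;
            }
            match self {
                UserOrgType::Owner => Ordering::Greater,
                UserOrgType::Admin => match other {
                    UserOrgType::Owner => Ordering::Less,
                    _ => Ordering::Greater,
                },
                UserOrgType::Manager => match other {
                    UserOrgType::Owner | UserOrgType::Admin => Ordering::Less,
                    _ => Ordering::Greater,
                },
                UserOrgType::User => Ordering::Less,
            }
        }
    }

    impl PartialOrd for UserOrgType {
        fn partial_cmp(&self, other: &UserOrgType) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    fn main() {
        assert!(UserOrgType::Owner > UserOrgType::Admin);
        assert!(UserOrgType::Admin > UserOrgType::Manager);
        assert!(UserOrgType::Manager > UserOrgType::User);
        assert_eq!(UserOrgType::User.cmp(&UserOrgType::User), Ordering::Equal);
    }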

View file

@ -50,7 +50,7 @@ impl TwoFactor {
let decoded_secret = match BASE32.decode(totp_secret) { let decoded_secret = match BASE32.decode(totp_secret) {
Ok(s) => s, Ok(s) => s,
Err(_) => return false Err(_) => return false,
}; };
let generated = totp_raw_now(&decoded_secret, 6, 0, 30, &HashType::SHA1); let generated = totp_raw_now(&decoded_secret, 6, 0, 30, &HashType::SHA1);
@ -74,10 +74,10 @@ impl TwoFactor {
} }
} }
use crate::db::schema::twofactor;
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::twofactor;
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -92,33 +92,29 @@ impl TwoFactor {
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
diesel::delete( diesel::delete(twofactor::table.filter(twofactor::uuid.eq(self.uuid)))
twofactor::table.filter( .execute(&**conn)
twofactor::uuid.eq(self.uuid)
)
).execute(&**conn)
.map_res("Error deleting twofactor") .map_res("Error deleting twofactor")
} }
pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> { pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
twofactor::table twofactor::table
.filter(twofactor::user_uuid.eq(user_uuid)) .filter(twofactor::user_uuid.eq(user_uuid))
.load::<Self>(&**conn).expect("Error loading twofactor") .load::<Self>(&**conn)
.expect("Error loading twofactor")
} }
pub fn find_by_user_and_type(user_uuid: &str, type_: i32, conn: &DbConn) -> Option<Self> { pub fn find_by_user_and_type(user_uuid: &str, type_: i32, conn: &DbConn) -> Option<Self> {
twofactor::table twofactor::table
.filter(twofactor::user_uuid.eq(user_uuid)) .filter(twofactor::user_uuid.eq(user_uuid))
.filter(twofactor::type_.eq(type_)) .filter(twofactor::type_.eq(type_))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult {
diesel::delete( diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid)))
twofactor::table.filter( .execute(&**conn)
twofactor::user_uuid.eq(user_uuid)
)
).execute(&**conn)
.map_res("Error deleting twofactors") .map_res("Error deleting twofactors")
} }
} }

View file

@ -4,7 +4,6 @@ use serde_json::Value;
use crate::crypto; use crate::crypto;
use crate::CONFIG; use crate::CONFIG;
#[derive(Debug, Identifiable, Queryable, Insertable)] #[derive(Debug, Identifiable, Queryable, Insertable)]
#[table_name = "users"] #[table_name = "users"]
#[primary_key(uuid)] #[primary_key(uuid)]
@ -77,10 +76,12 @@ impl User {
} }
pub fn check_valid_password(&self, password: &str) -> bool { pub fn check_valid_password(&self, password: &str) -> bool {
crypto::verify_password_hash(password.as_bytes(), crypto::verify_password_hash(
password.as_bytes(),
&self.salt, &self.salt,
&self.password_hash, &self.password_hash,
self.password_iterations as u32) self.password_iterations as u32,
)
} }
pub fn check_valid_recovery_code(&self, recovery_code: &str) -> bool { pub fn check_valid_recovery_code(&self, recovery_code: &str) -> bool {
@ -92,9 +93,7 @@ impl User {
} }
pub fn set_password(&mut self, password: &str) { pub fn set_password(&mut self, password: &str) {
self.password_hash = crypto::hash_password(password.as_bytes(), self.password_hash = crypto::hash_password(password.as_bytes(), &self.salt, self.password_iterations as u32);
&self.salt,
self.password_iterations as u32);
} }
pub fn reset_security_stamp(&mut self) { pub fn reset_security_stamp(&mut self) {
@ -102,11 +101,11 @@ impl User {
} }
} }
use super::{Cipher, Device, Folder, TwoFactor, UserOrgType, UserOrganization};
use crate::db::schema::{invitations, users};
use crate::db::DbConn;
use diesel; use diesel;
use diesel::prelude::*; use diesel::prelude::*;
use crate::db::DbConn;
use crate::db::schema::{users, invitations};
use super::{Cipher, Folder, Device, UserOrganization, UserOrgType, TwoFactor};
use crate::api::EmptyResult; use crate::api::EmptyResult;
use crate::error::MapResult; use crate::error::MapResult;
@ -114,7 +113,7 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl User { impl User {
pub fn to_json(&self, conn: &DbConn) -> Value { pub fn to_json(&self, conn: &DbConn) -> Value {
use super::{UserOrganization, TwoFactor}; use super::{TwoFactor, UserOrganization};
let orgs = UserOrganization::find_by_user(&self.uuid, conn); let orgs = UserOrganization::find_by_user(&self.uuid, conn);
let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(&conn)).collect(); let orgs_json: Vec<Value> = orgs.iter().map(|c| c.to_json(&conn)).collect();
@ -137,22 +136,20 @@ impl User {
}) })
} }
pub fn save(&mut self, conn: &DbConn) -> EmptyResult { pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
diesel::replace_into(users::table) // Insert or update diesel::replace_into(users::table) // Insert or update
.values(&*self).execute(&**conn) .values(&*self)
.execute(&**conn)
.map_res("Error saving user") .map_res("Error saving user")
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
for user_org in UserOrganization::find_by_user(&self.uuid, &*conn) { for user_org in UserOrganization::find_by_user(&self.uuid, &*conn) {
if user_org.type_ == UserOrgType::Owner { if user_org.type_ == UserOrgType::Owner {
if UserOrganization::find_by_org_and_type( let owner_type = UserOrgType::Owner as i32;
&user_org.org_uuid, if UserOrganization::find_by_org_and_type(&user_org.org_uuid, owner_type, &conn).len() <= 1 {
UserOrgType::Owner as i32, &conn
).len() <= 1 {
err!("Can't delete last owner") err!("Can't delete last owner")
} }
} }
@ -165,8 +162,7 @@ impl User {
TwoFactor::delete_all_by_user(&self.uuid, &*conn)?; TwoFactor::delete_all_by_user(&self.uuid, &*conn)?;
Invitation::take(&self.email, &*conn); // Delete invitation if any Invitation::take(&self.email, &*conn); // Delete invitation if any
diesel::delete(users::table.filter( diesel::delete(users::table.filter(users::uuid.eq(self.uuid)))
users::uuid.eq(self.uuid)))
.execute(&**conn) .execute(&**conn)
.map_res("Error deleting user") .map_res("Error deleting user")
} }
@ -181,11 +177,7 @@ impl User {
pub fn update_revision(&mut self, conn: &DbConn) -> EmptyResult { pub fn update_revision(&mut self, conn: &DbConn) -> EmptyResult {
self.updated_at = Utc::now().naive_utc(); self.updated_at = Utc::now().naive_utc();
diesel::update( diesel::update(users::table.filter(users::uuid.eq(&self.uuid)))
users::table.filter(
users::uuid.eq(&self.uuid)
)
)
.set(users::updated_at.eq(&self.updated_at)) .set(users::updated_at.eq(&self.updated_at))
.execute(&**conn) .execute(&**conn)
.map_res("Error updating user revision") .map_res("Error updating user revision")
@ -195,18 +187,16 @@ impl User {
let lower_mail = mail.to_lowercase(); let lower_mail = mail.to_lowercase();
users::table users::table
.filter(users::email.eq(lower_mail)) .filter(users::email.eq(lower_mail))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> { pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option<Self> {
users::table users::table.filter(users::uuid.eq(uuid)).first::<Self>(&**conn).ok()
.filter(users::uuid.eq(uuid))
.first::<Self>(&**conn).ok()
} }
pub fn get_all(conn: &DbConn) -> Vec<Self> { pub fn get_all(conn: &DbConn) -> Vec<Self> {
users::table users::table.load::<Self>(&**conn).expect("Error loading users")
.load::<Self>(&**conn).expect("Error loading users")
} }
} }
@ -219,9 +209,7 @@ pub struct Invitation {
impl Invitation { impl Invitation {
pub fn new(email: String) -> Self { pub fn new(email: String) -> Self {
Self { Self { email }
email
}
} }
pub fn save(&mut self, conn: &DbConn) -> EmptyResult { pub fn save(&mut self, conn: &DbConn) -> EmptyResult {
@ -232,8 +220,7 @@ impl Invitation {
} }
pub fn delete(self, conn: &DbConn) -> EmptyResult { pub fn delete(self, conn: &DbConn) -> EmptyResult {
diesel::delete(invitations::table.filter( diesel::delete(invitations::table.filter(invitations::email.eq(self.email)))
invitations::email.eq(self.email)))
.execute(&**conn) .execute(&**conn)
.map_res("Error deleting invitation") .map_res("Error deleting invitation")
} }
@ -242,14 +229,15 @@ impl Invitation {
let lower_mail = mail.to_lowercase(); let lower_mail = mail.to_lowercase();
invitations::table invitations::table
.filter(invitations::email.eq(lower_mail)) .filter(invitations::email.eq(lower_mail))
.first::<Self>(&**conn).ok() .first::<Self>(&**conn)
.ok()
} }
pub fn take(mail: &str, conn: &DbConn) -> bool { pub fn take(mail: &str, conn: &DbConn) -> bool {
CONFIG.invitations_allowed && CONFIG.invitations_allowed
match Self::find_by_mail(mail, &conn) { && match Self::find_by_mail(mail, &conn) {
Some(invitation) => invitation.delete(&conn).is_ok(), Some(invitation) => invitation.delete(&conn).is_ok(),
None => false None => false,
} }
} }
} }
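A hypothetical, std-only stand-in for the Invitation::take logic as reformatted above: a HashSet replaces the diesel-backed invitations table, and "delete succeeded" collapses to HashSet::remove, but the invitations_allowed guard and the lowercased lookup mirror the hunk.

    use std::collections::HashSet;

    // Hypothetical in-memory stand-in for the invitations table.
    struct InvitationStore {
        invitations_allowed: bool,
        emails: HashSet<String>,
    }

    impl InvitationStore {
        // Mirrors `CONFIG.invitations_allowed && match Self::find_by_mail(..)`:
        // only consume an invitation when invitations are allowed and one
        // exists for the lowercased address.
        fn take(&mut self, mail: &str) -> bool {
            let lower_mail = mail.to_lowercase();
            self.invitations_allowed && self.emails.remove(&lower_mail)
        }
    }

    fn main() {
        let mut store = InvitationStore {
            invitations_allowed: true,
            emails: ["user@example.com".to_string()].into_iter().collect(),
        };
        assert!(store.take("USER@example.com")); // found and consumed
        assert!(!store.take("user@example.com")); // already taken
    }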

View file

@ -140,9 +140,9 @@ impl<'r> Responder<'r> for Error {
} }
} }
/// //
/// Error return macros // Error return macros
/// //
#[macro_export] #[macro_export]
macro_rules! err { macro_rules! err {
($msg:expr) => {{ ($msg:expr) => {{

View file

@ -1,8 +1,8 @@
use native_tls::{Protocol, TlsConnector};
use lettre::{Transport, SmtpTransport, SmtpClient, ClientTlsParameters, ClientSecurity};
use lettre::smtp::ConnectionReuseParameters;
use lettre::smtp::authentication::Credentials; use lettre::smtp::authentication::Credentials;
use lettre::smtp::ConnectionReuseParameters;
use lettre::{ClientSecurity, ClientTlsParameters, SmtpClient, SmtpTransport, Transport};
use lettre_email::EmailBuilder; use lettre_email::EmailBuilder;
use native_tls::{Protocol, TlsConnector};
use crate::MailConfig; use crate::MailConfig;
use crate::CONFIG; use crate::CONFIG;
@ -22,10 +22,7 @@ fn mailer(config: &MailConfig) -> SmtpTransport {
ClientSecurity::None ClientSecurity::None
}; };
let smtp_client = SmtpClient::new( let smtp_client = SmtpClient::new((config.smtp_host.as_str(), config.smtp_port), client_security).unwrap();
(config.smtp_host.as_str(), config.smtp_port),
client_security,
).unwrap();
let smtp_client = match (&config.smtp_username, &config.smtp_password) { let smtp_client = match (&config.smtp_username, &config.smtp_password) {
(Some(user), Some(pass)) => smtp_client.credentials(Credentials::new(user.clone(), pass.clone())), (Some(user), Some(pass)) => smtp_client.credentials(Credentials::new(user.clone(), pass.clone())),
@ -40,15 +37,20 @@ fn mailer(config: &MailConfig) -> SmtpTransport {
pub fn send_password_hint(address: &str, hint: Option<String>, config: &MailConfig) -> EmptyResult { pub fn send_password_hint(address: &str, hint: Option<String>, config: &MailConfig) -> EmptyResult {
let (subject, body) = if let Some(hint) = hint { let (subject, body) = if let Some(hint) = hint {
("Your master password hint", (
"Your master password hint",
format!( format!(
"You (or someone) recently requested your master password hint.\n\n\ "You (or someone) recently requested your master password hint.\n\n\
Your hint is: \"{}\"\n\n\ Your hint is: \"{}\"\n\n\
If you did not request your master password hint you can safely ignore this email.\n", If you did not request your master password hint you can safely ignore this email.\n",
hint)) hint
),
)
} else { } else {
("Sorry, you have no password hint...", (
"Sorry, you have not specified any password hint...\n".into()) "Sorry, you have no password hint...",
"Sorry, you have not specified any password hint...\n".into(),
)
}; };
let email = EmailBuilder::new() let email = EmailBuilder::new()
@ -65,7 +67,14 @@ pub fn send_password_hint(address: &str, hint: Option<String>, config: &MailConf
.and(Ok(())) .and(Ok(()))
} }
pub fn send_invite(address: &str, org_id: &str, org_user_id: &str, token: &str, org_name: &str, config: &MailConfig) -> EmptyResult { pub fn send_invite(
address: &str,
org_id: &str,
org_user_id: &str,
token: &str,
org_name: &str,
config: &MailConfig,
) -> EmptyResult {
let (subject, body) = { let (subject, body) = {
(format!("Join {}", &org_name), (format!("Join {}", &org_name),
format!( format!(
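The send_password_hint change above only re-wraps the (subject, body) tuple; a std-only sketch of that selection, with the strings taken from the hunk and the free function name being hypothetical:

    // Hypothetical helper name; the subject and body strings come from the hunk above.
    fn password_hint_message(hint: Option<String>) -> (&'static str, String) {
        if let Some(hint) = hint {
            (
                "Your master password hint",
                format!(
                    "You (or someone) recently requested your master password hint.\n\n\
                     Your hint is: \"{}\"\n\n\
                     If you did not request your master password hint you can safely ignore this email.\n",
                    hint
                ),
            )
        } else {
            (
                "Sorry, you have no password hint...",
                "Sorry, you have not specified any password hint...\n".into(),
            )
        }
    }

    fn main() {
        let (subject, body) = password_hint_message(None);
        println!("{}\n\n{}", subject, body);
    }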

View file

@ -2,26 +2,39 @@
#![recursion_limit = "128"] #![recursion_limit = "128"]
#![allow(proc_macro_derive_resolution_fallback)] // TODO: Remove this when diesel update fixes warnings #![allow(proc_macro_derive_resolution_fallback)] // TODO: Remove this when diesel update fixes warnings
#[macro_use] extern crate rocket; #[macro_use]
#[macro_use] extern crate serde_derive; extern crate rocket;
#[macro_use] extern crate serde_json; #[macro_use]
#[macro_use] extern crate log; extern crate serde_derive;
#[macro_use] extern crate diesel; #[macro_use]
#[macro_use] extern crate diesel_migrations; extern crate serde_json;
#[macro_use] extern crate lazy_static; #[macro_use]
#[macro_use] extern crate derive_more; extern crate log;
#[macro_use] extern crate num_derive; #[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate derive_more;
#[macro_use]
extern crate num_derive;
use std::{path::Path, process::{exit, Command}};
use rocket::Rocket; use rocket::Rocket;
use std::{
path::Path,
process::{exit, Command},
};
#[macro_use] mod error; #[macro_use]
mod util; mod error;
mod api; mod api;
mod db;
mod crypto;
mod auth; mod auth;
mod crypto;
mod db;
mod mail; mod mail;
mod util;
fn init_rocket() -> Rocket { fn init_rocket() -> Rocket {
rocket::ignite() rocket::ignite()
@ -93,7 +106,9 @@ fn init_logging() -> Result<(), fern::InitError> {
} }
#[cfg(not(feature = "enable_syslog"))] #[cfg(not(feature = "enable_syslog"))]
fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch { logger } fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
logger
}
#[cfg(feature = "enable_syslog")] #[cfg(feature = "enable_syslog")]
fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch { fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
@ -127,44 +142,60 @@ fn check_db() {
// Turn on WAL in SQLite // Turn on WAL in SQLite
use diesel::RunQueryDsl; use diesel::RunQueryDsl;
let connection = db::get_connection().expect("Can't conect to DB"); let connection = db::get_connection().expect("Can't conect to DB");
diesel::sql_query("PRAGMA journal_mode=wal").execute(&connection).expect("Failed to turn on WAL"); diesel::sql_query("PRAGMA journal_mode=wal")
.execute(&connection)
.expect("Failed to turn on WAL");
} }
fn check_rsa_keys() { fn check_rsa_keys() {
// If the RSA keys don't exist, try to create them // If the RSA keys don't exist, try to create them
if !util::file_exists(&CONFIG.private_rsa_key) if !util::file_exists(&CONFIG.private_rsa_key) || !util::file_exists(&CONFIG.public_rsa_key) {
|| !util::file_exists(&CONFIG.public_rsa_key) {
info!("JWT keys don't exist, checking if OpenSSL is available..."); info!("JWT keys don't exist, checking if OpenSSL is available...");
Command::new("openssl") Command::new("openssl").arg("version").output().unwrap_or_else(|_| {
.arg("version")
.output().unwrap_or_else(|_| {
info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH"); info!("Can't create keys because OpenSSL is not available, make sure it's installed and available on the PATH");
exit(1); exit(1);
}); });
info!("OpenSSL detected, creating keys..."); info!("OpenSSL detected, creating keys...");
let mut success = Command::new("openssl").arg("genrsa") let mut success = Command::new("openssl")
.arg("-out").arg(&CONFIG.private_rsa_key_pem) .arg("genrsa")
.output().expect("Failed to create private pem file") .arg("-out")
.status.success(); .arg(&CONFIG.private_rsa_key_pem)
.output()
.expect("Failed to create private pem file")
.status
.success();
success &= Command::new("openssl").arg("rsa") success &= Command::new("openssl")
.arg("-in").arg(&CONFIG.private_rsa_key_pem) .arg("rsa")
.arg("-outform").arg("DER") .arg("-in")
.arg("-out").arg(&CONFIG.private_rsa_key) .arg(&CONFIG.private_rsa_key_pem)
.output().expect("Failed to create private der file") .arg("-outform")
.status.success(); .arg("DER")
.arg("-out")
.arg(&CONFIG.private_rsa_key)
.output()
.expect("Failed to create private der file")
.status
.success();
success &= Command::new("openssl").arg("rsa") success &= Command::new("openssl")
.arg("-in").arg(&CONFIG.private_rsa_key) .arg("rsa")
.arg("-inform").arg("DER") .arg("-in")
.arg(&CONFIG.private_rsa_key)
.arg("-inform")
.arg("DER")
.arg("-RSAPublicKey_out") .arg("-RSAPublicKey_out")
.arg("-outform").arg("DER") .arg("-outform")
.arg("-out").arg(&CONFIG.public_rsa_key) .arg("DER")
.output().expect("Failed to create public der file") .arg("-out")
.status.success(); .arg(&CONFIG.public_rsa_key)
.output()
.expect("Failed to create public der file")
.status
.success();
if success { if success {
info!("Keys created correctly."); info!("Keys created correctly.");
@ -219,13 +250,7 @@ impl MailConfig {
}); });
let smtp_ssl = get_env_or("SMTP_SSL", true); let smtp_ssl = get_env_or("SMTP_SSL", true);
let smtp_port = get_env("SMTP_PORT").unwrap_or_else(|| let smtp_port = get_env("SMTP_PORT").unwrap_or_else(|| if smtp_ssl { 587u16 } else { 25u16 });
if smtp_ssl {
587u16
} else {
25u16
}
);
let smtp_username = get_env("SMTP_USERNAME"); let smtp_username = get_env("SMTP_USERNAME");
let smtp_password = get_env("SMTP_PASSWORD").or_else(|| { let smtp_password = get_env("SMTP_PASSWORD").or_else(|| {
@ -319,7 +344,11 @@ impl Config {
web_vault_enabled: get_env_or("WEB_VAULT_ENABLED", true), web_vault_enabled: get_env_or("WEB_VAULT_ENABLED", true),
websocket_enabled: get_env_or("WEBSOCKET_ENABLED", false), websocket_enabled: get_env_or("WEBSOCKET_ENABLED", false),
websocket_url: format!("{}:{}", get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()), get_env_or("WEBSOCKET_PORT", 3012)), websocket_url: format!(
"{}:{}",
get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()),
get_env_or("WEBSOCKET_PORT", 3012)
),
extended_logging: get_env_or("EXTENDED_LOGGING", true), extended_logging: get_env_or("EXTENDED_LOGGING", true),
log_file: get_env("LOG_FILE"), log_file: get_env("LOG_FILE"),
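A simplified, std-only sketch of the defaults picked in the MailConfig and Config hunks above; these get_env / get_env_or stand-ins skip the (then-unstable) std::ops::Try plumbing used by the real helpers in util.rs, diffed further below.

    use std::env;
    use std::str::FromStr;

    // Simplified stand-ins for util.rs's get_env / get_env_or.
    fn get_env<V: FromStr>(key: &str) -> Option<V> {
        env::var(key).ok().and_then(|s| s.parse().ok())
    }

    fn get_env_or<V: FromStr>(key: &str, default: V) -> V {
        get_env(key).unwrap_or(default)
    }

    fn main() {
        let smtp_ssl = get_env_or("SMTP_SSL", true);
        // SMTP_PORT falls back to 587 with SSL, 25 without, as in the hunk above.
        let smtp_port: u16 = get_env("SMTP_PORT").unwrap_or(if smtp_ssl { 587 } else { 25 });

        let websocket_url = format!(
            "{}:{}",
            get_env_or("WEBSOCKET_ADDRESS", "0.0.0.0".to_string()),
            get_env_or("WEBSOCKET_PORT", 3012)
        );

        println!("smtp_port = {}, websocket_url = {}", smtp_port, websocket_url);
    }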

View file

@ -1,6 +1,6 @@
/// //
/// Web Headers // Web Headers
/// //
use rocket::fairing::{Fairing, Info, Kind}; use rocket::fairing::{Fairing, Info, Kind};
use rocket::{Request, Response}; use rocket::{Request, Response};
@ -29,10 +29,9 @@ impl Fairing for AppHeaders {
} }
} }
//
/// // File handling
/// File handling //
///
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{Read, Result as IOResult}; use std::io::{Read, Result as IOResult};
use std::path::Path; use std::path::Path;
@ -75,7 +74,7 @@ pub fn get_display_size(size: i32) -> String {
} else { } else {
break; break;
} }
}; }
// Round to two decimals // Round to two decimals
size = (size * 100.).round() / 100.; size = (size * 100.).round() / 100.;
@ -86,13 +85,12 @@ pub fn get_uuid() -> String {
uuid::Uuid::new_v4().to_string() uuid::Uuid::new_v4().to_string()
} }
//
// String util methods
//
///
/// String util methods
///
use std::str::FromStr;
use std::ops::Try; use std::ops::Try;
use std::str::FromStr;
pub fn upcase_first(s: &str) -> String { pub fn upcase_first(s: &str) -> String {
let mut c = s.chars(); let mut c = s.chars();
@ -102,7 +100,11 @@ pub fn upcase_first(s: &str) -> String {
} }
} }
pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error=U>) -> Option<T> where S: AsRef<str>, T: FromStr { pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error = U>) -> Option<T>
where
S: AsRef<str>,
T: FromStr,
{
if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) { if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
Some(value) Some(value)
} else { } else {
@ -110,7 +112,11 @@ pub fn try_parse_string<S, T, U>(string: impl Try<Ok = S, Error=U>) -> Option<T>
} }
} }
pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error=U>, default: T) -> T where S: AsRef<str>, T: FromStr { pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error = U>, default: T) -> T
where
S: AsRef<str>,
T: FromStr,
{
if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) { if let Ok(Ok(value)) = string.into_result().map(|s| s.as_ref().parse::<T>()) {
value value
} else { } else {
@ -118,24 +124,29 @@ pub fn try_parse_string_or<S, T, U>(string: impl Try<Ok = S, Error=U>, default:
} }
} }
//
/// // Env methods
/// Env methods //
///
use std::env; use std::env;
pub fn get_env<V>(key: &str) -> Option<V> where V: FromStr { pub fn get_env<V>(key: &str) -> Option<V>
where
V: FromStr,
{
try_parse_string(env::var(key)) try_parse_string(env::var(key))
} }
pub fn get_env_or<V>(key: &str, default: V) -> V where V: FromStr { pub fn get_env_or<V>(key: &str, default: V) -> V
where
V: FromStr,
{
try_parse_string_or(env::var(key), default) try_parse_string_or(env::var(key), default)
} }
/// //
/// Date util methods // Date util methods
/// //
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
@ -145,9 +156,9 @@ pub fn format_date(date: &NaiveDateTime) -> String {
date.format(DATETIME_FORMAT).to_string() date.format(DATETIME_FORMAT).to_string()
} }
/// //
/// Deserialization methods // Deserialization methods
/// //
use std::fmt; use std::fmt;
@ -163,10 +174,11 @@ pub struct UpCase<T: DeserializeOwned> {
pub data: T, pub data: T,
} }
/// https://github.com/serde-rs/serde/issues/586 // https://github.com/serde-rs/serde/issues/586
pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error> pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where T: DeserializeOwned, where
D: Deserializer<'de> T: DeserializeOwned,
D: Deserializer<'de>,
{ {
let d = deserializer.deserialize_any(UpCaseVisitor)?; let d = deserializer.deserialize_any(UpCaseVisitor)?;
T::deserialize(d).map_err(de::Error::custom) T::deserialize(d).map_err(de::Error::custom)
@ -182,7 +194,8 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
} }
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where A: MapAccess<'de> where
A: MapAccess<'de>,
{ {
let mut result_map = JsonMap::new(); let mut result_map = JsonMap::new();
@ -194,7 +207,9 @@ impl<'de> Visitor<'de> for UpCaseVisitor {
} }
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where A: SeqAccess<'de> { where
A: SeqAccess<'de>,
{
let mut result_seq = Vec::<Value>::new(); let mut result_seq = Vec::<Value>::new();
while let Some(value) = seq.next_element()? { while let Some(value) = seq.next_element()? {
@ -214,7 +229,6 @@ fn upcase_value(value: &Value) -> Value {
new_value[processed_key] = upcase_value(val); new_value[processed_key] = upcase_value(val);
} }
new_value new_value
} else if let Some(array) = value.as_array() { } else if let Some(array) = value.as_array() {
// Initialize array with null values // Initialize array with null values
let mut new_value = json!(vec![Value::Null; array.len()]); let mut new_value = json!(vec![Value::Null; array.len()]);
@ -223,7 +237,6 @@ fn upcase_value(value: &Value) -> Value {
new_value[index] = upcase_value(val); new_value[index] = upcase_value(val);
} }
new_value new_value
} else { } else {
value.clone() value.clone()
} }