Mirror of https://github.com/dani-garcia/vaultwarden.git
Synced 2024-11-29 06:20:29 +01:00

Commit 5da96d36e6: Merge branch 'BlackDex-fix-org-export'
2 changed files with 90 additions and 10 deletions
@@ -10,7 +10,9 @@ use crate::{
     },
     auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
     db::{models::*, DbConn},
-    mail, CONFIG,
+    mail,
+    util::convert_json_key_lcase_first,
+    CONFIG,
 };

 use futures::{stream, stream::StreamExt};
@@ -68,7 +70,8 @@ pub fn routes() -> Vec<Route> {
         activate_organization_user,
         bulk_activate_organization_user,
         restore_organization_user,
-        bulk_restore_organization_user
+        bulk_restore_organization_user,
+        get_org_export
     ]
 }

@@ -246,15 +249,19 @@ async fn get_user_collections(headers: Headers, conn: DbConn) -> Json<Value> {

 #[get("/organizations/<org_id>/collections")]
 async fn get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json<Value> {
-    Json(json!({
+    Json(_get_org_collections(&org_id, &conn).await)
+}
+
+async fn _get_org_collections(org_id: &str, conn: &DbConn) -> Value {
+    json!({
         "Data":
-            Collection::find_by_organization(&org_id, &conn).await
+            Collection::find_by_organization(org_id, conn).await
             .iter()
             .map(Collection::to_json)
             .collect::<Value>(),
         "Object": "list",
         "ContinuationToken": null,
-    }))
+    })
 }

 #[post("/organizations/<org_id>/collections", data = "<data>")]
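
This split is the heart of the fix: the Rocket route stays a thin wrapper returning Json<Value>, while the private _get_org_collections returns a plain Value that the new export endpoint can post-process before serializing. A minimal standalone sketch of the pattern (hypothetical names, not vaultwarden code; assumes the serde_json and tokio crates):

use serde_json::{json, Value};

// Thin HTTP-facing handler: in vaultwarden this is the #[get(...)] route,
// which only wraps the helper's Value into Json<Value>.
async fn get_things() -> Value {
    _get_things().await
}

// Reusable helper: other endpoints (such as the combined export) can call
// this directly and transform the Value before serializing it.
async fn _get_things() -> Value {
    json!({ "Data": [], "Object": "list", "ContinuationToken": null })
}

#[tokio::main]
async fn main() {
    let v = get_things().await;
    assert_eq!(v["Object"], "list");
}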

@@ -491,22 +498,26 @@ struct OrgIdData {

 #[get("/ciphers/organization-details?<data..>")]
 async fn get_org_details(data: OrgIdData, headers: Headers, conn: DbConn) -> Json<Value> {
-    let ciphers = Cipher::find_by_org(&data.organization_id, &conn).await;
-    let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::Organization, &conn).await;
+    Json(_get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &conn).await)
+}
+
+async fn _get_org_details(org_id: &str, host: &str, user_uuid: &str, conn: &DbConn) -> Value {
+    let ciphers = Cipher::find_by_org(org_id, conn).await;
+    let cipher_sync_data = CipherSyncData::new(user_uuid, &ciphers, CipherSyncType::Organization, conn).await;

     let ciphers_json = stream::iter(ciphers)
         .then(|c| async {
             let c = c; // Move out this single variable
-            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
+            c.to_json(host, user_uuid, Some(&cipher_sync_data), conn).await
         })
         .collect::<Vec<Value>>()
         .await;

-    Json(json!({
+    json!({
         "Data": ciphers_json,
         "Object": "list",
         "ContinuationToken": null,
-    }))
+    })
 }

 #[get("/organizations/<org_id>/users")]
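
The helper keeps the existing futures pipeline: each cipher is serialized one at a time with .then, because to_json borrows the shared cipher_sync_data across an await point. A standalone sketch of that pattern with stand-in data (assumes the futures and tokio crates):

use futures::{stream, stream::StreamExt};

#[tokio::main]
async fn main() {
    let shared = 10; // stands in for the borrowed cipher_sync_data
    let out = stream::iter(vec![1, 2, 3])
        .then(|c| async {
            let c = c; // move out this single variable, as in the original
            c + shared // stands in for c.to_json(..., Some(&cipher_sync_data), ...).await
        })
        .collect::<Vec<i32>>()
        .await;
    assert_eq!(out, vec![11, 12, 13]);
}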

@@ -1690,3 +1701,19 @@ async fn _restore_organization_user(
     }
     Ok(())
 }
+
+// This is a new function, active since the v2022.9.x clients.
+// It combines the two separate calls clients made before.
+// We call those two functions here and combine the results ourselves.
+//
+// NOTE: It seems clients can't handle uppercase-first keys!!
+// We need to convert all keys so that their first character is lowercase.
+// Otherwise the export will just be an empty JSON file.
+#[get("/organizations/<org_id>/export")]
+async fn get_org_export(org_id: String, headers: AdminHeaders, conn: DbConn) -> Json<Value> {
+    // Also, both main keys here need to be lowercase, else the export will fail.
+    Json(json!({
+        "collections": convert_json_key_lcase_first(_get_org_collections(&org_id, &conn).await),
+        "ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &conn).await),
+    }))
+}
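
The new endpoint requires admin/owner credentials (AdminHeaders) and returns both halves of the export in one response. A hypothetical manual check with reqwest's blocking client (placeholder host, organization id, token, and URL path; not part of this commit):

// Cargo.toml (sketch): reqwest = { version = "0.11", features = ["blocking", "json"] },
// serde_json = "1"
fn main() -> Result<(), reqwest::Error> {
    let body: serde_json::Value = reqwest::blocking::Client::new()
        .get("https://vaultwarden.example.com/api/organizations/ORG_ID/export")
        .bearer_auth("ACCESS_TOKEN") // placeholder bearer token
        .send()?
        .json()?;
    // Both top-level keys must be lowercase, or clients reject the export.
    assert!(body.get("collections").is_some() && body.get("ciphers").is_some());
    Ok(())
}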

src/util.rs (53 changed lines)

@@ -357,6 +357,7 @@ pub fn get_uuid() -> String {

 use std::str::FromStr;

+#[inline]
 pub fn upcase_first(s: &str) -> String {
     let mut c = s.chars();
     match c.next() {

@@ -365,6 +366,15 @@ pub fn upcase_first(s: &str) -> String {
     }
 }

+#[inline]
+pub fn lcase_first(s: &str) -> String {
+    let mut c = s.chars();
+    match c.next() {
+        None => String::new(),
+        Some(f) => f.to_lowercase().collect::<String>() + c.as_str(),
+    }
+}
+
 pub fn try_parse_string<S, T>(string: Option<S>) -> Option<T>
 where
     S: AsRef<str>,
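
For illustration, a standalone copy of the new function with a few expected values; the match/collect form handles the empty string and characters whose to_lowercase() expands to more than one char:

// Standalone copy of lcase_first for demonstration; in the codebase it lives
// in src/util.rs next to upcase_first.
fn lcase_first(s: &str) -> String {
    let mut c = s.chars();
    match c.next() {
        None => String::new(),
        Some(f) => f.to_lowercase().collect::<String>() + c.as_str(),
    }
}

fn main() {
    assert_eq!(lcase_first("Data"), "data");
    assert_eq!(lcase_first("ContinuationToken"), "continuationToken");
    assert_eq!(lcase_first("object"), "object"); // already lowercase: unchanged
    assert_eq!(lcase_first(""), ""); // empty input: None arm
    println!("ok");
}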

@@ -650,3 +660,46 @@ pub fn get_reqwest_client_builder() -> ClientBuilder {
     headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Vaultwarden"));
     Client::builder().default_headers(headers).timeout(Duration::from_secs(10))
 }
+
+pub fn convert_json_key_lcase_first(src_json: Value) -> Value {
+    match src_json {
+        Value::Array(elm) => {
+            let mut new_array: Vec<Value> = Vec::with_capacity(elm.len());
+
+            for obj in elm {
+                new_array.push(convert_json_key_lcase_first(obj));
+            }
+            Value::Array(new_array)
+        }
+
+        Value::Object(obj) => {
+            let mut json_map = JsonMap::new();
+            for (key, value) in obj.iter() {
+                match (key, value) {
+                    (key, Value::Object(elm)) => {
+                        let inner_value = convert_json_key_lcase_first(Value::Object(elm.clone()));
+                        json_map.insert(lcase_first(key), inner_value);
+                    }
+
+                    (key, Value::Array(elm)) => {
+                        let mut inner_array: Vec<Value> = Vec::with_capacity(elm.len());
+
+                        for inner_obj in elm {
+                            inner_array.push(convert_json_key_lcase_first(inner_obj.clone()));
+                        }
+
+                        json_map.insert(lcase_first(key), Value::Array(inner_array));
+                    }
+
+                    (key, value) => {
+                        json_map.insert(lcase_first(key), value.clone());
+                    }
+                }
+            }
+
+            Value::Object(json_map)
+        }
+
+        value => value,
+    }
+}
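
A hypothetical unit test (not part of this commit) that could sit in src/util.rs to pin down the recursive behavior: nested objects, arrays of objects, and scalar values all end up with lowercase-first keys:

#[cfg(test)]
mod convert_json_key_tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn lowercases_keys_recursively() {
        let src = json!({
            "Data": [{ "Id": 1, "Login": { "Uris": [] } }],
            "Object": "list",
            "ContinuationToken": null,
        });
        let expected = json!({
            "data": [{ "id": 1, "login": { "uris": [] } }],
            "object": "list",
            "continuationToken": null,
        });
        assert_eq!(convert_json_key_lcase_first(src), expected);
    }
}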