Forked from mirrored/vaultwarden
Merge pull request #1329 from BlackDex/misc-updates

JSON Response updates and small fixes

This commit is contained in:
Commit cd768439d2
5 changed files with 166 additions and 43 deletions
@@ -91,7 +91,9 @@ fn sync(data: Form<SyncData>, headers: Headers, conn: DbConn) -> JsonResult {
     let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
 
     let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn);
-    let collections_json: Vec<Value> = collections.iter().map(Collection::to_json).collect();
+    let collections_json: Vec<Value> = collections.iter()
+        .map(|c| c.to_json_details(&headers.user.uuid, &conn))
+        .collect();
 
     let policies = OrgPolicy::find_by_user(&headers.user.uuid, &conn);
     let policies_json: Vec<Value> = policies.iter().map(OrgPolicy::to_json).collect();

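Note: each entry of collections_json now uses the per-user details form added to Collection further down in this commit. A minimal sketch (not part of the diff) of the resulting shape, with placeholder UUIDs and flag values chosen only for illustration:

// Sketch only: one element of `collections_json` after this change, based on
// Collection::to_json_details() as added later in this commit.
// UUIDs and flag values are placeholders, not real data.
use serde_json::{json, Value};

fn example_collection_details_entry() -> Value {
    json!({
        "ExternalId": null,
        "Id": "00000000-0000-0000-0000-000000000001",             // placeholder collection UUID
        "OrganizationId": "00000000-0000-0000-0000-000000000002", // placeholder org UUID
        "Name": "Example collection",
        "Object": "collectionDetails",   // to_json() would report "collection" here
        "ReadOnly": true,                // !is_writable_by_user(user_uuid, conn)
        "HidePasswords": false           // hide_passwords_for_user(user_uuid, conn)
    })
}
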
@@ -225,6 +227,12 @@ fn post_ciphers_admin(data: JsonUpcase<ShareCipherData>, headers: Headers, conn:
 fn post_ciphers_create(data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult {
     let mut data: ShareCipherData = data.into_inner().data;
 
+    // Check if there are one or more collections selected when this cipher is part of an organization.
+    // Err if this is not the case before creating an empty cipher.
+    if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() {
+        err!("You must select at least one collection.");
+    }
+
     // This check is usually only needed in update_cipher_from_data(), but we
     // need it here as well to avoid creating an empty cipher in the call to
     // cipher.save() below.

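Note: for illustration only, a hedged sketch of a request body this new guard rejects, where the cipher names an organization but CollectionIds is empty. Field names follow the upcased ShareCipherData/CipherData structs used in this file; all values are placeholders.

// Sketch only: a ShareCipherData-style payload the guard above turns into
// err!("You must select at least one collection."). Values are made up.
use serde_json::{json, Value};

fn example_rejected_share_payload() -> Value {
    json!({
        "Cipher": {
            "Type": 1,                                                 // login cipher
            "Name": "Example item",
            "OrganizationId": "00000000-0000-0000-0000-000000000002", // placeholder org UUID
            "Login": { "Username": "user", "Password": "hunter2", "Uris": [] }
        },
        "CollectionIds": []                                            // empty -> rejected
    })
}
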
@@ -323,6 +331,11 @@ pub fn update_cipher_from_data(
                     || cipher.is_write_accessible_to_user(&headers.user.uuid, &conn)
                 {
                     cipher.organization_uuid = Some(org_id);
+                    // After some discussion in PR #1329, the user_uuid = None was re-added.
+                    // TODO: Audit/Check the whole save/update cipher chain.
+                    // Upstream uses the user_uuid to allow a cipher added by a user to an org to still allow the user to view/edit the cipher
+                    // even when the user has hide-passwords configured as their policy.
+                    // Removing the line below would fix that, but we have to check which effect this would have on the rest of the code.
                     cipher.user_uuid = None;
                 } else {
                     err!("You don't have permission to add cipher directly to organization")

@@ -366,6 +379,23 @@ pub fn update_cipher_from_data(
         }
     }
 
+    // Clean up the cipher data, like removing the 'Response' key.
+    // This key is generated somewhere in the JavaScript client, so there is no way for us to fix this on our side.
+    // Also, upstream only retrieves the keys they actually want to store, and thus skips the 'Response' key.
+    // We do not mind which data is in it; that keeps our model more flexible when there are upstream changes.
+    // But we at least know we do not need to store and return this specific key.
+    fn _clean_cipher_data(mut json_data: Value) -> Value {
+        if json_data.is_array() {
+            json_data.as_array_mut()
+                .unwrap()
+                .iter_mut()
+                .for_each(|ref mut f| {
+                    f.as_object_mut().unwrap().remove("Response");
+                });
+        };
+        json_data
+    }
+
     let type_data_opt = match data.Type {
         1 => data.Login,
         2 => data.SecureNote,

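Note: a minimal usage sketch (not part of the diff) of the helper added above, assuming _clean_cipher_data and serde_json's json! macro are in scope. The input is invented purely to show the 'Response' key being stripped from each element of a Uris-style array.

// Sketch only: demonstrates the effect of _clean_cipher_data() on made-up data.
use serde_json::{json, Value};

fn demo_clean_cipher_data() {
    let uris: Value = json!([
        { "Uri": "https://example.com", "Match": null, "Response": "Ok" },
        { "Uri": "https://example.org", "Match": null, "Response": "Ok" }
    ]);
    let cleaned = _clean_cipher_data(uris);
    // The other keys survive, but "Response" is gone from every element.
    assert_eq!(cleaned[0]["Uri"], "https://example.com");
    assert!(cleaned[0].get("Response").is_none());
}
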
@@ -374,23 +404,22 @@ pub fn update_cipher_from_data(
         _ => err!("Invalid type"),
     };
 
-    let mut type_data = match type_data_opt {
-        Some(data) => data,
+    let type_data = match type_data_opt {
+        Some(mut data) => {
+            // Remove the 'Response' key from the base object.
+            data.as_object_mut().unwrap().remove("Response");
+            // Remove the 'Response' key from every Uri.
+            if data["Uris"].is_array() {
+                data["Uris"] = _clean_cipher_data(data["Uris"].clone());
+            }
+            data
+        },
         None => err!("Data missing"),
     };
 
-    // TODO: ******* Backwards compat start **********
-    // To remove backwards compatibility, just delete this code,
-    // and remove the compat code from cipher::to_json
-    type_data["Name"] = Value::String(data.Name.clone());
-    type_data["Notes"] = data.Notes.clone().map(Value::String).unwrap_or(Value::Null);
-    type_data["Fields"] = data.Fields.clone().unwrap_or(Value::Null);
-    type_data["PasswordHistory"] = data.PasswordHistory.clone().unwrap_or(Value::Null);
-    // TODO: ******* Backwards compat end **********
-
     cipher.name = data.Name;
     cipher.notes = data.Notes;
-    cipher.fields = data.Fields.map(|f| f.to_string());
+    cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string());
     cipher.data = type_data.to_string();
     cipher.password_history = data.PasswordHistory.map(|f| f.to_string());

@@ -1064,7 +1093,6 @@ fn delete_all(
         Some(user_org) => {
             if user_org.atype == UserOrgType::Owner {
                 Cipher::delete_all_by_organization(&org_data.org_id, &conn)?;
-                Collection::delete_all_by_organization(&org_data.org_id, &conn)?;
                 nt.send_user_update(UpdateType::Vault, &user);
                 Ok(())
             } else {

@@ -47,7 +47,9 @@ pub fn routes() -> Vec<Route> {
         list_policies_token,
         get_policy,
         put_policy,
+        get_organization_tax,
         get_plans,
+        get_plans_tax_rates,
     ]
 }

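Note: for orientation only, a hedged sketch of how a routes() list like this is typically consumed; the "/api" mount point is an assumption for illustration and is not part of this diff.

// Sketch only (Rocket 0.4 style): the handlers registered above are exposed by
// mounting the collected routes; the "/api" path is assumed for illustration.
fn mount_example() -> rocket::Rocket {
    rocket::ignite().mount("/api", routes())
}
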
@@ -1006,6 +1008,13 @@ fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: A
     Ok(Json(policy.to_json()))
 }
 
+#[allow(unused_variables)]
+#[get("/organizations/<org_id>/tax")]
+fn get_organization_tax(org_id: String, _headers: Headers, _conn: DbConn) -> EmptyResult {
+    // Prevent a 404 error, which also causes Javascript errors.
+    err!("Only allowed when not self hosted.")
+}
+
 #[get("/plans")]
 fn get_plans(_headers: Headers, _conn: DbConn) -> JsonResult {
     Ok(Json(json!({

@@ -1057,3 +1066,13 @@ fn get_plans(_headers: Headers, _conn: DbConn) -> JsonResult {
         "ContinuationToken": null
     })))
 }
+
+#[get("/plans/sales-tax-rates")]
+fn get_plans_tax_rates(_headers: Headers, _conn: DbConn) -> JsonResult {
+    // Prevent a 404 error, which also causes Javascript errors.
+    Ok(Json(json!({
+        "Object": "list",
+        "Data": [],
+        "ContinuationToken": null
+    })))
+}

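Note: as a usage sketch only, one way a client-side check of the new stub endpoint could look. The base URL, the /api prefix, and the token are placeholders, and the reqwest crate (with its blocking and json features) is assumed to be available; none of this is part of the diff.

// Sketch only: verifying that the sales-tax-rates stub returns an empty
// Bitwarden "list" envelope. URL, /api prefix, and token are placeholders.
fn check_sales_tax_rates_stub() -> Result<(), reqwest::Error> {
    let body: serde_json::Value = reqwest::blocking::Client::new()
        .get("http://localhost:8000/api/plans/sales-tax-rates")
        .bearer_auth("<access token>")
        .send()?
        .json()?;
    assert_eq!(body["Object"], "list");
    assert_eq!(body["Data"], serde_json::json!([]));
    Ok(())
}
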
@@ -83,7 +83,12 @@ impl Cipher {
         use crate::util::format_date;
 
         let attachments = Attachment::find_by_cipher(&self.uuid, conn);
-        let attachments_json: Vec<Value> = attachments.iter().map(|c| c.to_json(host)).collect();
+        // When there are no attachments use null instead of an empty array
+        let attachments_json = if attachments.is_empty() {
+            Value::Null
+        } else {
+            attachments.iter().map(|c| c.to_json(host)).collect()
+        };
 
         let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
         let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);

@@ -97,28 +102,31 @@ impl Cipher {
             },
         };
 
-        // Get the data or a default empty value to avoid issues with the mobile apps
-        let mut data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({
-            "Fields":null,
-            "Name": self.name,
-            "Notes":null,
-            "Password":null,
-            "PasswordHistory":null,
-            "PasswordRevisionDate":null,
-            "Response":null,
-            "Totp":null,
-            "Uris":null,
-            "Username":null
-        }));
+        // Get the type_data or default to an empty json object '{}'.
+        // If not passing an empty object, mobile clients will crash.
+        let mut type_data_json: Value = serde_json::from_str(&self.data).unwrap_or(json!({}));
 
-        // TODO: ******* Backwards compat start **********
-        // To remove backwards compatibility, just remove this entire section
-        // and remove the compat code from ciphers::update_cipher_from_data
-        if self.atype == 1 && data_json["Uris"].is_array() {
-            let uri = data_json["Uris"][0]["Uri"].clone();
-            data_json["Uri"] = uri;
+        // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
+        // Set the first element of the Uris array as Uri, this is needed by several (mobile) clients.
+        if self.atype == 1 {
+            if type_data_json["Uris"].is_array() {
+                let uri = type_data_json["Uris"][0]["Uri"].clone();
+                type_data_json["Uri"] = uri;
+            } else {
+                // Upstream always has an Uri key/value
+                type_data_json["Uri"] = Value::Null;
             }
-        // TODO: ******* Backwards compat end **********
+        }
 
+        // Clone the type_data and add some default values.
+        let mut data_json = type_data_json.clone();
+
+        // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
+        // data_json should always contain the following keys with every atype
+        data_json["Fields"] = json!(fields_json);
+        data_json["Name"] = json!(self.name);
+        data_json["Notes"] = json!(self.notes);
+        data_json["PasswordHistory"] = json!(password_history_json);
+
         // There are three types of cipher response models in upstream
         // Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order

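Note: putting the pieces of this method together, a hedged sketch (not part of the diff) of roughly what data_json looks like for a login cipher after this hunk; all values are placeholders and only the keys handled here are shown.

// Sketch only: `data_json` for a login cipher (atype == 1) whose stored
// type_data contains a Uris array, after the defaults above are applied.
// Values are made up for illustration.
use serde_json::{json, Value};

fn example_login_data_json() -> Value {
    json!({
        "Username": "user",
        "Password": "hunter2",
        "Totp": null,
        "Uris": [ { "Uri": "https://example.com", "Match": null } ],
        "Uri": "https://example.com",     // copied from Uris[0]["Uri"] for older clients
        "Fields": null,                   // fields_json
        "Name": "Example item",           // self.name
        "Notes": null,                    // self.notes
        "PasswordHistory": null           // password_history_json
    })
}
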
@@ -137,6 +145,8 @@ impl Cipher {
             "Favorite": self.is_favorite(&user_uuid, conn),
             "OrganizationId": self.organization_uuid,
             "Attachments": attachments_json,
+            // We have UseTotp set to true by default within the Organization model.
+            // This variable together with UsersGetPremium is used to show or hide the TOTP counter.
             "OrganizationUseTotp": true,
 
             // This field is specific to the cipherDetails type.

@@ -155,6 +165,12 @@ impl Cipher {
             "ViewPassword": !hide_passwords,
 
             "PasswordHistory": password_history_json,
+
+            // All Cipher types are included by default as null, but only the matching one will be populated
+            "Login": null,
+            "SecureNote": null,
+            "Card": null,
+            "Identity": null,
         });
 
         let key = match self.atype {

@@ -165,7 +181,7 @@ impl Cipher {
             _ => panic!("Wrong type"),
         };
 
-        json_object[key] = data_json;
+        json_object[key] = type_data_json;
         json_object
     }

@@ -448,7 +464,10 @@ impl Cipher {
     pub fn find_owned_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
         db_run! {conn: {
             ciphers::table
-                .filter(ciphers::user_uuid.eq(user_uuid))
+                .filter(
+                    ciphers::user_uuid.eq(user_uuid)
+                    .and(ciphers::organization_uuid.is_null())
+                )
                 .load::<CipherDb>(conn).expect("Error loading ciphers").from_db()
         }}
     }

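Note: the tightened filter now only returns ciphers that are truly owned by the user and not attached to an organization. A hedged sketch of the SQL the Diesel query roughly corresponds to (assembled from the filter expressions above, not literal generated output):

// Sketch only: approximate SQL equivalent of the new find_owned_by_user filter.
const FIND_OWNED_BY_USER_SQL: &str = "
    SELECT ciphers.*
    FROM ciphers
    WHERE ciphers.user_uuid = ?
      AND ciphers.organization_uuid IS NULL
";
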
@@ -49,12 +49,21 @@ impl Collection {
 
     pub fn to_json(&self) -> Value {
         json!({
+            "ExternalId": null, // Not supported by us
             "Id": self.uuid,
             "OrganizationId": self.org_uuid,
             "Name": self.name,
             "Object": "collection",
         })
     }
+
+    pub fn to_json_details(&self, user_uuid: &str, conn: &DbConn) -> Value {
+        let mut json_object = self.to_json();
+        json_object["Object"] = json!("collectionDetails");
+        json_object["ReadOnly"] = json!(!self.is_writable_by_user(user_uuid, conn));
+        json_object["HidePasswords"] = json!(self.hide_passwords_for_user(user_uuid, conn));
+        json_object
+    }
 }
 
 use crate::db::DbConn;

@@ -236,6 +245,28 @@ impl Collection {
             }
         }
     }
+
+    pub fn hide_passwords_for_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
+        match UserOrganization::find_by_user_and_org(&user_uuid, &self.org_uuid, &conn) {
+            None => true, // Not in Org
+            Some(user_org) => {
+                if user_org.has_full_access() {
+                    return false;
+                }
+
+                db_run! { conn: {
+                    users_collections::table
+                        .filter(users_collections::collection_uuid.eq(&self.uuid))
+                        .filter(users_collections::user_uuid.eq(user_uuid))
+                        .filter(users_collections::hide_passwords.eq(true))
+                        .count()
+                        .first::<i64>(conn)
+                        .ok()
+                        .unwrap_or(0) != 0
+                }}
+            }
+        }
+    }
 }
 
 /// Database methods

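Note: once the full-access shortcut does not apply, the lookup above boils down to a per-user existence check. A hedged sketch of the equivalent SQL, with table and column names taken from the Diesel filters rather than from generated output:

// Sketch only: the hide-passwords check expressed as SQL. The user is
// considered restricted when at least one matching row exists (count != 0).
const HIDE_PASSWORDS_FOR_USER_SQL: &str = "
    SELECT COUNT(*)
    FROM users_collections
    WHERE users_collections.collection_uuid = ?
      AND users_collections.user_uuid = ?
      AND users_collections.hide_passwords = true
";
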
@@ -364,7 +395,6 @@ impl CollectionUser {
         diesel::delete(users_collections::table.filter(
             users_collections::user_uuid.eq(user_uuid)
             .and(users_collections::collection_uuid.eq(user.collection_uuid))
-
         ))
         .execute(conn)
         .map_res("Error removing user from collections")?;

@@ -147,9 +147,10 @@ impl Organization {
     pub fn to_json(&self) -> Value {
         json!({
             "Id": self.uuid,
+            "Identifier": null, // not supported by us
             "Name": self.name,
-            "Seats": 10,
-            "MaxCollections": 10,
+            "Seats": 10, // The value doesn't matter, we don't check server-side
+            "MaxCollections": 10, // The value doesn't matter, we don't check server-side
             "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side
             "Use2fa": true,
             "UseDirectory": false,

@@ -157,6 +158,9 @@ impl Organization {
             "UseGroups": false,
             "UseTotp": true,
             "UsePolicies": true,
+            "UseSso": false, // We do not support SSO
+            "SelfHost": true,
+            "UseApi": false, // not supported by us
 
             "BusinessName": null,
             "BusinessAddress1": null,

@@ -274,9 +278,10 @@ impl UserOrganization {
 
         json!({
             "Id": self.org_uuid,
+            "Identifier": null, // not supported by us
             "Name": org.name,
-            "Seats": 10,
-            "MaxCollections": 10,
+            "Seats": 10, // The value doesn't matter, we don't check server-side
+            "MaxCollections": 10, // The value doesn't matter, we don't check server-side
             "UsersGetPremium": true,
 
             "Use2fa": true,

@@ -285,8 +290,30 @@ impl UserOrganization {
             "UseGroups": false,
             "UseTotp": true,
             "UsePolicies": true,
-            "UseApi": false,
+            "UseApi": false, // not supported by us
             "SelfHost": true,
+            "SsoBound": false, // We do not support SSO
+            "UseSso": false, // We do not support SSO
+            // TODO: Add support for Business Portal
+            // Upstream is moving Policies and SSO management outside of the web-vault to /portal
+            // For now they still have that code also in the web-vault, but they will remove it at some point.
+            // https://github.com/bitwarden/server/tree/master/bitwarden_license/src/
+            "UseBusinessPortal": false, // Disable BusinessPortal Button
+
+            // TODO: Add support for Custom User Roles
+            // See: https://bitwarden.com/help/article/user-types-access-control/#custom-role
+            // "Permissions": {
+            //     "AccessBusinessPortal": false,
+            //     "AccessEventLogs": false,
+            //     "AccessImportExport": false,
+            //     "AccessReports": false,
+            //     "ManageAllCollections": false,
+            //     "ManageAssignedCollections": false,
+            //     "ManageGroups": false,
+            //     "ManagePolicies": false,
+            //     "ManageSso": false,
+            //     "ManageUsers": false
+            // },
+
             "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side