2020-03-09 22:04:03 +01:00
|
|
|
use once_cell::sync::Lazy;
|
2020-06-03 17:07:32 +02:00
|
|
|
use serde::de::DeserializeOwned;
|
2020-07-14 18:00:09 +02:00
|
|
|
use serde_json::Value;
|
2021-11-28 13:02:27 +01:00
|
|
|
use std::env;
|
2018-12-18 01:53:21 +01:00
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
use rocket::serde::json::Json;
|
2020-07-14 18:00:09 +02:00
|
|
|
use rocket::{
|
2021-11-07 18:53:39 +01:00
|
|
|
form::Form,
|
|
|
|
http::{Cookie, CookieJar, SameSite, Status},
|
|
|
|
request::{self, FlashMessage, FromRequest, Outcome, Request},
|
|
|
|
response::{content::RawHtml as Html, Flash, Redirect},
|
2020-07-19 21:01:31 +02:00
|
|
|
Route,
|
2020-07-14 18:00:09 +02:00
|
|
|
};
|
2019-01-19 21:36:34 +01:00
|
|
|
|
2020-07-14 18:00:09 +02:00
|
|
|
use crate::{
|
2021-05-04 01:42:29 +02:00
|
|
|
api::{ApiResult, EmptyResult, JsonResult, NumberOrString},
|
2020-07-14 18:00:09 +02:00
|
|
|
auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp},
|
|
|
|
config::ConfigBuilder,
|
2021-03-28 00:10:01 +01:00
|
|
|
db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
|
2020-07-14 18:00:09 +02:00
|
|
|
error::{Error, MapResult},
|
|
|
|
mail,
|
2021-10-08 00:01:24 +02:00
|
|
|
util::{
|
|
|
|
docker_base_image, format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker,
|
|
|
|
},
|
2022-01-23 23:40:59 +01:00
|
|
|
CONFIG, VERSION,
|
2020-07-14 18:00:09 +02:00
|
|
|
};
|
2018-12-18 18:52:58 +01:00
|
|
|
|
2021-11-16 17:07:55 +01:00
|
|
|
use futures::{stream, stream::StreamExt};
|
|
|
|
|
2019-01-19 21:36:34 +01:00
|
|
|
pub fn routes() -> Vec<Route> {
|
2020-04-10 05:55:08 +02:00
|
|
|
if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() {
|
2019-02-10 15:26:19 +01:00
|
|
|
return routes![admin_disabled];
|
2019-01-19 21:36:34 +01:00
|
|
|
}
|
2018-12-18 01:53:21 +01:00
|
|
|
|
2019-01-26 19:28:54 +01:00
|
|
|
routes![
|
|
|
|
admin_login,
|
2020-05-28 10:42:36 +02:00
|
|
|
get_users_json,
|
2021-05-08 22:03:03 +02:00
|
|
|
get_user_json,
|
2019-01-26 19:28:54 +01:00
|
|
|
post_admin_login,
|
|
|
|
admin_page,
|
|
|
|
invite_user,
|
2019-12-01 21:15:14 +01:00
|
|
|
logout,
|
2019-01-26 19:28:54 +01:00
|
|
|
delete_user,
|
|
|
|
deauth_user,
|
2020-11-30 23:12:56 +01:00
|
|
|
disable_user,
|
|
|
|
enable_user,
|
2019-08-21 17:13:06 +02:00
|
|
|
remove_2fa,
|
2021-02-03 18:43:54 +01:00
|
|
|
update_user_org_type,
|
2019-03-07 21:08:33 +01:00
|
|
|
update_revision_users,
|
2019-02-02 01:09:21 +01:00
|
|
|
post_config,
|
2019-02-06 17:32:13 +01:00
|
|
|
delete_config,
|
2019-05-03 15:46:29 +02:00
|
|
|
backup_db,
|
2020-02-26 11:02:22 +01:00
|
|
|
test_smtp,
|
2020-05-28 10:42:36 +02:00
|
|
|
users_overview,
|
|
|
|
organizations_overview,
|
2021-02-03 18:43:54 +01:00
|
|
|
delete_organization,
|
2020-05-28 10:42:36 +02:00
|
|
|
diagnostics,
|
2021-01-19 17:55:21 +01:00
|
|
|
get_diagnostics_config
|
2019-01-26 19:28:54 +01:00
|
|
|
]
|
2019-01-19 21:36:34 +01:00
|
|
|
}
|
2018-12-18 01:53:21 +01:00
|
|
|
|
2021-01-19 17:55:21 +01:00
|
|
|
static DB_TYPE: Lazy<&str> = Lazy::new(|| {
|
|
|
|
DbConnType::from_url(&CONFIG.database_url())
|
|
|
|
.map(|t| match t {
|
|
|
|
DbConnType::sqlite => "SQLite",
|
|
|
|
DbConnType::mysql => "MySQL",
|
|
|
|
DbConnType::postgresql => "PostgreSQL",
|
|
|
|
})
|
|
|
|
.unwrap_or("Unknown")
|
|
|
|
});
|
|
|
|
|
2021-04-06 22:55:28 +02:00
|
|
|
/// Whether the built-in database backup feature is available (SQLite only).
static CAN_BACKUP: Lazy<bool> =
    Lazy::new(|| matches!(DbConnType::from_url(&CONFIG.database_url()), Ok(DbConnType::sqlite)));
|
2019-06-02 00:08:52 +02:00
|
|
|
|
2019-02-10 15:26:19 +01:00
|
|
|
/// Placeholder page served when the admin panel has not been enabled.
#[get("/")]
fn admin_disabled() -> &'static str {
    "The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it"
}
|
|
|
|
|
2022-01-23 23:40:59 +01:00
|
|
|
// Name of the session cookie that carries the admin JWT.
const COOKIE_NAME: &str = "VW_ADMIN";
// Mount point of the admin panel, relative to the domain path.
const ADMIN_PATH: &str = "/admin";
// Date/time format used when rendering timestamps on the admin pages.
const DT_FMT: &str = "%Y-%m-%d %H:%M:%S %Z";

// Handlebars base template every admin page is rendered into.
const BASE_TEMPLATE: &str = "admin/base";
|
2019-01-19 22:12:52 +01:00
|
|
|
|
2020-02-19 06:27:00 +01:00
|
|
|
/// Full path of the admin panel, including any configured domain sub-path.
fn admin_path() -> String {
    format!("{}{}", CONFIG.domain_path(), ADMIN_PATH)
}
|
|
|
|
|
2020-08-12 19:07:52 +02:00
|
|
|
/// The value of the `Referer` request header, if one was sent.
struct Referer(Option<String>);
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
#[rocket::async_trait]
|
|
|
|
impl<'r> FromRequest<'r> for Referer {
|
2020-08-12 19:07:52 +02:00
|
|
|
type Error = ();
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
async fn from_request(request: &'r Request<'_>) -> request::Outcome<Self, Self::Error> {
|
2020-08-12 19:07:52 +02:00
|
|
|
Outcome::Success(Referer(request.headers().get_one("Referer").map(str::to_string)))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-03-28 00:10:01 +01:00
|
|
|
/// Name of the first client-IP header found on the request, if any.
#[derive(Debug)]
struct IpHeader(Option<String>);
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
#[rocket::async_trait]
|
|
|
|
impl<'r> FromRequest<'r> for IpHeader {
|
2021-03-28 00:10:01 +01:00
|
|
|
type Error = ();
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
async fn from_request(req: &'r Request<'_>) -> Outcome<Self, Self::Error> {
|
2021-03-28 00:10:01 +01:00
|
|
|
if req.headers().get_one(&CONFIG.ip_header()).is_some() {
|
|
|
|
Outcome::Success(IpHeader(Some(CONFIG.ip_header())))
|
|
|
|
} else if req.headers().get_one("X-Client-IP").is_some() {
|
|
|
|
Outcome::Success(IpHeader(Some(String::from("X-Client-IP"))))
|
|
|
|
} else if req.headers().get_one("X-Real-IP").is_some() {
|
|
|
|
Outcome::Success(IpHeader(Some(String::from("X-Real-IP"))))
|
|
|
|
} else if req.headers().get_one("X-Forwarded-For").is_some() {
|
|
|
|
Outcome::Success(IpHeader(Some(String::from("X-Forwarded-For"))))
|
|
|
|
} else {
|
|
|
|
Outcome::Success(IpHeader(None))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-01 09:23:46 +02:00
|
|
|
/// Used for `Location` response headers, which must specify an absolute URI
|
|
|
|
/// (see https://tools.ietf.org/html/rfc2616#section-14.30).
|
2020-08-12 19:07:52 +02:00
|
|
|
fn admin_url(referer: Referer) -> String {
|
|
|
|
// If we get a referer use that to make it work when, DOMAIN is not set
|
|
|
|
if let Some(mut referer) = referer.0 {
|
|
|
|
if let Some(start_index) = referer.find(ADMIN_PATH) {
|
|
|
|
referer.truncate(start_index + ADMIN_PATH.len());
|
|
|
|
return referer;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if CONFIG.domain_set() {
|
|
|
|
// Don't use CONFIG.domain() directly, since the user may want to keep a
|
|
|
|
// trailing slash there, particularly when running under a subpath.
|
|
|
|
format!("{}{}{}", CONFIG.domain_origin(), CONFIG.domain_path(), ADMIN_PATH)
|
|
|
|
} else {
|
|
|
|
// Last case, when no referer or domain set, technically invalid but better than nothing
|
|
|
|
ADMIN_PATH.to_string()
|
|
|
|
}
|
2020-05-01 09:23:46 +02:00
|
|
|
}
|
|
|
|
|
2019-01-19 21:36:34 +01:00
|
|
|
#[get("/", rank = 2)]
|
2021-11-19 17:50:16 +01:00
|
|
|
fn admin_login(flash: Option<FlashMessage<'_>>) -> ApiResult<Html<String>> {
|
2019-01-19 21:36:34 +01:00
|
|
|
// If there is an error, show it
|
2021-11-07 18:53:39 +01:00
|
|
|
let msg = flash.map(|msg| format!("{}: {}", msg.kind(), msg.message()));
|
2021-03-31 22:45:05 +02:00
|
|
|
let json = json!({
|
|
|
|
"page_content": "admin/login",
|
|
|
|
"version": VERSION,
|
|
|
|
"error": msg,
|
|
|
|
"urlpath": CONFIG.domain_path()
|
|
|
|
});
|
2019-01-19 21:36:34 +01:00
|
|
|
|
|
|
|
// Return the page
|
2019-02-02 16:47:27 +01:00
|
|
|
let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
|
2019-01-19 21:36:34 +01:00
|
|
|
Ok(Html(text))
|
2018-12-18 01:53:21 +01:00
|
|
|
}
|
|
|
|
|
2019-01-19 22:12:52 +01:00
|
|
|
/// Form body for the admin login POST; carries the submitted admin token.
#[derive(FromForm)]
struct LoginForm {
    token: String,
}
|
|
|
|
|
2019-01-19 21:36:34 +01:00
|
|
|
#[post("/", data = "<data>")]
|
2020-08-12 19:07:52 +02:00
|
|
|
fn post_admin_login(
|
|
|
|
data: Form<LoginForm>,
|
2021-11-19 17:50:16 +01:00
|
|
|
cookies: &CookieJar<'_>,
|
2020-08-12 19:07:52 +02:00
|
|
|
ip: ClientIp,
|
|
|
|
referer: Referer,
|
|
|
|
) -> Result<Redirect, Flash<Redirect>> {
|
2019-01-19 21:36:34 +01:00
|
|
|
let data = data.into_inner();
|
|
|
|
|
2021-12-22 21:48:49 +01:00
|
|
|
if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
|
|
|
|
return Err(Flash::error(Redirect::to(admin_url(referer)), "Too many requests, try again later."));
|
|
|
|
}
|
|
|
|
|
2019-01-20 17:43:56 +01:00
|
|
|
// If the token is invalid, redirect to login page
|
2019-01-19 21:36:34 +01:00
|
|
|
if !_validate_token(&data.token) {
|
|
|
|
error!("Invalid admin token. IP: {}", ip.ip);
|
2021-04-06 22:54:42 +02:00
|
|
|
Err(Flash::error(Redirect::to(admin_url(referer)), "Invalid admin token, please try again."))
|
2019-01-19 21:36:34 +01:00
|
|
|
} else {
|
|
|
|
// If the token received is valid, generate JWT and save it as a cookie
|
|
|
|
let claims = generate_admin_claims();
|
|
|
|
let jwt = encode_jwt(&claims);
|
|
|
|
|
|
|
|
let cookie = Cookie::build(COOKIE_NAME, jwt)
|
2020-02-19 06:27:00 +01:00
|
|
|
.path(admin_path())
|
2021-11-07 18:53:39 +01:00
|
|
|
.max_age(rocket::time::Duration::minutes(20))
|
2019-01-20 15:36:33 +01:00
|
|
|
.same_site(SameSite::Strict)
|
2019-01-19 21:36:34 +01:00
|
|
|
.http_only(true)
|
|
|
|
.finish();
|
|
|
|
|
|
|
|
cookies.add(cookie);
|
2020-08-12 19:07:52 +02:00
|
|
|
Ok(Redirect::to(admin_url(referer)))
|
2019-01-19 21:36:34 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn _validate_token(token: &str) -> bool {
|
2019-01-25 18:23:51 +01:00
|
|
|
match CONFIG.admin_token().as_ref() {
|
2019-01-19 21:36:34 +01:00
|
|
|
None => false,
|
2019-03-07 20:21:50 +01:00
|
|
|
Some(t) => crate::crypto::ct_eq(t.trim(), token.trim()),
|
2019-01-19 21:36:34 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-02 16:47:27 +01:00
|
|
|
/// Data handed to the handlebars base template when rendering an admin page.
#[derive(Serialize)]
struct AdminTemplateData {
    // Name of the partial template to render inside the base layout.
    page_content: String,
    // Running server version, if known at build time.
    version: Option<&'static str>,
    // Optional page-specific JSON payload exposed to the template.
    page_data: Option<Value>,
    // Current (masked/prepared) configuration for the settings page.
    config: Value,
    // Whether the DB backup button should be enabled (SQLite only).
    can_backup: bool,
    logged_in: bool,
    // Domain sub-path, used to build links in the templates.
    urlpath: String,
}
|
|
|
|
|
|
|
|
impl AdminTemplateData {
|
2020-05-28 10:42:36 +02:00
|
|
|
fn new() -> Self {
|
2019-02-02 16:47:27 +01:00
|
|
|
Self {
|
2020-05-28 10:42:36 +02:00
|
|
|
page_content: String::from("admin/settings"),
|
2019-02-10 15:46:51 +01:00
|
|
|
version: VERSION,
|
2019-02-03 00:22:18 +01:00
|
|
|
config: CONFIG.prepare_json(),
|
2019-06-02 00:08:52 +02:00
|
|
|
can_backup: *CAN_BACKUP,
|
2019-12-27 18:37:14 +01:00
|
|
|
logged_in: true,
|
2020-02-22 17:49:33 +01:00
|
|
|
urlpath: CONFIG.domain_path(),
|
2021-06-19 19:22:19 +02:00
|
|
|
page_data: None,
|
2020-05-28 10:42:36 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-06-19 19:22:19 +02:00
|
|
|
fn with_data(page_content: &str, page_data: Value) -> Self {
|
2020-05-28 10:42:36 +02:00
|
|
|
Self {
|
2021-06-19 19:22:19 +02:00
|
|
|
page_content: String::from(page_content),
|
2020-05-28 10:42:36 +02:00
|
|
|
version: VERSION,
|
2021-06-19 19:22:19 +02:00
|
|
|
page_data: Some(page_data),
|
2020-05-28 10:42:36 +02:00
|
|
|
config: CONFIG.prepare_json(),
|
|
|
|
can_backup: *CAN_BACKUP,
|
|
|
|
logged_in: true,
|
|
|
|
urlpath: CONFIG.domain_path(),
|
2019-02-02 16:47:27 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn render(self) -> Result<String, Error> {
|
|
|
|
CONFIG.render_template(BASE_TEMPLATE, &self)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-19 21:36:34 +01:00
|
|
|
/// Renders the admin settings page; requires a valid admin session cookie.
#[get("/", rank = 1)]
fn admin_page(_token: AdminToken) -> ApiResult<Html<String>> {
    let text = AdminTemplateData::new().render()?;
    Ok(Html(text))
}
|
|
|
|
|
|
|
|
/// JSON body for user-invite and SMTP-test requests: a single email address.
#[derive(Deserialize, Debug)]
#[allow(non_snake_case)]
struct InviteData {
    email: String,
}
|
|
|
|
|
2021-11-16 17:07:55 +01:00
|
|
|
/// Looks up a user by UUID, mapping a missing user to an HTTP 404 error.
async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult<User> {
    if let Some(user) = User::find_by_uuid(uuid, conn).await {
        Ok(user)
    } else {
        // err_code! returns early with the given message and status code.
        err_code!("User doesn't exist", Status::NotFound.code);
    }
}
|
|
|
|
|
2018-12-18 18:52:58 +01:00
|
|
|
/// Invites a new user by email: sends an invite mail when SMTP is enabled,
/// otherwise records a pending `Invitation` row. Returns the created user.
#[post("/invite", data = "<data>")]
async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -> JsonResult {
    let data: InviteData = data.into_inner();
    let email = data.email.clone();
    // Refuse duplicate accounts with 409 Conflict.
    if User::find_by_mail(&data.email, &conn).await.is_some() {
        err_code!("User already exists", Status::Conflict.code)
    }

    let mut user = User::new(email);

    // Local helper so both delivery paths (SMTP / DB invitation) share one call site.
    async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
        if CONFIG.mail_enabled() {
            mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await
        } else {
            let invitation = Invitation::new(user.email.clone());
            invitation.save(conn).await
        }
    }

    // Generate the invite first; only persist the user once that succeeded.
    _generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;
    user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?;

    Ok(Json(user.to_json(&conn).await))
}
|
|
|
|
|
2020-02-26 11:02:22 +01:00
|
|
|
#[post("/test/smtp", data = "<data>")]
|
2022-07-06 23:57:37 +02:00
|
|
|
async fn test_smtp(data: Json<InviteData>, _token: AdminToken) -> EmptyResult {
|
2020-02-26 11:02:22 +01:00
|
|
|
let data: InviteData = data.into_inner();
|
|
|
|
|
|
|
|
if CONFIG.mail_enabled() {
|
2022-07-06 23:57:37 +02:00
|
|
|
mail::send_test(&data.email).await
|
2020-02-26 11:02:22 +01:00
|
|
|
} else {
|
|
|
|
err!("Mail is not enabled")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-12-01 21:15:14 +01:00
|
|
|
/// Logs the admin out by removing the session cookie and redirecting back.
#[get("/logout")]
fn logout(cookies: &CookieJar<'_>, referer: Referer) -> Redirect {
    // The removal cookie must carry the same path the original was set with.
    cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish());
    Redirect::to(admin_url(referer))
}
|
|
|
|
|
2019-04-05 20:49:58 +02:00
|
|
|
/// Returns all users as a JSON array (client-API-style PascalCase extras).
#[get("/users")]
async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json<Value> {
    let users_json = stream::iter(User::get_all(&conn).await)
        .then(|u| async {
            let u = u; // Move out this single variable
            let mut usr = u.to_json(&conn).await;
            usr["UserEnabled"] = json!(u.enabled);
            usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
            usr
        })
        .collect::<Vec<Value>>()
        .await;

    Json(Value::Array(users_json))
}
|
|
|
|
|
2020-05-28 10:42:36 +02:00
|
|
|
/// Renders the user-overview admin page with per-user cipher/attachment
/// statistics and activity timestamps.
#[get("/users/overview")]
async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
    let users_json = stream::iter(User::get_all(&conn).await)
        .then(|u| async {
            let u = u; // Move out this single variable
            let mut usr = u.to_json(&conn).await;
            usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await);
            usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await);
            // NOTE(review): the `as i32` cast can overflow for totals >= 2 GiB — confirm get_display_size's expected range.
            usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await as i32));
            usr["user_enabled"] = json!(u.enabled);
            usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
            usr["last_active"] = match u.last_active(&conn).await {
                Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)),
                None => json!("Never"),
            };
            usr
        })
        .collect::<Vec<Value>>()
        .await;

    let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?;
    Ok(Html(text))
}
|
|
|
|
|
2021-05-08 22:03:03 +02:00
|
|
|
/// Returns a single user as JSON, or 404 when the UUID is unknown.
#[get("/users/<uuid>")]
async fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult {
    let u = get_user_or_404(&uuid, &conn).await?;
    let mut usr = u.to_json(&conn).await;
    usr["UserEnabled"] = json!(u.enabled);
    usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT));
    Ok(Json(usr))
}
|
|
|
|
|
2018-12-18 18:52:58 +01:00
|
|
|
/// Permanently deletes a user account (404 when the UUID is unknown).
#[post("/users/<uuid>/delete")]
async fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let user = get_user_or_404(&uuid, &conn).await?;
    user.delete(&conn).await
}
|
|
|
|
|
2019-01-26 19:28:54 +01:00
|
|
|
/// Logs a user out everywhere: deletes all their devices and rotates the
/// security stamp so existing tokens become invalid.
#[post("/users/<uuid>/deauth")]
async fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let mut user = get_user_or_404(&uuid, &conn).await?;
    Device::delete_all_by_user(&user.uuid, &conn).await?;
    user.reset_security_stamp();

    user.save(&conn).await
}
|
|
|
|
|
2020-11-30 23:12:56 +01:00
|
|
|
#[post("/users/<uuid>/disable")]
|
2021-11-16 17:07:55 +01:00
|
|
|
async fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
|
|
|
|
let mut user = get_user_or_404(&uuid, &conn).await?;
|
|
|
|
Device::delete_all_by_user(&user.uuid, &conn).await?;
|
2020-11-30 23:12:56 +01:00
|
|
|
user.reset_security_stamp();
|
|
|
|
user.enabled = false;
|
|
|
|
|
2021-11-16 17:07:55 +01:00
|
|
|
user.save(&conn).await
|
2020-11-30 23:12:56 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Re-enables a previously disabled user account.
#[post("/users/<uuid>/enable")]
async fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let mut user = get_user_or_404(&uuid, &conn).await?;
    user.enabled = true;

    user.save(&conn).await
}
|
|
|
|
|
2019-08-21 17:13:06 +02:00
|
|
|
/// Removes all two-factor providers and the TOTP recovery code for a user.
#[post("/users/<uuid>/remove-2fa")]
async fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let mut user = get_user_or_404(&uuid, &conn).await?;
    TwoFactor::delete_all_by_user(&user.uuid, &conn).await?;
    user.totp_recover = None;
    user.save(&conn).await
}
|
|
|
|
|
2021-02-03 18:43:54 +01:00
|
|
|
/// JSON body for changing a user's role within an organization.
#[derive(Deserialize, Debug)]
struct UserOrgTypeData {
    // New role; accepted as number or string and parsed via UserOrgType::from_str.
    user_type: NumberOrString,
    user_uuid: String,
    org_uuid: String,
}
|
|
|
|
|
|
|
|
/// Changes a user's role inside an organization, guarding against demoting
/// the last confirmed owner and against organization policy violations.
#[post("/users/org_type", data = "<data>")]
async fn update_user_org_type(data: Json<UserOrgTypeData>, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let data: UserOrgTypeData = data.into_inner();

    let mut user_to_edit = match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await {
        Some(user) => user,
        None => err!("The specified user isn't member of the organization"),
    };

    let new_type = match UserOrgType::from_str(&data.user_type.into_string()) {
        Some(new_type) => new_type as i32,
        None => err!("Invalid type"),
    };

    if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner {
        // Removing owner permission, check that there is at least one other confirmed owner
        if UserOrganization::count_confirmed_by_org_and_type(&data.org_uuid, UserOrgType::Owner, &conn).await <= 1 {
            err!("Can't change the type of the last owner")
        }
    }

    // This check is also done at api::organizations::{accept_invite(), _confirm_invite, _activate_user(), edit_user()}, update_user_org_type
    // It returns different error messages per function.
    if new_type < UserOrgType::Admin {
        match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, &user_to_edit.org_uuid, true, &conn).await {
            Ok(_) => {}
            Err(OrgPolicyErr::TwoFactorMissing) => {
                err!("You cannot modify this user to this type because it has no two-step login method activated");
            }
            Err(OrgPolicyErr::SingleOrgEnforced) => {
                err!("You cannot modify this user to this type because it is a member of an organization which forbids it");
            }
        }
    }

    user_to_edit.atype = new_type;
    user_to_edit.save(&conn).await
}
|
|
|
|
|
2019-03-07 21:08:33 +01:00
|
|
|
/// Bumps the revision timestamp of every user, forcing clients to re-sync.
#[post("/users/update_revision")]
async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
    User::update_all_revisions(&conn).await
}
|
|
|
|
|
2020-05-28 10:42:36 +02:00
|
|
|
/// Renders the organization-overview admin page with per-organization
/// member, cipher and attachment statistics.
#[get("/organizations/overview")]
async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
    let organizations_json = stream::iter(Organization::get_all(&conn).await)
        .then(|o| async {
            let o = o; //Move out this single variable
            let mut org = o.to_json();
            org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn).await);
            org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await);
            org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await);
            // NOTE(review): the `as i32` cast can overflow for totals >= 2 GiB — confirm get_display_size's expected range.
            org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await as i32));
            org
        })
        .collect::<Vec<Value>>()
        .await;

    let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?;
    Ok(Html(text))
}
|
|
|
|
|
2021-02-03 18:43:54 +01:00
|
|
|
/// Permanently deletes an organization and its data.
#[post("/organizations/<uuid>/delete")]
async fn delete_organization(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult {
    let org = Organization::find_by_uuid(&uuid, &conn).await.map_res("Organization doesn't exist")?;
    org.delete(&conn).await
}
|
|
|
|
|
2020-06-03 17:07:32 +02:00
|
|
|
/// Shape of the web-vault `version.json` / `vw-version.json` file.
#[derive(Deserialize)]
struct WebVaultVersion {
    version: String,
}
|
|
|
|
|
2020-06-03 17:07:32 +02:00
|
|
|
/// Subset of the GitHub "latest release" API response we care about.
#[derive(Deserialize)]
struct GitRelease {
    tag_name: String,
}
|
|
|
|
|
|
|
|
/// Subset of the GitHub "commit" API response we care about.
#[derive(Deserialize)]
struct GitCommit {
    sha: String,
}
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
async fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
|
2021-04-06 22:04:37 +02:00
|
|
|
let github_api = get_reqwest_client();
|
2020-05-28 20:25:25 +02:00
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
Ok(github_api.get(url).send().await?.error_for_status()?.json::<T>().await?)
|
2021-01-19 17:55:21 +01:00
|
|
|
}
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
async fn has_http_access() -> bool {
|
2021-04-06 22:04:37 +02:00
|
|
|
let http_access = get_reqwest_client();
|
2021-01-19 17:55:21 +01:00
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
match http_access.head("https://github.com/dani-garcia/vaultwarden").send().await {
|
2021-01-19 17:55:21 +01:00
|
|
|
Ok(r) => r.status().is_success(),
|
|
|
|
_ => false,
|
|
|
|
}
|
2020-05-28 20:25:25 +02:00
|
|
|
}
|
|
|
|
|
2022-06-08 19:46:33 +02:00
|
|
|
use cached::proc_macro::cached;
|
|
|
|
/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already.
|
|
|
|
/// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit.
|
|
|
|
#[cached(time = 300, sync_writes = true)]
|
|
|
|
async fn get_release_info(has_http_access: bool, running_within_docker: bool) -> (String, String, String) {
|
2021-01-19 17:55:21 +01:00
|
|
|
// If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway.
|
2022-06-08 19:46:33 +02:00
|
|
|
if has_http_access {
|
|
|
|
info!("Running get_release_info!!");
|
2020-06-03 17:07:32 +02:00
|
|
|
(
|
2021-11-07 18:53:39 +01:00
|
|
|
match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest")
|
|
|
|
.await
|
|
|
|
{
|
2020-06-03 17:07:32 +02:00
|
|
|
Ok(r) => r.tag_name,
|
2021-01-19 17:55:21 +01:00
|
|
|
_ => "-".to_string(),
|
2020-06-03 17:07:32 +02:00
|
|
|
},
|
2021-11-07 18:53:39 +01:00
|
|
|
match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/vaultwarden/commits/main").await
|
|
|
|
{
|
2020-06-03 17:07:32 +02:00
|
|
|
Ok(mut c) => {
|
|
|
|
c.sha.truncate(8);
|
|
|
|
c.sha
|
2021-01-19 17:55:21 +01:00
|
|
|
}
|
|
|
|
_ => "-".to_string(),
|
2020-06-03 17:07:32 +02:00
|
|
|
},
|
2021-02-03 18:43:54 +01:00
|
|
|
// Do not fetch the web-vault version when running within Docker.
|
|
|
|
// The web-vault version is embedded within the container it self, and should not be updated manually
|
|
|
|
if running_within_docker {
|
|
|
|
"-".to_string()
|
|
|
|
} else {
|
2021-03-31 22:18:35 +02:00
|
|
|
match get_github_api::<GitRelease>(
|
|
|
|
"https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest",
|
2021-11-07 18:53:39 +01:00
|
|
|
)
|
|
|
|
.await
|
|
|
|
{
|
2021-02-03 18:43:54 +01:00
|
|
|
Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
|
|
|
|
_ => "-".to_string(),
|
|
|
|
}
|
2020-06-03 17:07:32 +02:00
|
|
|
},
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
("-".to_string(), "-".to_string(), "-".to_string())
|
2022-06-08 19:46:33 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[get("/diagnostics")]
|
|
|
|
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
|
|
|
|
use chrono::prelude::*;
|
|
|
|
use std::net::ToSocketAddrs;
|
|
|
|
|
|
|
|
// Get current running versions
|
|
|
|
let web_vault_version: WebVaultVersion =
|
2022-07-15 19:13:26 +02:00
|
|
|
match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) {
|
2022-06-08 19:46:33 +02:00
|
|
|
Ok(s) => serde_json::from_str(&s)?,
|
2022-07-15 19:13:26 +02:00
|
|
|
_ => match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) {
|
2022-06-08 19:46:33 +02:00
|
|
|
Ok(s) => serde_json::from_str(&s)?,
|
|
|
|
_ => WebVaultVersion {
|
|
|
|
version: String::from("Version file missing"),
|
|
|
|
},
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
// Execute some environment checks
|
|
|
|
let running_within_docker = is_running_in_docker();
|
|
|
|
let has_http_access = has_http_access().await;
|
|
|
|
let uses_proxy = env::var_os("HTTP_PROXY").is_some()
|
|
|
|
|| env::var_os("http_proxy").is_some()
|
|
|
|
|| env::var_os("HTTPS_PROXY").is_some()
|
|
|
|
|| env::var_os("https_proxy").is_some();
|
|
|
|
|
|
|
|
// Check if we are able to resolve DNS entries
|
|
|
|
let dns_resolved = match ("github.com", 0).to_socket_addrs().map(|mut i| i.next()) {
|
|
|
|
Ok(Some(a)) => a.ip().to_string(),
|
|
|
|
_ => "Could not resolve domain name.".to_string(),
|
2020-05-28 20:25:25 +02:00
|
|
|
};
|
2020-08-12 19:07:52 +02:00
|
|
|
|
2022-06-08 19:46:33 +02:00
|
|
|
let (latest_release, latest_commit, latest_web_build) =
|
|
|
|
get_release_info(has_http_access, running_within_docker).await;
|
|
|
|
|
2021-03-28 00:10:01 +01:00
|
|
|
let ip_header_name = match &ip_header.0 {
|
|
|
|
Some(h) => h,
|
2021-03-31 22:18:35 +02:00
|
|
|
_ => "",
|
2021-03-28 00:10:01 +01:00
|
|
|
};
|
|
|
|
|
2020-05-28 10:42:36 +02:00
|
|
|
let diagnostics_json = json!({
|
|
|
|
"dns_resolved": dns_resolved,
|
2020-05-28 20:25:25 +02:00
|
|
|
"latest_release": latest_release,
|
|
|
|
"latest_commit": latest_commit,
|
2021-04-05 15:09:16 +02:00
|
|
|
"web_vault_enabled": &CONFIG.web_vault_enabled(),
|
|
|
|
"web_vault_version": web_vault_version.version,
|
2020-06-03 17:07:32 +02:00
|
|
|
"latest_web_build": latest_web_build,
|
2021-01-19 17:55:21 +01:00
|
|
|
"running_within_docker": running_within_docker,
|
2021-11-05 19:18:54 +01:00
|
|
|
"docker_base_image": docker_base_image(),
|
2021-01-19 17:55:21 +01:00
|
|
|
"has_http_access": has_http_access,
|
2021-03-28 00:10:01 +01:00
|
|
|
"ip_header_exists": &ip_header.0.is_some(),
|
2021-03-28 11:59:49 +02:00
|
|
|
"ip_header_match": ip_header_name == CONFIG.ip_header(),
|
2021-03-28 00:10:01 +01:00
|
|
|
"ip_header_name": ip_header_name,
|
|
|
|
"ip_header_config": &CONFIG.ip_header(),
|
2021-01-19 17:55:21 +01:00
|
|
|
"uses_proxy": uses_proxy,
|
|
|
|
"db_type": *DB_TYPE,
|
2021-11-07 18:53:39 +01:00
|
|
|
"db_version": get_sql_server_version(&conn).await,
|
2021-01-19 17:55:21 +01:00
|
|
|
"admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
|
2021-06-19 19:22:19 +02:00
|
|
|
"overrides": &CONFIG.get_overrides().join(", "),
|
2021-04-05 15:09:16 +02:00
|
|
|
"server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(),
|
2021-01-19 17:55:21 +01:00
|
|
|
"server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
|
2020-05-28 10:42:36 +02:00
|
|
|
});
|
|
|
|
|
2021-06-19 19:22:19 +02:00
|
|
|
let text = AdminTemplateData::with_data("admin/diagnostics", diagnostics_json).render()?;
|
2020-05-28 10:42:36 +02:00
|
|
|
Ok(Html(text))
|
|
|
|
}
|
|
|
|
|
2021-01-19 17:55:21 +01:00
|
|
|
#[get("/diagnostics/config")]
|
2021-03-27 16:07:26 +01:00
|
|
|
fn get_diagnostics_config(_token: AdminToken) -> Json<Value> {
|
2021-01-19 17:55:21 +01:00
|
|
|
let support_json = CONFIG.get_support_json();
|
2021-03-27 16:07:26 +01:00
|
|
|
Json(support_json)
|
2021-01-19 17:55:21 +01:00
|
|
|
}
|
|
|
|
|
2019-02-02 01:09:21 +01:00
|
|
|
#[post("/config", data = "<data>")]
|
2019-02-02 16:47:27 +01:00
|
|
|
fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
|
|
|
|
let data: ConfigBuilder = data.into_inner();
|
|
|
|
CONFIG.update_config(data)
|
2019-02-02 01:09:21 +01:00
|
|
|
}
|
|
|
|
|
2019-02-06 17:32:13 +01:00
|
|
|
/// Deletes the user-supplied configuration overrides, reverting to defaults/env.
#[post("/config/delete")]
fn delete_config(_token: AdminToken) -> EmptyResult {
    CONFIG.delete_user_config()
}
|
|
|
|
|
2019-05-03 15:46:29 +02:00
|
|
|
#[post("/config/backup_db")]
|
2021-11-07 18:53:39 +01:00
|
|
|
async fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult {
|
2019-06-02 00:08:52 +02:00
|
|
|
if *CAN_BACKUP {
|
2021-11-07 18:53:39 +01:00
|
|
|
backup_database(&conn).await
|
2019-06-02 00:08:52 +02:00
|
|
|
} else {
|
2021-04-05 15:09:16 +02:00
|
|
|
err!("Can't back up current DB (Only SQLite supports this feature)");
|
2019-06-02 00:08:52 +02:00
|
|
|
}
|
2019-05-03 15:46:29 +02:00
|
|
|
}
|
|
|
|
|
2018-12-18 01:53:21 +01:00
|
|
|
/// Request guard proving the caller holds a valid admin session (or that the admin token check is disabled).
pub struct AdminToken {}
|
|
|
|
|
2021-11-07 18:53:39 +01:00
|
|
|
#[rocket::async_trait]
impl<'r> FromRequest<'r> for AdminToken {
    type Error = &'static str;

    /// Validates the admin JWT cookie. Forwards (falls through to the login
    /// route) when the cookie is missing or invalid; errors only when the
    /// client IP cannot be determined.
    async fn from_request(request: &'r Request<'_>) -> request::Outcome<Self, Self::Error> {
        if CONFIG.disable_admin_token() {
            // Token checking explicitly disabled: everyone is an admin.
            Outcome::Success(AdminToken {})
        } else {
            let cookies = request.cookies();

            let access_token = match cookies.get(COOKIE_NAME) {
                Some(cookie) => cookie.value(),
                None => return Outcome::Forward(()), // If there is no cookie, redirect to login
            };

            let ip = match ClientIp::from_request(request).await {
                Outcome::Success(ip) => ip.ip,
                _ => err_handler!("Error getting Client IP"),
            };

            if decode_admin(access_token).is_err() {
                // Remove admin cookie
                cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish());
                error!("Invalid or expired admin JWT. IP: {}.", ip);
                return Outcome::Forward(());
            }

            Outcome::Success(AdminToken {})
        }
    }
}
|