Mirror of https://github.com/dani-garcia/vaultwarden.git, synced 2025-03-12 16:47:03 +01:00

Add AWS S3 support for non-temporary files

This commit is contained in:
Chase Douglas 2025-02-07 09:13:37 -08:00
Parent 3e886e670e
Commit 9a9786e370
22 changed files with 1448 additions and 211 deletions

461
Cargo.lock (generated)
View file

@@ -74,6 +74,56 @@ dependencies = [
 "libc",
]

+[[package]]
+name = "anstream"
+version = "0.6.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e"
+dependencies = [
+ "anstyle",
+ "once_cell",
+ "windows-sys 0.59.0",
+]
+
[[package]]
name = "argon2"
version = "0.5.3"
@@ -355,13 +405,14 @@ dependencies = [
[[package]]
name = "aws-runtime"
-version = "1.5.2"
+version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44f6f1124d6e19ab6daf7f2e615644305dc6cb2d706892a8a8c0b98db35de020"
+checksum = "bee7643696e7fdd74c10f9eb42848a87fe469d35eae9c3323f80aa98f350baac"
dependencies = [
 "aws-credential-types",
 "aws-sigv4",
 "aws-smithy-async",
+ "aws-smithy-eventstream",
 "aws-smithy-http",
 "aws-smithy-runtime",
 "aws-smithy-runtime-api",
@@ -403,6 +454,40 @@ dependencies = [
 "url",
]

+[[package]]
+name = "aws-sdk-s3"
+version = "1.72.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1c7ce6d85596c4bcb3aba8ad5bb134b08e204c8a475c9999c1af9290f80aa8ad"
+dependencies = [
+ "aws-credential-types",
+ "aws-runtime",
+ "aws-sigv4",
+ "aws-smithy-async",
+ "aws-smithy-checksums",
+ "aws-smithy-eventstream",
+ "aws-smithy-http",
+ "aws-smithy-json",
+ "aws-smithy-runtime",
+ "aws-smithy-runtime-api",
+ "aws-smithy-types",
+ "aws-smithy-xml",
+ "aws-types",
+ "bytes",
+ "fastrand",
+ "hex",
+ "hmac",
+ "http 0.2.12",
+ "http-body 0.4.6",
+ "lru",
+ "once_cell",
+ "percent-encoding",
+ "regex-lite",
+ "sha2",
+ "tracing",
+ "url",
+]
+
[[package]]
name = "aws-sdk-sso"
version = "1.52.0"
@@ -472,32 +557,38 @@ dependencies = [
[[package]]
name = "aws-sigv4"
-version = "1.2.6"
+version = "1.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d3820e0c08d0737872ff3c7c1f21ebbb6693d832312d6152bf18ef50a5471c2"
+checksum = "690118821e46967b3c4501d67d7d52dd75106a9c54cf36cefa1985cedbe94e05"
dependencies = [
 "aws-credential-types",
+ "aws-smithy-eventstream",
 "aws-smithy-http",
 "aws-smithy-runtime-api",
 "aws-smithy-types",
 "bytes",
+ "crypto-bigint 0.5.5",
 "form_urlencoded",
 "hex",
 "hmac",
 "http 0.2.12",
 "http 1.2.0",
 "once_cell",
+ "p256",
 "percent-encoding",
+ "ring",
 "sha2",
+ "subtle",
 "time",
 "tracing",
+ "zeroize",
]

[[package]]
name = "aws-smithy-async"
-version = "1.2.3"
+version = "1.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "427cb637d15d63d6f9aae26358e1c9a9c09d5aa490d64b09354c8217cfef0f28"
+checksum = "fa59d1327d8b5053c54bf2eaae63bf629ba9e904434d0835a28ed3c0ed0a614e"
dependencies = [
 "futures-util",
 "pin-project-lite",
@@ -505,11 +596,45 @@ dependencies = [
]

[[package]]
-name = "aws-smithy-http"
-version = "0.60.11"
+name = "aws-smithy-checksums"
+version = "0.62.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c8bc3e8fdc6b8d07d976e301c02fe553f72a39b7a9fea820e023268467d7ab6"
+checksum = "f2f45a1c384d7a393026bc5f5c177105aa9fa68e4749653b985707ac27d77295"
dependencies = [
+ "aws-smithy-http",
+ "aws-smithy-types",
+ "bytes",
+ "crc32c",
+ "crc32fast",
+ "crc64fast-nvme",
+ "hex",
+ "http 0.2.12",
+ "http-body 0.4.6",
+ "md-5",
+ "pin-project-lite",
+ "sha1",
+ "sha2",
+ "tracing",
+]
+
+[[package]]
+name = "aws-smithy-eventstream"
+version = "0.60.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b18559a41e0c909b77625adf2b8c50de480a8041e5e4a3f5f7d177db70abc5a"
+dependencies = [
+ "aws-smithy-types",
+ "bytes",
+ "crc32fast",
+]
+
+[[package]]
+name = "aws-smithy-http"
+version = "0.60.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7809c27ad8da6a6a68c454e651d4962479e81472aa19ae99e59f9aba1f9713cc"
+dependencies = [
+ "aws-smithy-eventstream",
 "aws-smithy-runtime-api",
 "aws-smithy-types",
 "bytes",
@@ -526,9 +651,9 @@ dependencies = [
[[package]]
name = "aws-smithy-json"
-version = "0.61.1"
+version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee4e69cc50921eb913c6b662f8d909131bb3e6ad6cb6090d3a39b66fc5c52095"
+checksum = "623a51127f24c30776c8b374295f2df78d92517386f77ba30773f15a30ce1422"
dependencies = [
 "aws-smithy-types",
]
@@ -545,9 +670,9 @@ dependencies = [
[[package]]
name = "aws-smithy-runtime"
-version = "1.7.6"
+version = "1.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a05dd41a70fc74051758ee75b5c4db2c0ca070ed9229c3df50e9475cda1cb985"
+checksum = "d526a12d9ed61fadefda24abe2e682892ba288c2018bcb38b1b4c111d13f6d92"
dependencies = [
 "aws-smithy-async",
 "aws-smithy-http",
@@ -589,9 +714,9 @@ dependencies = [
[[package]]
name = "aws-smithy-types"
-version = "1.2.11"
+version = "1.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38ddc9bd6c28aeb303477170ddd183760a956a03e083b3902a990238a7e3792d"
+checksum = "c7b8a53819e42f10d0821f56da995e1470b199686a1809168db6ca485665f042"
dependencies = [
 "base64-simd",
 "bytes",
@@ -624,9 +749,9 @@ dependencies = [
[[package]]
name = "aws-types"
-version = "1.3.3"
+version = "1.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5221b91b3e441e6675310829fd8984801b772cb1546ef6c0e54dec9f1ac13fef"
+checksum = "b0df5a18c4f951c645300d365fec53a61418bcf4650f604f85fe2a665bfaa0c2"
dependencies = [
 "aws-credential-types",
 "aws-smithy-async",
@@ -651,6 +776,12 @@ dependencies = [
 "windows-targets 0.52.6",
]

+[[package]]
+name = "base16ct"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
+
[[package]]
name = "base64"
version = "0.13.1"
@@ -832,6 +963,25 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0"

+[[package]]
+name = "cbindgen"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fce8dd7fcfcbf3a0a87d8f515194b49d6135acab73e18bd380d1d93bb1a15eb"
+dependencies = [
+ "clap",
+ "heck 0.4.1",
+ "indexmap",
+ "log",
+ "proc-macro2",
+ "quote",
+ "serde",
+ "serde_json",
+ "syn",
+ "tempfile",
+ "toml",
+]
+
[[package]]
name = "cc"
version = "1.2.13"
@@ -891,12 +1041,45 @@ dependencies = [
 "stacker",
]

+[[package]]
+name = "clap"
+version = "4.5.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff"
+dependencies = [
+ "clap_builder",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.5.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap_lex",
+ "strsim",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
+
[[package]]
name = "codemap"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e769b5c8c8283982a987c6e948e540254f1058d5a74b8794914d4ef5fc2a24"

+[[package]]
+name = "colorchoice"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
+
[[package]]
name = "concurrent-queue"
version = "2.5.0"
@@ -906,6 +1089,12 @@ dependencies = [
 "crossbeam-utils",
]

+[[package]]
+name = "const-oid"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
+
[[package]]
name = "cookie"
version = "0.18.1"
@@ -960,6 +1149,30 @@ dependencies = [
 "libc",
]

+[[package]]
+name = "crc"
+version = "3.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
+dependencies = [
+ "crc-catalog",
+]
+
+[[package]]
+name = "crc-catalog"
+version = "2.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
+
+[[package]]
+name = "crc32c"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3a47af21622d091a8f0fb295b88bc886ac74efcc613efc19f5d0b21de5c89e47"
+dependencies = [
+ "rustc_version",
+]
+
[[package]]
name = "crc32fast"
version = "1.4.2"
@@ -969,6 +1182,16 @@ dependencies = [
 "cfg-if",
]

+[[package]]
+name = "crc64fast-nvme"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5e2ee08013e3f228d6d2394116c4549a6df77708442c62d887d83f68ef2ee37"
+dependencies = [
+ "cbindgen",
+ "crc",
+]
+
[[package]]
name = "cron"
version = "0.12.1"
@@ -986,6 +1209,28 @@ version = "0.8.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"

+[[package]]
+name = "crypto-bigint"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef"
+dependencies = [
+ "generic-array",
+ "rand_core 0.6.4",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "crypto-bigint"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
+dependencies = [
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "crypto-common"
version = "0.1.6"
@@ -1057,6 +1302,16 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c297a1c74b71ae29df00c3e22dd9534821d60eb9af5a0192823fa2acea70c2a"

+[[package]]
+name = "der"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de"
+dependencies = [
+ "const-oid",
+ "zeroize",
+]
+
[[package]]
name = "deranged"
version = "0.3.11"
@@ -1274,18 +1529,50 @@ checksum = "139ae9aca7527f85f26dd76483eb38533fd84bd571065da1739656ef71c5ff5b"
dependencies = [
 "darling",
 "either",
- "heck",
+ "heck 0.5.0",
 "proc-macro2",
 "quote",
 "syn",
]

+[[package]]
+name = "ecdsa"
+version = "0.14.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c"
+dependencies = [
+ "der",
+ "elliptic-curve",
+ "rfc6979",
+ "signature",
+]
+
[[package]]
name = "either"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"

+[[package]]
+name = "elliptic-curve"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3"
+dependencies = [
+ "base16ct",
+ "crypto-bigint 0.4.9",
+ "der",
+ "digest",
+ "ff",
+ "generic-array",
+ "group",
+ "pkcs8",
+ "rand_core 0.6.4",
+ "sec1",
+ "subtle",
+ "zeroize",
+]
+
[[package]]
name = "email-encoding"
version = "0.3.1"
@@ -1320,7 +1607,7 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc"
dependencies = [
- "heck",
+ "heck 0.5.0",
 "proc-macro2",
 "quote",
 "syn",
@@ -1393,6 +1680,16 @@ dependencies = [
 "syslog",
]

+[[package]]
+name = "ff"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160"
+dependencies = [
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "figment"
version = "0.10.19"
@@ -1423,6 +1720,12 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"

+[[package]]
+name = "foldhash"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
+
[[package]]
name = "foreign-types"
version = "0.3.2"
@@ -1661,6 +1964,17 @@ dependencies = [
 "phf",
]

+[[package]]
+name = "group"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
+dependencies = [
+ "ff",
+ "rand_core 0.6.4",
+ "subtle",
+]
+
[[package]]
name = "h2"
version = "0.3.26"
@@ -1737,6 +2051,17 @@ name = "hashbrown"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
+dependencies = [
+ "allocator-api2",
+ "equivalent",
+ "foldhash",
+]
+
+[[package]]
+name = "heck"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"

[[package]]
name = "heck"
@@ -2241,6 +2566,12 @@ dependencies = [
 "windows-sys 0.59.0",
]

+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
[[package]]
name = "itoa"
version = "1.0.14"
@@ -2435,6 +2766,15 @@ dependencies = [
 "tracing-subscriber",
]

+[[package]]
+name = "lru"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
+dependencies = [
+ "hashbrown 0.15.2",
+]
+
[[package]]
name = "lru-cache"
version = "0.1.2"
@@ -2467,6 +2807,16 @@ dependencies = [
 "regex-automata 0.1.10",
]

+[[package]]
+name = "md-5"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
+dependencies = [
+ "cfg-if",
+ "digest",
+]
+
[[package]]
name = "memchr"
version = "2.7.4"
@@ -2773,6 +3123,17 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"

+[[package]]
+name = "p256"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594"
+dependencies = [
+ "ecdsa",
+ "elliptic-curve",
+ "sha2",
+]
+
[[package]]
name = "parking"
version = "2.2.1"
@@ -2993,6 +3354,16 @@ dependencies = [
 "futures-io",
]

+[[package]]
+name = "pkcs8"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba"
+dependencies = [
+ "der",
+ "spki",
+]
+
[[package]]
name = "pkg-config"
version = "0.3.31"
@@ -3360,6 +3731,17 @@ dependencies = [
 "quick-error",
]

+[[package]]
+name = "rfc6979"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb"
+dependencies = [
+ "crypto-bigint 0.4.9",
+ "hmac",
+ "zeroize",
+]
+
[[package]]
name = "ring"
version = "0.17.8"
@@ -3683,6 +4065,20 @@ dependencies = [
 "untrusted",
]

+[[package]]
+name = "sec1"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928"
+dependencies = [
+ "base16ct",
+ "der",
+ "generic-array",
+ "pkcs8",
+ "subtle",
+ "zeroize",
+]
+
[[package]]
name = "security-framework"
version = "2.11.1"
@@ -3831,6 +4227,16 @@ dependencies = [
 "libc",
]

+[[package]]
+name = "signature"
+version = "1.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c"
+dependencies = [
+ "digest",
+ "rand_core 0.6.4",
+]
+
[[package]]
name = "simple_asn1"
version = "0.6.3"
@@ -3889,6 +4295,16 @@ dependencies = [
 "lock_api",
]

+[[package]]
+name = "spki"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b"
+dependencies = [
+ "base64ct",
+ "der",
+]
+
[[package]]
name = "stable-pattern"
version = "0.1.0"
@@ -4485,6 +4901,12 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"

+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
[[package]]
name = "uuid"
version = "1.13.1"
@@ -4513,6 +4935,7 @@ dependencies = [
 "argon2",
 "aws-config",
 "aws-sdk-dsql",
+ "aws-sdk-s3",
 "bigdecimal",
 "bytes",
 "cached",

View file

@@ -20,8 +20,10 @@ build = "build.rs"
enable_syslog = []
mysql = ["diesel/mysql", "diesel_migrations/mysql"]
postgresql = ["diesel/postgres", "diesel_migrations/postgres"]
+aws = ["dsql", "s3"]
dsql = ["postgresql", "dep:aws-config", "dep:aws-sdk-dsql"]
sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "dep:libsqlite3-sys"]
+s3 = ["dep:aws-config", "dep:aws-sdk-s3"]
# Enable to use a vendored and statically linked openssl
vendored_openssl = ["openssl/vendored"]
# Enable MiMalloc memory allocator to replace the default malloc
@@ -89,8 +91,9 @@ diesel-derive-newtype = "2.1.2"
# Bundled/Static SQLite
libsqlite3-sys = { version = "0.31.0", features = ["bundled"], optional = true }

-# Amazon Aurora DSQL
+# AWS / Amazon Aurora DSQL
aws-config = { version = "1.5.12", features = ["behavior-version-latest"], optional = true }
+aws-sdk-s3 = { version = "1.72.0", features = ["behavior-version-latest"], optional = true }
aws-sdk-dsql = { version = "1.2.0", features = ["behavior-version-latest"], optional = true }

# Crypto-related libraries

View file

@@ -13,6 +13,10 @@ fn main() {
    println!("cargo:rustc-cfg=dsql");
    #[cfg(feature = "query_logger")]
    println!("cargo:rustc-cfg=query_logger");
+    #[cfg(feature = "s3")]
+    println!("cargo:rustc-cfg=s3");
+    #[cfg(feature = "aws")]
+    println!("cargo:rustc-cfg=aws");

    #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql", feature = "dsql")))]
    compile_error!(
@@ -26,6 +30,8 @@ fn main() {
    println!("cargo::rustc-check-cfg=cfg(postgresql)");
    println!("cargo::rustc-check-cfg=cfg(dsql)");
    println!("cargo::rustc-check-cfg=cfg(query_logger)");
+    println!("cargo::rustc-check-cfg=cfg(s3)");
+    println!("cargo::rustc-check-cfg=cfg(aws)");

    // Rerun when these paths are changed.
    // Someone could have checked-out a tag or specific commit, but no other files changed.
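For orientation, a minimal sketch (not part of this commit; the function names are illustrative) of how the custom cfg flags emitted above are consumed elsewhere in the crate: `cargo:rustc-cfg=s3` makes `#[cfg(s3)]` true at compile time, and the `rustc-check-cfg` lines keep the compiler's unexpected-cfg lint quiet about it.

    // Hypothetical consumer of the build-script cfg flags.
    #[cfg(s3)]
    fn storage_backend() -> &'static str {
        "s3" // compiled only when build.rs emitted `cargo:rustc-cfg=s3`
    }

    #[cfg(not(s3))]
    fn storage_backend() -> &'static str {
        "local" // fallback when the `s3` cfg is absent
    }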

View file

@@ -745,17 +745,17 @@ fn get_diagnostics_http(code: u16, _token: AdminToken) -> EmptyResult {
}

#[post("/config", format = "application/json", data = "<data>")]
-fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
+async fn post_config(data: Json<ConfigBuilder>, _token: AdminToken) -> EmptyResult {
    let data: ConfigBuilder = data.into_inner();
-    if let Err(e) = CONFIG.update_config(data, true) {
+    if let Err(e) = CONFIG.update_config(data, true).await {
        err!(format!("Unable to save config: {e:?}"))
    }
    Ok(())
}

#[post("/config/delete", format = "application/json")]
-fn delete_config(_token: AdminToken) -> EmptyResult {
+async fn delete_config(_token: AdminToken) -> EmptyResult {
-    if let Err(e) = CONFIG.delete_user_config() {
+    if let Err(e) = CONFIG.delete_user_config().await {
        err!(format!("Unable to delete config: {e:?}"))
    }
    Ok(())

View file

@@ -17,6 +17,7 @@ use crate::{
    auth::Headers,
    crypto,
    db::{models::*, DbConn, DbPool},
+    persistent_fs::{canonicalize, create_dir_all, persist_temp_file},
    CONFIG,
};
@@ -110,7 +111,7 @@ async fn sync(
    headers: Headers,
    client_version: Option<ClientVersion>,
    mut conn: DbConn,
-) -> Json<Value> {
+) -> JsonResult {
    let user_json = headers.user.to_json(&mut conn).await;

    // Get all ciphers which are visible by the user
@@ -134,7 +135,7 @@ async fn sync(
    for c in ciphers {
        ciphers_json.push(
            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn)
-                .await,
+                .await?,
        );
    }
@@ -159,7 +160,7 @@ async fn sync(
        api::core::_get_eq_domains(headers, true).into_inner()
    };

-    Json(json!({
+    Ok(Json(json!({
        "profile": user_json,
        "folders": folders_json,
        "collections": collections_json,
@@ -168,11 +169,11 @@ async fn sync(
        "domains": domains_json,
        "sends": sends_json,
        "object": "sync"
-    }))
+    })))
}

#[get("/ciphers")]
-async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
+async fn get_ciphers(headers: Headers, mut conn: DbConn) -> JsonResult {
    let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &mut conn).await;
    let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, CipherSyncType::User, &mut conn).await;
@@ -180,15 +181,15 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json<Value> {
    for c in ciphers {
        ciphers_json.push(
            c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), CipherSyncType::User, &mut conn)
-                .await,
+                .await?,
        );
    }

-    Json(json!({
+    Ok(Json(json!({
        "data": ciphers_json,
        "object": "list",
        "continuationToken": null
-    }))
+    })))
}

#[get("/ciphers/<cipher_id>")]
@@ -201,7 +202,7 @@ async fn get_cipher(cipher_id: CipherId, headers: Headers, mut conn: DbConn) ->
        err!("Cipher is not owned by user")
    }

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[get("/ciphers/<cipher_id>/admin")]
@@ -339,7 +340,7 @@ async fn post_ciphers(data: Json<CipherData>, headers: Headers, mut conn: DbConn
    let mut cipher = Cipher::new(data.r#type, data.name.clone());
    update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

/// Enforces the personal ownership policy on user-owned ciphers, if applicable.
@@ -676,7 +677,7 @@ async fn put_cipher(
    update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherUpdate).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[post("/ciphers/<cipher_id>/partial", data = "<data>")]
@@ -714,7 +715,7 @@ async fn put_cipher_partial(
    // Update favorite
    cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[derive(Deserialize)]
@@ -825,7 +826,7 @@ async fn post_collections_update(
    )
    .await;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[put("/ciphers/<cipher_id>/collections-admin", data = "<data>")]
@@ -1030,7 +1031,7 @@ async fn share_cipher_by_uuid(
    update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?))
}

/// v2 API for downloading an attachment. This just redirects the client to
@@ -1055,7 +1056,7 @@ async fn get_attachment(
    }

    match Attachment::find_by_id(&attachment_id, &mut conn).await {
-        Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))),
+        Some(attachment) if cipher_id == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host).await?)),
        Some(_) => err!("Attachment doesn't belong to cipher"),
        None => err!("Attachment doesn't exist"),
    }
@@ -1116,7 +1117,7 @@ async fn post_attachment_v2(
        "attachmentId": attachment_id,
        "url": url,
        "fileUploadType": FileUploadType::Direct as i32,
-        response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await,
+        response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?,
    })))
}
@@ -1142,7 +1143,7 @@ async fn save_attachment(
    mut conn: DbConn,
    nt: Notify<'_>,
) -> Result<(Cipher, DbConn), crate::error::Error> {
-    let mut data = data.into_inner();
+    let data = data.into_inner();

    let Some(size) = data.data.len().to_i64() else {
        err!("Attachment data size overflow");
@@ -1269,13 +1270,11 @@ async fn save_attachment(
        attachment.save(&mut conn).await.expect("Error saving attachment");
    }

-    let folder_path = tokio::fs::canonicalize(&CONFIG.attachments_folder()).await?.join(cipher_id.as_ref());
+    let folder_path = canonicalize(&CONFIG.attachments_folder()).await?.join(cipher_id.as_ref());
    let file_path = folder_path.join(file_id.as_ref());
-    tokio::fs::create_dir_all(&folder_path).await?;
-
-    if let Err(_err) = data.data.persist_to(&file_path).await {
-        data.data.move_copy_to(file_path).await?
-    }
+    create_dir_all(&folder_path).await?;
+    persist_temp_file(data.data, file_path).await?;

    nt.send_cipher_update(
        UpdateType::SyncCipherUpdate,
@@ -1342,7 +1341,7 @@ async fn post_attachment(

    let (cipher, mut conn) = save_attachment(attachment, cipher_id, data, &headers, conn, nt).await?;

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await?))
}

#[post("/ciphers/<cipher_id>/attachment-admin", format = "multipart/form-data", data = "<data>")]
@@ -1786,7 +1785,7 @@ async fn _restore_cipher_by_uuid(
        .await;
    }

-    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await))
+    Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await?))
}

async fn _restore_multiple_ciphers(
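The `persist_to` / `move_copy_to` fallback deleted above is now centralized in `persistent_fs::persist_temp_file`. The module itself is not shown in this diff; for the local-filesystem case a plausible implementation, mirroring the removed inline logic, would be:

    use rocket::fs::TempFile;
    use std::path::Path;

    // Sketch only. Rocket's `persist_to` fails when the temp dir and the
    // destination live on different filesystems; `move_copy_to` then copies
    // the data instead of renaming it.
    pub async fn persist_temp_file(mut file: TempFile<'_>, path: impl AsRef<Path>) -> std::io::Result<()> {
        if file.persist_to(&path).await.is_err() {
            file.move_copy_to(path).await?;
        }
        Ok(())
    }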

View file

@@ -582,7 +582,7 @@ async fn view_emergency_access(emer_id: EmergencyAccessId, headers: Headers, mut
                CipherSyncType::User,
                &mut conn,
            )
-            .await,
+            .await?,
        );
    }

View file

@@ -15,6 +15,7 @@ use crate::{
        OwnerHeaders,
    },
    db::{models::*, DbConn},
+    error::Error,
    mail,
    util::{convert_json_key_lcase_first, NumberOrString},
    CONFIG,
@@ -901,21 +902,21 @@ async fn get_org_details(data: OrgIdData, headers: OrgMemberHeaders, mut conn: D
    }

    Ok(Json(json!({
-        "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await,
+        "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await?,
        "object": "list",
        "continuationToken": null,
    })))
}

-async fn _get_org_details(org_id: &OrganizationId, host: &str, user_id: &UserId, conn: &mut DbConn) -> Value {
+async fn _get_org_details(org_id: &OrganizationId, host: &str, user_id: &UserId, conn: &mut DbConn) -> Result<Value, Error> {
    let ciphers = Cipher::find_by_org(org_id, conn).await;
    let cipher_sync_data = CipherSyncData::new(user_id, CipherSyncType::Organization, conn).await;

    let mut ciphers_json = Vec::with_capacity(ciphers.len());
    for c in ciphers {
-        ciphers_json.push(c.to_json(host, user_id, Some(&cipher_sync_data), CipherSyncType::Organization, conn).await);
+        ciphers_json.push(c.to_json(host, user_id, Some(&cipher_sync_data), CipherSyncType::Organization, conn).await?);
    }

-    json!(ciphers_json)
+    Ok(json!(ciphers_json))
}

#[derive(FromForm)]
@@ -3317,7 +3318,7 @@ async fn get_org_export(
                "continuationToken": null,
            },
            "ciphers": {
-                "data": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
+                "data": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await?),
                "object": "list",
                "continuationToken": null,
            }
@@ -3326,7 +3327,7 @@ async fn get_org_export(
        // v2023.1.0 and newer response
        Ok(Json(json!({
            "collections": convert_json_key_lcase_first(_get_org_collections(&org_id, &mut conn).await),
-            "ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await),
+            "ciphers": convert_json_key_lcase_first(_get_org_details(&org_id, &headers.host, &headers.user.uuid, &mut conn).await?),
        })))
    }
}

View file

@@ -12,6 +12,8 @@ use crate::{
    api::{ApiResult, EmptyResult, JsonResult, Notify, UpdateType},
    auth::{ClientIp, Headers, Host},
    db::{models::*, DbConn, DbPool},
+    error::Error,
+    persistent_fs::{canonicalize, create_dir_all, download_url, file_exists, persist_temp_file},
    util::NumberOrString,
    CONFIG,
};
@@ -210,7 +212,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
    let UploadData {
        model,
-        mut data,
+        data,
    } = data.into_inner();
    let model = model.into_inner();
@@ -250,13 +252,11 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
    }

    let file_id = crate::crypto::generate_send_file_id();
-    let folder_path = tokio::fs::canonicalize(&CONFIG.sends_folder()).await?.join(&send.uuid);
+    let folder_path = canonicalize(&CONFIG.sends_folder()).await?.join(&send.uuid);
    let file_path = folder_path.join(&file_id);
-    tokio::fs::create_dir_all(&folder_path).await?;
-
-    if let Err(_err) = data.persist_to(&file_path).await {
-        data.move_copy_to(file_path).await?
-    }
+    create_dir_all(&folder_path).await?;
+    persist_temp_file(data, file_path).await?;

    let mut data_value: Value = serde_json::from_str(&send.data)?;
    if let Some(o) = data_value.as_object_mut() {
@@ -363,7 +363,7 @@ async fn post_send_file_v2_data(
) -> EmptyResult {
    enforce_disable_send_policy(&headers, &mut conn).await?;

-    let mut data = data.into_inner();
+    let data = data.into_inner();

    let Some(send) = Send::find_by_uuid_and_user(&send_id, &headers.user.uuid, &mut conn).await else {
        err!("Send not found. Unable to save the file.", "Invalid send uuid or does not belong to user.")
@@ -402,19 +402,18 @@ async fn post_send_file_v2_data(
        err!("Send file size does not match.", format!("Expected a file size of {} got {size}", send_data.size));
    }

-    let folder_path = tokio::fs::canonicalize(&CONFIG.sends_folder()).await?.join(send_id);
+    let folder_path = canonicalize(&CONFIG.sends_folder()).await?.join(send_id);
    let file_path = folder_path.join(file_id);

    // Check if the file already exists, if that is the case do not overwrite it
-    if tokio::fs::metadata(&file_path).await.is_ok() {
-        err!("Send file has already been uploaded.", format!("File {file_path:?} already exists"))
+    match file_exists(&file_path).await {
+        Ok(true) => err!("Send file has already been uploaded.", format!("File {file_path:?} already exists")),
+        Ok(false) => (),
+        Err(e) => err!("Error creating send file.", format!("Error checking if send file {file_path:?} already exists: {e}")),
    }

-    tokio::fs::create_dir_all(&folder_path).await?;
-
-    if let Err(_err) = data.data.persist_to(&file_path).await {
-        data.data.move_copy_to(file_path).await?
-    }
+    create_dir_all(&folder_path).await?;
+    persist_temp_file(data.data, file_path).await?;

    nt.send_send_update(
        UpdateType::SyncSendCreate,
@@ -547,12 +546,22 @@ async fn post_access_file(
    )
    .await;

-    let token_claims = crate::auth::generate_send_claims(&send_id, &file_id);
-    let token = crate::auth::encode_jwt(&token_claims);
+    let file_path = canonicalize(&CONFIG.sends_folder())
+        .await?
+        .join(&send_id)
+        .join(&file_id);
+
+    let url = download_url(file_path, &host.host)
+        .await
+        .map_err(|e| Error::new(
+            "Failed to generate send download URL",
+            format!("Failed to generate send URL for send_id: {send_id}, file_id: {file_id}. Error: {e:?}")
+        ))?;
+
    Ok(Json(json!({
        "object": "send-fileDownload",
        "id": file_id,
-        "url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token)
+        "url": url
    })))
}

View file

@@ -258,7 +258,7 @@ pub(crate) async fn get_duo_keys_email(email: &str, conn: &mut DbConn) -> ApiRes
    }
    .map_res("Can't fetch Duo Keys")?;

-    Ok((data.ik, data.sk, CONFIG.get_duo_akey(), data.host))
+    Ok((data.ik, data.sk, CONFIG.get_duo_akey().await, data.host))
}

pub async fn generate_duo_signature(email: &str, conn: &mut DbConn) -> ApiResult<(String, String)> {

View file

@@ -2,7 +2,7 @@ use std::{
    collections::HashMap,
    net::IpAddr,
    sync::Arc,
-    time::{Duration, SystemTime},
+    time::Duration,
};

use bytes::{Bytes, BytesMut};
@@ -14,15 +14,12 @@ use reqwest::{
    Client, Response,
};
use rocket::{http::ContentType, response::Redirect, Route};
-use tokio::{
-    fs::{create_dir_all, remove_file, symlink_metadata, File},
-    io::{AsyncReadExt, AsyncWriteExt},
-};

use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};

use crate::{
    error::Error,
+    persistent_fs::{create_dir_all, file_is_expired, read, remove_file, write},
    http_client::{get_reqwest_client_builder, should_block_address, CustomHttpClientError},
    util::Cached,
    CONFIG,
@@ -207,23 +204,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
    }

    // Try to read the cached icon, and return it if it exists
-    if let Ok(mut f) = File::open(path).await {
-        let mut buffer = Vec::new();
-
-        if f.read_to_end(&mut buffer).await.is_ok() {
-            return Some(buffer);
-        }
-    }
-
-    None
-}
-
-async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> {
-    let meta = symlink_metadata(path).await?;
-    let modified = meta.modified()?;
-    let age = SystemTime::now().duration_since(modified)?;
-
-    Ok(ttl > 0 && ttl <= age.as_secs())
+    read(path).await.ok()
}

async fn icon_is_negcached(path: &str) -> bool {
@@ -569,13 +550,15 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
}

async fn save_icon(path: &str, icon: &[u8]) {
-    match File::create(path).await {
-        Ok(mut f) => {
-            f.write_all(icon).await.expect("Error writing icon file");
-        }
-        Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => {
+    match write(path, icon).await {
+        Ok(_) => (),
+        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            create_dir_all(&CONFIG.icon_cache_folder()).await.expect("Error creating icon cache folder");
+            if let Err(e) = write(path, icon).await {
+                warn!("Unable to save icon: {:?}", e);
+            }
        },
        Err(e) => {
            warn!("Unable to save icon: {:?}", e);
        }
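`file_is_expired` moves out of this file into `persistent_fs`. For the local-filesystem backend it presumably keeps the removed logic, roughly (a sketch, not code from this commit):

    use std::time::SystemTime;

    // A file is expired when a non-zero TTL is at most the file's age in seconds.
    pub async fn file_is_expired(path: &str, ttl: u64) -> std::io::Result<bool> {
        let meta = tokio::fs::symlink_metadata(path).await?;
        let modified = meta.modified()?;
        let age = SystemTime::now()
            .duration_since(modified)
            .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
        Ok(ttl > 0 && ttl <= age.as_secs())
    }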

View file

@@ -9,8 +9,6 @@ use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use std::{
    env,
-    fs::File,
-    io::{Read, Write},
    net::IpAddr,
};
@@ -18,7 +16,7 @@ use crate::db::models::{
    AttachmentId, CipherId, CollectionId, DeviceId, EmergencyAccessId, MembershipId, OrgApiKeyId, OrganizationId,
    SendFileId, SendId, UserId,
};
-use crate::{error::Error, CONFIG};
+use crate::{error::Error, CONFIG, persistent_fs::{read, write}};

const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
@@ -39,37 +37,31 @@ static JWT_FILE_DOWNLOAD_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|file_do
static PRIVATE_RSA_KEY: OnceCell<EncodingKey> = OnceCell::new();
static PUBLIC_RSA_KEY: OnceCell<DecodingKey> = OnceCell::new();

-pub fn initialize_keys() -> Result<(), Error> {
-    fn read_key(create_if_missing: bool) -> Result<(Rsa<openssl::pkey::Private>, Vec<u8>), Error> {
-        let mut priv_key_buffer = Vec::with_capacity(2048);
-
-        let mut priv_key_file = File::options()
-            .create(create_if_missing)
-            .truncate(false)
-            .read(true)
-            .write(create_if_missing)
-            .open(CONFIG.private_rsa_key())?;
-
-        #[allow(clippy::verbose_file_reads)]
-        let bytes_read = priv_key_file.read_to_end(&mut priv_key_buffer)?;
-
-        let rsa_key = if bytes_read > 0 {
-            Rsa::private_key_from_pem(&priv_key_buffer[..bytes_read])?
-        } else if create_if_missing {
-            // Only create the key if the file doesn't exist or is empty
-            let rsa_key = Rsa::generate(2048)?;
-            priv_key_buffer = rsa_key.private_key_to_pem()?;
-            priv_key_file.write_all(&priv_key_buffer)?;
-            info!("Private key '{}' created correctly", CONFIG.private_rsa_key());
-            rsa_key
-        } else {
-            err!("Private key does not exist or invalid format", CONFIG.private_rsa_key());
-        };
-
-        Ok((rsa_key, priv_key_buffer))
+pub async fn initialize_keys() -> Result<(), Error> {
+    async fn read_key(create_if_missing: bool) -> Result<(Rsa<openssl::pkey::Private>, Vec<u8>), std::io::Error> {
+        let priv_key_buffer = match read(&CONFIG.private_rsa_key()).await {
+            Ok(buffer) => Some(buffer),
+            Err(e) if e.kind() == std::io::ErrorKind::NotFound && create_if_missing => None,
+            Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Err(e),
+            Err(e) => return Err(std::io::Error::new(std::io::ErrorKind::InvalidData, format!("Error reading private key: {e}"))),
+        };
+
+        if let Some(priv_key_buffer) = priv_key_buffer {
+            Ok((Rsa::private_key_from_pem(&priv_key_buffer)?, priv_key_buffer))
+        } else {
+            let rsa_key = Rsa::generate(2048)?;
+            let priv_key_buffer = rsa_key.private_key_to_pem()?;
+            write(&CONFIG.private_rsa_key(), &priv_key_buffer).await?;
+            info!("Private key '{}' created correctly", CONFIG.private_rsa_key());
+            Err(std::io::Error::new(std::io::ErrorKind::NotFound, "Private key created, forcing attempt to read it again"))
+        }
    }

-    let (priv_key, priv_key_buffer) = read_key(true).or_else(|_| read_key(false))?;
+    let (priv_key, priv_key_buffer) = match read_key(true).await {
+        Ok(key) => key,
+        Err(e) if e.kind() == std::io::ErrorKind::NotFound => read_key(false).await?,
+        Err(e) => return Err(e.into()),
+    };

    let pub_key_buffer = priv_key.public_key_to_pem()?;

    let enc = EncodingKey::from_rsa_pem(&priv_key_buffer)?;

24
src/aws.rs (new file)
View file

@@ -0,0 +1,24 @@
+use std::io::{Error, ErrorKind};
+
+// Cache the AWS SDK config, as recommended by the AWS SDK documentation. The
+// initial load is async, so we spawn a thread to load it and then join it to
+// get the result in a blocking fashion.
+static AWS_SDK_CONFIG: std::sync::LazyLock<std::io::Result<aws_config::SdkConfig>> = std::sync::LazyLock::new(|| {
+    std::thread::spawn(|| {
+        let rt = tokio::runtime::Builder::new_current_thread()
+            .enable_all()
+            .build()?;
+        std::io::Result::Ok(rt.block_on(aws_config::load_defaults(aws_config::BehaviorVersion::latest())))
+    })
+    .join()
+    .map_err(|e| Error::new(ErrorKind::Other, format!("Failed to load AWS config for DSQL connection: {e:#?}")))?
+    .map_err(|e| Error::new(ErrorKind::Other, format!("Failed to load AWS config for DSQL connection: {e}")))
+});
+
+pub(crate) fn aws_sdk_config() -> std::io::Result<&'static aws_config::SdkConfig> {
+    (*AWS_SDK_CONFIG).as_ref().map_err(|e| match e.get_ref() {
+        Some(inner) => Error::new(e.kind(), inner),
+        None => Error::from(e.kind()),
+    })
+}
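A natural consumer of this helper, though not shown in this excerpt, is S3 client construction; `aws_sdk_s3::Client::new` borrows an `SdkConfig`, so the cached static can be shared across calls (hypothetical usage sketch):

    // Build an S3 client from the cached, lazily loaded SDK config.
    fn s3_client() -> std::io::Result<aws_sdk_s3::Client> {
        let sdk_config = crate::aws::aws_sdk_config()?;
        Ok(aws_sdk_s3::Client::new(sdk_config))
    }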

View file

@@ -11,6 +11,7 @@ use job_scheduler_ng::Schedule;
use once_cell::sync::Lazy;
use reqwest::Url;

+use crate::persistent_fs::{read, remove_file, write};
use crate::{
    db::DbConnType,
    error::Error,
@@ -25,7 +26,23 @@ static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);

pub static CONFIG: Lazy<Config> = Lazy::new(|| {
-    Config::load().unwrap_or_else(|e| {
+    std::thread::spawn(|| {
+        let rt = tokio::runtime::Builder::new_current_thread()
+            .enable_all()
+            .build()
+            .unwrap_or_else(|e| {
+                println!("Error loading config:\n {e:?}\n");
+                exit(12)
+            });
+        rt.block_on(Config::load())
+            .unwrap_or_else(|e| {
+                println!("Error loading config:\n {e:?}\n");
+                exit(12)
+            })
+    })
+    .join()
+    .unwrap_or_else(|e| {
        println!("Error loading config:\n {e:?}\n");
        exit(12)
    })
@@ -110,8 +127,10 @@ macro_rules! make_config {
            builder
        }

-        fn from_file(path: &str) -> Result<Self, Error> {
-            let config_str = std::fs::read_to_string(path)?;
+        async fn from_file(path: &str) -> Result<Self, Error> {
+            let config_bytes = read(path).await?;
+            let config_str = String::from_utf8(config_bytes)
+                .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string()))?;
            println!("[INFO] Using saved config from `{path}` for configuration.\n");
            serde_json::from_str(&config_str).map_err(Into::into)
        }
@@ -1129,10 +1148,10 @@ fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option<bool>, smtp_explicit_tls
}

impl Config {
-    pub fn load() -> Result<Self, Error> {
+    pub async fn load() -> Result<Self, Error> {
        // Loading from env and file
        let _env = ConfigBuilder::from_env();
-        let _usr = ConfigBuilder::from_file(&CONFIG_FILE).unwrap_or_default();
+        let _usr = ConfigBuilder::from_file(&CONFIG_FILE).await.unwrap_or_default();

        // Create merged config, config file overwrites env
        let mut _overrides = Vec::new();
@@ -1156,7 +1175,7 @@ impl Config {
        })
    }

-    pub fn update_config(&self, other: ConfigBuilder, ignore_non_editable: bool) -> Result<(), Error> {
+    pub async fn update_config(&self, other: ConfigBuilder, ignore_non_editable: bool) -> Result<(), Error> {
        // Remove default values
        //let builder = other.remove(&self.inner.read().unwrap()._env);
@@ -1188,20 +1207,18 @@ impl Config {
        }

        //Save to file
-        use std::{fs::File, io::Write};
-        let mut file = File::create(&*CONFIG_FILE)?;
-        file.write_all(config_str.as_bytes())?;
+        write(&*CONFIG_FILE, config_str.as_bytes()).await?;

        Ok(())
    }

-    fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
+    async fn update_config_partial(&self, other: ConfigBuilder) -> Result<(), Error> {
        let builder = {
            let usr = &self.inner.read().unwrap()._usr;
            let mut _overrides = Vec::new();
            usr.merge(&other, false, &mut _overrides)
        };
-        self.update_config(builder, false)
+        self.update_config(builder, false).await
    }

    /// Tests whether an email's domain is allowed. A domain is allowed if it
@@ -1243,8 +1260,8 @@ impl Config {
        }
    }

-    pub fn delete_user_config(&self) -> Result<(), Error> {
-        std::fs::remove_file(&*CONFIG_FILE)?;
+    pub async fn delete_user_config(&self) -> Result<(), Error> {
+        remove_file(&*CONFIG_FILE).await?;

        // Empty user config
        let usr = ConfigBuilder::default();
@@ -1274,7 +1291,7 @@ impl Config {
        inner._enable_smtp && (inner.smtp_host.is_some() || inner.use_sendmail)
    }

-    pub fn get_duo_akey(&self) -> String {
+    pub async fn get_duo_akey(&self) -> String {
        if let Some(akey) = self._duo_akey() {
            akey
        } else {
@@ -1285,7 +1302,7 @@ impl Config {
                _duo_akey: Some(akey_s.clone()),
                ..Default::default()
            };
-            self.update_config_partial(builder).ok();
+            self.update_config_partial(builder).await.ok();

            akey_s
        }
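The `CONFIG` initializer above reuses the spawn-a-thread-with-its-own-runtime idiom from `src/aws.rs`: a `Lazy` initializer is synchronous, but `Config::load` is now async. Distilled into a standalone helper (a sketch under that assumption, not code from this commit):

    // Run a future to completion from sync code. A fresh thread gets its own
    // single-threaded Tokio runtime, so this works even if the calling thread
    // is already inside a runtime (where a direct `block_on` would panic).
    fn block_on_from_sync<T: Send + 'static>(
        fut: impl std::future::Future<Output = T> + Send + 'static,
    ) -> T {
        std::thread::spawn(move || {
            let rt = tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
                .expect("failed to build Tokio runtime");
            rt.block_on(fut)
        })
        .join()
        .expect("initializer thread panicked")
    }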

View file

@@ -3,7 +3,6 @@ use std::sync::RwLock;
 use diesel::{
     r2d2::{ManageConnection, R2D2Connection},
     ConnectionError,
-    ConnectionResult,
 };
 use url::Url;
@@ -58,22 +57,6 @@ where
     }
 }

-// Cache the AWS SDK config, as recommended by the AWS SDK documentation. The
-// initial load is async, so we spawn a thread to load it and then join it to
-// get the result in a blocking fashion.
-static AWS_SDK_CONFIG: std::sync::LazyLock<ConnectionResult<aws_config::SdkConfig>> = std::sync::LazyLock::new(|| {
-    std::thread::spawn(|| {
-        let rt = tokio::runtime::Builder::new_current_thread()
-            .enable_all()
-            .build()?;
-        std::io::Result::Ok(rt.block_on(aws_config::load_defaults(aws_config::BehaviorVersion::latest())))
-    })
-    .join()
-    .map_err(|e| ConnectionError::BadConnection(format!("Failed to load AWS config for DSQL connection: {e:#?}")))?
-    .map_err(|e| ConnectionError::BadConnection(format!("Failed to load AWS config for DSQL connection: {e}")))
-});
-
 // Generate a Postgres libpq connection string. The input connection string has
 // the following format:
 //
@@ -125,12 +108,8 @@ pub(crate) fn psql_url(url: &str) -> Result<String, ConnectionError> {
         info!("Generating new DSQL auth token for connection '{url}'");
     }

-    // This would be so much easier if ConnectionError implemented Clone.
-    let sdk_config = match *AWS_SDK_CONFIG {
-        Ok(ref sdk_config) => sdk_config.clone(),
-        Err(ConnectionError::BadConnection(ref e)) => return Err(ConnectionError::BadConnection(e.to_owned())),
-        Err(ref e) => unreachable!("Unexpected error loading AWS SDK config: {e}"),
-    };
+    let sdk_config = crate::aws::aws_sdk_config()
+        .map_err(|e| ConnectionError::BadConnection(format!("Failed to load AWS SDK config: {e}")))?;

     let mut psql_url = Url::parse(url).map_err(|e| {
         ConnectionError::InvalidConnectionUrl(e.to_string())
@@ -165,7 +144,7 @@ pub(crate) fn psql_url(url: &str) -> Result<String, ConnectionError> {
             .enable_all()
             .build()?;
-        rt.block_on(signer.db_connect_admin_auth_token(&sdk_config))
+        rt.block_on(signer.db_connect_admin_auth_token(sdk_config))
     })
     .join()
     .map_err(|e| ConnectionError::BadConnection(format!("Failed to generate DSQL auth token: {e:#?}")))?

src/db/models/attachment.rs

@@ -5,6 +5,7 @@ use derive_more::{AsRef, Deref, Display};
 use serde_json::Value;

 use super::{CipherId, OrganizationId, UserId};
+use crate::persistent_fs::{download_url, remove_file};
 use crate::CONFIG;
 use macros::IdFromParam;
@@ -44,29 +45,32 @@ impl Attachment {
         format!("{}/{}/{}", CONFIG.attachments_folder(), self.cipher_uuid, self.id)
     }

-    pub fn get_url(&self, host: &str) -> String {
-        let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
-        format!("{}/attachments/{}/{}?token={}", host, self.cipher_uuid, self.id, token)
+    pub async fn get_url(&self, host: &str) -> Result<String, Error> {
+        download_url(self.get_file_path(), host)
+            .await
+            .map_err(|e| Error::new(
+                "Failed to generate attachment download URL",
+                format!("Failed to generate download URL for attachment cipher_uuid: {}, id: {}. Error: {e:?}", self.cipher_uuid, self.id)
+            ))
     }

-    pub fn to_json(&self, host: &str) -> Value {
-        json!({
+    pub async fn to_json(&self, host: &str) -> Result<Value, Error> {
+        Ok(json!({
             "id": self.id,
-            "url": self.get_url(host),
+            "url": self.get_url(host).await?,
             "fileName": self.file_name,
             "size": self.file_size.to_string(),
             "sizeName": crate::util::get_display_size(self.file_size),
             "key": self.akey,
             "object": "attachment"
-        })
+        }))
     }
 }

-use crate::auth::{encode_jwt, generate_file_download_claims};
 use crate::db::DbConn;

 use crate::api::EmptyResult;
-use crate::error::MapResult;
+use crate::error::{Error, MapResult};

 /// Database methods
 impl Attachment {
@@ -103,6 +107,19 @@ impl Attachment {
     }

     pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult {
+        let file_path = &self.get_file_path();
+
+        if let Err(e) = remove_file(file_path).await {
+            // Ignore "file not found" errors. This can happen when the
+            // upstream caller has already cleaned up the file as part of
+            // its own error handling.
+            if e.kind() == ErrorKind::NotFound {
+                debug!("File '{}' already deleted.", file_path);
+            } else {
+                return Err(e.into());
+            }
+        }
+
         db_run! { conn: {
             let _: () = crate::util::retry(
                 || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn),
@@ -110,19 +127,7 @@ impl Attachment {
             )
             .map_res("Error deleting attachment")?;

-            let file_path = &self.get_file_path();
-
-            match std::fs::remove_file(file_path) {
-                // Ignore "file not found" errors. This can happen when the
-                // upstream caller has already cleaned up the file as part of
-                // its own error handling.
-                Err(e) if e.kind() == ErrorKind::NotFound => {
-                    debug!("File '{}' already deleted.", file_path);
-                    Ok(())
-                }
-                Err(e) => Err(e.into()),
-                _ => Ok(()),
-            }
+            Ok(())
         }}
     }
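
Note the ordering change in `delete`: the attachment file (or S3 object) is now removed before the database row rather than after. With a remote backend a failed delete is more likely, and failing first leaves the row intact so the delete can be retried; the old order could delete the row and then fail the file removal, stranding an orphaned file.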

src/db/models/cipher.rs

@@ -130,7 +130,7 @@ impl Cipher {
 use crate::db::DbConn;

 use crate::api::EmptyResult;
-use crate::error::MapResult;
+use crate::error::{Error, MapResult};

 /// Database methods
 impl Cipher {
@@ -141,18 +141,28 @@ impl Cipher {
         cipher_sync_data: Option<&CipherSyncData>,
         sync_type: CipherSyncType,
         conn: &mut DbConn,
-    ) -> Value {
+    ) -> Result<Value, Error> {
         use crate::util::{format_date, validate_and_format_date};

         let mut attachments_json: Value = Value::Null;
         if let Some(cipher_sync_data) = cipher_sync_data {
             if let Some(attachments) = cipher_sync_data.cipher_attachments.get(&self.uuid) {
-                attachments_json = attachments.iter().map(|c| c.to_json(host)).collect();
+                if !attachments.is_empty() {
+                    let mut attachments_json_vec = vec![];
+                    for attachment in attachments {
+                        attachments_json_vec.push(attachment.to_json(host).await?);
+                    }
+                    attachments_json = Value::Array(attachments_json_vec);
+                }
             }
         } else {
             let attachments = Attachment::find_by_cipher(&self.uuid, conn).await;
             if !attachments.is_empty() {
-                attachments_json = attachments.iter().map(|c| c.to_json(host)).collect()
+                let mut attachments_json_vec = vec![];
+                for attachment in attachments {
+                    attachments_json_vec.push(attachment.to_json(host).await?);
+                }
+                attachments_json = Value::Array(attachments_json_vec);
             }
         }
@@ -384,7 +394,7 @@ impl Cipher {
         };

         json_object[key] = type_data_json;
-        json_object
+        Ok(json_object)
     }

     pub async fn update_users_revision(&self, conn: &mut DbConn) -> Vec<UserId> {
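
The `.map(|c| c.to_json(host)).collect()` chains above are unrolled into explicit `for` loops because `to_json` is now `async` and fallible: `.await` and `?` cannot be used inside a plain iterator closure, so the collection has to be built step by step.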

src/db/models/send.rs

@@ -1,6 +1,7 @@
 use chrono::{NaiveDateTime, Utc};
 use serde_json::Value;

+use crate::persistent_fs::remove_dir_all;
 use crate::util::LowerCase;

 use super::{OrganizationId, User, UserId};
@@ -226,7 +227,7 @@ impl Send {
         self.update_users_revision(conn).await;

         if self.atype == SendType::File as i32 {
-            std::fs::remove_dir_all(std::path::Path::new(&crate::CONFIG.sends_folder()).join(&self.uuid)).ok();
+            remove_dir_all(std::path::Path::new(&crate::CONFIG.sends_folder()).join(&self.uuid)).await.ok();
         }

         db_run! { conn: {

src/main.rs

@@ -29,7 +29,7 @@ extern crate diesel_derive_newtype;
 use std::{
     collections::HashMap,
-    fs::{canonicalize, create_dir_all},
+    fs::canonicalize,
     panic,
     path::Path,
     process::exit,
@@ -45,6 +45,9 @@ use tokio::{
 #[cfg(unix)]
 use tokio::signal::unix::SignalKind;

+#[cfg(any(dsql, s3))]
+mod aws;
+
 #[macro_use]
 mod error;
 mod api;
@@ -53,6 +56,7 @@ mod config;
 mod crypto;
 #[macro_use]
 mod db;
+mod persistent_fs;
 mod http_client;
 mod mail;
 mod ratelimit;
@@ -61,6 +65,7 @@ mod util;
 use crate::api::core::two_factor::duo_oidc::purge_duo_contexts;
 use crate::api::purge_auth_requests;
 use crate::api::{WS_ANONYMOUS_SUBSCRIPTIONS, WS_USERS};
+use crate::persistent_fs::{create_dir_all, path_exists, path_is_dir};
 pub use config::CONFIG;
 pub use error::{Error, MapResult};
 use rocket::data::{Limits, ToByteUnit};
@@ -75,16 +80,16 @@ async fn main() -> Result<(), Error> {
     let level = init_logging()?;

     check_data_folder().await;
-    auth::initialize_keys().unwrap_or_else(|e| {
+    auth::initialize_keys().await.unwrap_or_else(|e| {
         error!("Error creating private key '{}'\n{e:?}\nExiting Vaultwarden!", CONFIG.private_rsa_key());
         exit(1);
     });
     check_web_vault();

-    create_dir(&CONFIG.icon_cache_folder(), "icon cache");
-    create_dir(&CONFIG.tmp_folder(), "tmp folder");
-    create_dir(&CONFIG.sends_folder(), "sends folder");
-    create_dir(&CONFIG.attachments_folder(), "attachments folder");
+    create_dir(&CONFIG.icon_cache_folder(), "icon cache").await;
+    create_dir(&CONFIG.tmp_folder(), "tmp folder").await;
+    create_dir(&CONFIG.sends_folder(), "sends folder").await;
+    create_dir(&CONFIG.attachments_folder(), "attachments folder").await;

     let pool = create_db_pool().await;
     schedule_jobs(pool.clone());
@@ -459,16 +464,16 @@ fn chain_syslog(logger: fern::Dispatch) -> fern::Dispatch {
     }
 }

-fn create_dir(path: &str, description: &str) {
+async fn create_dir(path: &str, description: &str) {
     // Try to create the specified dir, if it doesn't already exist.
     let err_msg = format!("Error creating {description} directory '{path}'");
-    create_dir_all(path).expect(&err_msg);
+    create_dir_all(path).await.expect(&err_msg);
 }

 async fn check_data_folder() {
     let data_folder = &CONFIG.data_folder();
     let path = Path::new(data_folder);
-    if !path.exists() {
+    if !path_exists(path).await.unwrap_or(false) {
         error!("Data folder '{}' doesn't exist.", data_folder);
         if is_running_in_container() {
             error!("Verify that your data volume is mounted at the correct location.");
@@ -477,7 +482,7 @@ async fn check_data_folder() {
         }
         exit(1);
     }
-    if !path.is_dir() {
+    if !path_is_dir(path).await.unwrap_or(false) {
         error!("Data folder '{}' is not a directory.", data_folder);
         exit(1);
     }

src/persistent_fs/local.rs (new file, 141 lines)
use std::{io::{Error, ErrorKind}, path::{Path, PathBuf}, time::SystemTime};

use rocket::fs::TempFile;
use tokio::{fs::{File, OpenOptions}, io::{AsyncReadExt, AsyncWriteExt}};

use super::PersistentFSBackend;

pub(crate) struct LocalFSBackend(String);

impl AsRef<Path> for LocalFSBackend {
    fn as_ref(&self) -> &Path {
        self.0.as_ref()
    }
}

impl PersistentFSBackend for LocalFSBackend {
    fn new<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {
        Ok(Self(path
            .as_ref()
            .to_str()
            .ok_or_else(||
                Error::new(
                    ErrorKind::InvalidInput,
                    format!("Data folder path {:?} is not valid UTF-8", path.as_ref())
                )
            )?
            .to_string()
        ))
    }

    async fn read(self) -> std::io::Result<Vec<u8>> {
        let mut file = File::open(self).await?;
        let mut buffer = Vec::new();
        file.read_to_end(&mut buffer).await?;
        Ok(buffer)
    }

    async fn write(self, buf: &[u8]) -> std::io::Result<()> {
        let mut file = OpenOptions::new().create(true).truncate(true).write(true).open(self).await?;
        file.write_all(buf).await?;
        Ok(())
    }

    async fn path_exists(self) -> std::io::Result<bool> {
        match tokio::fs::metadata(self).await {
            Ok(_) => Ok(true),
            Err(e) => match e.kind() {
                ErrorKind::NotFound => Ok(false),
                _ => Err(e),
            },
        }
    }

    async fn file_exists(self) -> std::io::Result<bool> {
        match tokio::fs::metadata(self).await {
            Ok(metadata) => Ok(metadata.is_file()),
            Err(e) => match e.kind() {
                ErrorKind::NotFound => Ok(false),
                _ => Err(e),
            },
        }
    }

    async fn path_is_dir(self) -> std::io::Result<bool> {
        match tokio::fs::metadata(self).await {
            Ok(metadata) => Ok(metadata.is_dir()),
            Err(e) => match e.kind() {
                ErrorKind::NotFound => Ok(false),
                _ => Err(e),
            },
        }
    }

    async fn canonicalize(self) -> std::io::Result<PathBuf> {
        tokio::fs::canonicalize(self).await
    }

    async fn create_dir_all(self) -> std::io::Result<()> {
        tokio::fs::create_dir_all(self).await
    }

    async fn persist_temp_file(self, mut temp_file: TempFile<'_>) -> std::io::Result<()> {
        if temp_file.persist_to(&self).await.is_err() {
            temp_file.move_copy_to(self).await?;
        }
        Ok(())
    }

    async fn remove_file(self) -> std::io::Result<()> {
        tokio::fs::remove_file(self).await
    }

    async fn remove_dir_all(self) -> std::io::Result<()> {
        tokio::fs::remove_dir_all(self).await
    }

    async fn last_modified(self) -> std::io::Result<SystemTime> {
        tokio::fs::symlink_metadata(self)
            .await?
            .modified()
    }

    async fn download_url(self, local_host: &str) -> std::io::Result<String> {
        use std::sync::LazyLock;

        use crate::{
            auth::{encode_jwt, generate_file_download_claims, generate_send_claims},
            db::models::{AttachmentId, CipherId, SendId, SendFileId},
            CONFIG
        };

        let LocalFSBackend(path) = self;

        static ATTACHMENTS_PREFIX: LazyLock<String> = LazyLock::new(|| format!("{}/", CONFIG.attachments_folder()));
        static SENDS_PREFIX: LazyLock<String> = LazyLock::new(|| format!("{}/", CONFIG.sends_folder()));

        if path.starts_with(&*ATTACHMENTS_PREFIX) {
            let attachment_parts = path.trim_start_matches(&*ATTACHMENTS_PREFIX).split('/').collect::<Vec<&str>>();

            let [cipher_uuid, attachment_id] = attachment_parts[..] else {
                return Err(Error::new(ErrorKind::InvalidInput, format!("Attachment path {path:?} does not match a known download URL path pattern")));
            };

            let token = encode_jwt(&generate_file_download_claims(CipherId::from(cipher_uuid.to_string()), AttachmentId(attachment_id.to_string())));

            Ok(format!("{}/attachments/{}/{}?token={}", local_host, cipher_uuid, attachment_id, token))
        } else if path.starts_with(&*SENDS_PREFIX) {
            let send_parts = path.trim_start_matches(&*SENDS_PREFIX).split('/').collect::<Vec<&str>>();

            let [send_id, file_id] = send_parts[..] else {
                return Err(Error::new(ErrorKind::InvalidInput, format!("Send path {path:?} does not match a known download URL path pattern")));
            };

            let token = encode_jwt(&generate_send_claims(&SendId::from(send_id.to_string()), &SendFileId::from(file_id.to_string())));

            Ok(format!("{}/api/sends/{}/{}?t={}", local_host, send_id, file_id, token))
        } else {
            Err(Error::new(ErrorKind::InvalidInput, format!("Data folder path {path:?} does not match a known download URL path pattern")))
        }
    }
}
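
To make the prefix matching in `download_url` concrete, here is a standalone sketch of the splitting step (the function name and example folder are hypothetical; the real code takes the prefixes from `CONFIG` and then signs a JWT into the URL):

    /// Splits "<prefix><a>/<b>" into its two trailing components, mirroring the
    /// `[cipher_uuid, attachment_id]` / `[send_id, file_id]` destructuring above.
    fn split_two(path: &str, prefix: &str) -> Option<(String, String)> {
        let parts: Vec<&str> = path.strip_prefix(prefix)?.split('/').collect();
        match parts[..] {
            [a, b] => Some((a.to_string(), b.to_string())),
            _ => None,
        }
    }

    fn main() {
        // With attachments_folder() == "data/attachments", an attachment path
        // yields the (cipher_uuid, attachment_id) pair that ends up in the URL.
        let pair = split_two("data/attachments/ciph-uuid/attach-id", "data/attachments/");
        assert_eq!(pair, Some(("ciph-uuid".into(), "attach-id".into())));
    }

Anything deeper than two components, or a path outside both prefixes, is rejected with `ErrorKind::InvalidInput` rather than guessed at.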

src/persistent_fs/mod.rs (new file, 316 lines)
mod local;
#[cfg(s3)]
mod s3;

use std::{io::{Error, ErrorKind}, path::{Path, PathBuf}, time::SystemTime};

use rocket::fs::TempFile;

enum FSType {
    Local(local::LocalFSBackend),
    #[cfg(s3)]
    S3(s3::S3FSBackend),
}

pub(crate) trait PersistentFSBackend: Sized {
    fn new<P: AsRef<Path>>(path: P) -> std::io::Result<Self>;
    async fn read(self) -> std::io::Result<Vec<u8>>;
    async fn write(self, buf: &[u8]) -> std::io::Result<()>;
    async fn path_exists(self) -> std::io::Result<bool>;
    async fn file_exists(self) -> std::io::Result<bool>;
    async fn path_is_dir(self) -> std::io::Result<bool>;
    async fn canonicalize(self) -> std::io::Result<PathBuf>;
    async fn create_dir_all(self) -> std::io::Result<()>;
    async fn persist_temp_file(self, temp_file: TempFile<'_>) -> std::io::Result<()>;
    async fn remove_file(self) -> std::io::Result<()>;
    async fn remove_dir_all(self) -> std::io::Result<()>;
    async fn last_modified(self) -> std::io::Result<SystemTime>;
    async fn download_url(self, local_host: &str) -> std::io::Result<String>;
}

impl PersistentFSBackend for FSType {
    fn new<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {
        #[cfg(s3)]
        if path.as_ref().starts_with("s3://") {
            return Ok(FSType::S3(s3::S3FSBackend::new(path)?));
        }
        Ok(FSType::Local(local::LocalFSBackend::new(path)?))
    }

    async fn read(self) -> std::io::Result<Vec<u8>> {
        match self {
            FSType::Local(fs) => fs.read().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.read().await,
        }
    }

    async fn write(self, buf: &[u8]) -> std::io::Result<()> {
        match self {
            FSType::Local(fs) => fs.write(buf).await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.write(buf).await,
        }
    }

    async fn path_exists(self) -> std::io::Result<bool> {
        match self {
            FSType::Local(fs) => fs.path_exists().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.path_exists().await,
        }
    }

    async fn file_exists(self) -> std::io::Result<bool> {
        match self {
            FSType::Local(fs) => fs.file_exists().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.file_exists().await,
        }
    }

    async fn path_is_dir(self) -> std::io::Result<bool> {
        match self {
            FSType::Local(fs) => fs.path_is_dir().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.path_is_dir().await,
        }
    }

    async fn canonicalize(self) -> std::io::Result<PathBuf> {
        match self {
            FSType::Local(fs) => fs.canonicalize().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.canonicalize().await,
        }
    }

    async fn create_dir_all(self) -> std::io::Result<()> {
        match self {
            FSType::Local(fs) => fs.create_dir_all().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.create_dir_all().await,
        }
    }

    async fn persist_temp_file(self, temp_file: TempFile<'_>) -> std::io::Result<()> {
        match self {
            FSType::Local(fs) => fs.persist_temp_file(temp_file).await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.persist_temp_file(temp_file).await,
        }
    }

    async fn remove_file(self) -> std::io::Result<()> {
        match self {
            FSType::Local(fs) => fs.remove_file().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.remove_file().await,
        }
    }

    async fn remove_dir_all(self) -> std::io::Result<()> {
        match self {
            FSType::Local(fs) => fs.remove_dir_all().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.remove_dir_all().await,
        }
    }

    async fn last_modified(self) -> std::io::Result<SystemTime> {
        match self {
            FSType::Local(fs) => fs.last_modified().await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.last_modified().await,
        }
    }

    async fn download_url(self, local_host: &str) -> std::io::Result<String> {
        match self {
            FSType::Local(fs) => fs.download_url(local_host).await,
            #[cfg(s3)]
            FSType::S3(fs) => fs.download_url(local_host).await,
        }
    }
}

/// Reads the contents of a file at the given path.
///
/// # Arguments
///
/// * `path` - A reference to the path of the file to read.
///
/// # Returns
///
/// * `std::io::Result<Vec<u8>>` - A result containing a vector of bytes with the
///   file contents if successful, or an I/O error.
pub(crate) async fn read<P: AsRef<Path>>(path: P) -> std::io::Result<Vec<u8>> {
    FSType::new(path)?.read().await
}

/// Writes data to a file at the given path.
///
/// If the file does not exist, it will be created. If it does exist, it will be
/// overwritten.
///
/// # Arguments
///
/// * `path` - A reference to the path of the file to write.
/// * `buf` - A byte slice containing the data to write to the file.
///
/// # Returns
///
/// * `std::io::Result<()>` - A result indicating success or an I/O error.
pub(crate) async fn write<P: AsRef<Path>>(path: P, buf: &[u8]) -> std::io::Result<()> {
    FSType::new(path)?.write(buf).await
}

/// Checks whether a path exists.
///
/// This function returns `true` in all cases where the path exists, including
/// as a file, directory, or symlink.
///
/// # Arguments
///
/// * `path` - A reference to the path to check.
///
/// # Returns
///
/// * `std::io::Result<bool>` - A result containing a boolean value indicating
///   whether the path exists.
pub(crate) async fn path_exists<P: AsRef<Path>>(path: P) -> std::io::Result<bool> {
    FSType::new(path)?.path_exists().await
}

/// Checks whether a regular file exists at the given path.
///
/// This function returns `false` if the path is a symlink.
///
/// # Arguments
///
/// * `path` - A reference to the path to check.
///
/// # Returns
///
/// * `std::io::Result<bool>` - A result containing a boolean value indicating
///   whether a regular file exists at the given path.
pub(crate) async fn file_exists<P: AsRef<Path>>(path: P) -> std::io::Result<bool> {
    FSType::new(path)?.file_exists().await
}

/// Checks whether a directory exists at the given path.
///
/// This function returns `false` if the path is a symlink.
///
/// # Arguments
///
/// * `path` - A reference to the path to check.
///
/// # Returns
///
/// * `std::io::Result<bool>` - A result containing a boolean value indicating
///   whether a directory exists at the given path.
pub(crate) async fn path_is_dir<P: AsRef<Path>>(path: P) -> std::io::Result<bool> {
    FSType::new(path)?.path_is_dir().await
}

/// Canonicalizes the given path.
///
/// This function resolves the given path to an absolute path, eliminating any
/// symbolic links and relative path components.
///
/// # Arguments
///
/// * `path` - A reference to the path to canonicalize.
///
/// # Returns
///
/// * `std::io::Result<PathBuf>` - A result containing the canonicalized path if successful,
///   or an I/O error.
pub(crate) async fn canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
    FSType::new(path)?.canonicalize().await
}

/// Creates a directory and all its parent components as needed.
///
/// # Arguments
///
/// * `path` - A reference to the path of the directory to create.
///
/// # Returns
///
/// * `std::io::Result<()>` - A result indicating success or an I/O error.
pub(crate) async fn create_dir_all<P: AsRef<Path>>(path: P) -> std::io::Result<()> {
    FSType::new(path)?.create_dir_all().await
}

/// Persists a temporary file to a permanent location.
///
/// # Arguments
///
/// * `temp_file` - The temporary file to persist.
/// * `path` - A reference to the path where the file should be persisted.
///
/// # Returns
///
/// * `std::io::Result<()>` - A result indicating success or an I/O error.
pub(crate) async fn persist_temp_file<P: AsRef<Path>>(temp_file: TempFile<'_>, path: P) -> std::io::Result<()> {
    FSType::new(path)?.persist_temp_file(temp_file).await
}

/// Removes a file at the given path.
///
/// # Arguments
///
/// * `path` - A reference to the path of the file to remove.
///
/// # Returns
///
/// * `std::io::Result<()>` - A result indicating success or an I/O error.
pub(crate) async fn remove_file<P: AsRef<Path>>(path: P) -> std::io::Result<()> {
    FSType::new(path)?.remove_file().await
}

/// Removes a directory and all its contents at the given path.
///
/// # Arguments
///
/// * `path` - A reference to the path of the directory to remove.
///
/// # Returns
///
/// * `std::io::Result<()>` - A result indicating success or an I/O error.
pub(crate) async fn remove_dir_all<P: AsRef<Path>>(path: P) -> std::io::Result<()> {
    FSType::new(path)?.remove_dir_all().await
}

pub(crate) async fn file_is_expired<P: AsRef<Path>>(path: P, ttl: u64) -> Result<bool, Error> {
    let path = path.as_ref();
    let modified = FSType::new(path)?.last_modified().await?;
    let age = SystemTime::now().duration_since(modified)
        .map_err(|e| Error::new(
            ErrorKind::InvalidData,
            format!("Failed to determine file age for {path:?} from last modified timestamp '{modified:#?}': {e:?}")
        ))?;
    Ok(ttl > 0 && ttl <= age.as_secs())
}

/// Generates a pre-signed url to download attachment and send files.
///
/// # Arguments
///
/// * `path` - A reference to the path of the file to read.
/// * `local_host` - This API server host.
///
/// # Returns
///
/// * `std::io::Result<String>` - A result containing the url if successful, or an I/O error.
pub(crate) async fn download_url<P: AsRef<Path>>(path: P, local_host: &str) -> std::io::Result<String> {
    FSType::new(path)?.download_url(local_host).await
}
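
A hedged usage sketch of this facade (bucket and paths are hypothetical): the same calls serve both backends because `FSType::new` dispatches on the `s3://` prefix, so call sites such as `config.rs` and `attachment.rs` above never name a backend:

    async fn example() -> std::io::Result<()> {
        // Local backend: a plain filesystem path.
        crate::persistent_fs::write("data/attachments/ciph/att", b"bytes").await?;

        // S3 backend (only when built with the `s3` cfg): same call, s3:// URL.
        crate::persistent_fs::write("s3://my-bucket/attachments/ciph/att", b"bytes").await?;
        let bytes = crate::persistent_fs::read("s3://my-bucket/attachments/ciph/att").await?;
        assert_eq!(bytes, b"bytes");
        crate::persistent_fs::remove_file("s3://my-bucket/attachments/ciph/att").await?;
        Ok(())
    }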

src/persistent_fs/s3.rs (new file, 316 lines)
use std::{io::{Error, ErrorKind}, path::{Path, PathBuf}, time::SystemTime};

use aws_sdk_s3::{client::Client, primitives::ByteStream, types::StorageClass::IntelligentTiering};
use rocket::{fs::TempFile, http::ContentType};
use tokio::{fs::File, io::AsyncReadExt};
use url::Url;

use crate::aws::aws_sdk_config;

use super::PersistentFSBackend;

pub(crate) struct S3FSBackend {
    path: PathBuf,
    bucket: String,
    key: String,
}

fn s3_client() -> std::io::Result<Client> {
    static AWS_S3_CLIENT: std::sync::LazyLock<std::io::Result<Client>> = std::sync::LazyLock::new(|| {
        Ok(Client::new(aws_sdk_config()?))
    });

    (*AWS_S3_CLIENT)
        .as_ref()
        .map(|client| client.clone())
        .map_err(|e| match e.get_ref() {
            Some(inner) => Error::new(e.kind(), inner),
            None => Error::from(e.kind()),
        })
}

impl PersistentFSBackend for S3FSBackend {
    fn new<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {
        let path = path.as_ref();

        let url = Url::parse(path.to_str().ok_or_else(|| Error::new(ErrorKind::InvalidInput, "Invalid path"))?)
            .map_err(|e| Error::new(ErrorKind::InvalidInput, format!("Invalid data folder S3 URL {path:?}: {e}")))?;

        let bucket = url.host_str()
            .ok_or_else(|| Error::new(ErrorKind::InvalidInput, format!("Missing Bucket name in data folder S3 URL {path:?}")))?
            .to_string();

        let key = url.path().trim_start_matches('/').to_string();

        Ok(S3FSBackend {
            path: path.to_path_buf(),
            bucket,
            key,
        })
    }

    async fn read(self) -> std::io::Result<Vec<u8>> {
        let S3FSBackend { path, key, bucket } = self;

        let result = s3_client()?
            .get_object()
            .bucket(bucket)
            .key(key)
            .send()
            .await;

        match result {
            Ok(response) => {
                let mut buffer = Vec::new();
                response.body.into_async_read().read_to_end(&mut buffer).await?;
                Ok(buffer)
            }
            Err(e) => {
                if let Some(service_error) = e.as_service_error() {
                    if service_error.is_no_such_key() {
                        Err(Error::new(ErrorKind::NotFound, format!("Data folder S3 object {path:?} not found")))
                    } else {
                        Err(Error::other(format!("Failed to request data folder S3 object {path:?}: {e:?}")))
                    }
                } else {
                    Err(Error::other(format!("Failed to request data folder S3 object {path:?}: {e:?}")))
                }
            }
        }
    }

    async fn write(self, buf: &[u8]) -> std::io::Result<()> {
        let S3FSBackend { path, key, bucket } = self;

        let content_type = Path::new(&key)
            .extension()
            .and_then(|ext| ext.to_str())
            .and_then(|ext| ContentType::from_extension(ext))
            .map(|t| t.to_string());

        s3_client()?
            .put_object()
            .bucket(bucket)
            .set_content_type(content_type)
            .key(key)
            .storage_class(IntelligentTiering)
            .body(ByteStream::from(buf.to_vec()))
            .send()
            .await
            .map_err(|e| Error::other(format!("Failed to write to data folder S3 object {path:?}: {e:?}")))?;

        Ok(())
    }

    async fn path_exists(self) -> std::io::Result<bool> {
        Ok(true)
    }

    async fn file_exists(self) -> std::io::Result<bool> {
        let S3FSBackend { path, key, bucket } = self;

        match s3_client()?
            .head_object()
            .bucket(bucket)
            .key(key)
            .send()
            .await {
            Ok(_) => Ok(true),
            Err(e) => {
                if let Some(service_error) = e.as_service_error() {
                    if service_error.is_not_found() {
                        Ok(false)
                    } else {
                        Err(Error::other(format!("Failed to request data folder S3 object {path:?}: {e:?}")))
                    }
                } else {
                    Err(Error::other(format!("Failed to request data folder S3 object {path:?}: {e:?}")))
                }
            }
        }
    }

    async fn path_is_dir(self) -> std::io::Result<bool> {
        Ok(true)
    }

    async fn canonicalize(self) -> std::io::Result<PathBuf> {
        Ok(self.path)
    }

    async fn create_dir_all(self) -> std::io::Result<()> {
        Ok(())
    }

    async fn persist_temp_file(self, temp_file: TempFile<'_>) -> std::io::Result<()> {
        let S3FSBackend { path, key, bucket } = self;

        // We want to stream the TempFile directly to S3 without copying it into
        // another memory buffer. The official AWS SDK makes it easy to stream
        // from a `tokio::fs::File`, but does not have a reasonable way to stream
        // from an `impl AsyncBufRead`.
        //
        // A TempFile's contents may be saved in memory or on disk. We use the
        // SDK to stream the file if we can access it on disk, otherwise we fall
        // back to a second copy in memory.
        let file = match temp_file.path() {
            Some(path) => File::open(path).await.ok(),
            None => None,
        };

        let byte_stream = match file {
            Some(file) => ByteStream::read_from().file(file).build().await.ok(),
            None => None,
        };

        let byte_stream = match byte_stream {
            Some(byte_stream) => byte_stream,
            None => {
                // TODO: Implement a mechanism to stream the file directly to S3
                // without buffering it again in memory. This would require
                // chunking it into a multi-part upload. See example here:
                // https://imfeld.dev/writing/rust_s3_streaming_upload
                let mut read_stream = temp_file.open().await?;
                let mut buf = Vec::with_capacity(temp_file.len() as usize);
                read_stream.read_to_end(&mut buf).await?;
                ByteStream::from(buf)
            }
        };

        let content_type = temp_file
            .content_type()
            .map(|t| t.to_string())
            .or_else(||
                temp_file.name()
                    .and_then(|name| Path::new(name).extension())
                    .and_then(|ext| ext.to_str())
                    .and_then(|ext| ContentType::from_extension(ext))
                    .map(|t| t.to_string())
            );

        s3_client()?
            .put_object()
            .bucket(bucket)
            .key(key)
            .storage_class(IntelligentTiering)
            .set_content_type(content_type)
            .body(byte_stream)
            .send()
            .await
            .map_err(|e| Error::other(format!("Failed to write to data folder S3 object {path:?}: {e:?}")))?;

        Ok(())
    }

    async fn remove_file(self) -> std::io::Result<()> {
        let S3FSBackend { path, key, bucket } = self;

        s3_client()?
            .delete_object()
            .bucket(bucket)
            .key(key)
            .send()
            .await
            .map_err(|e| Error::other(format!("Failed to delete data folder S3 object {path:?}: {e:?}")))?;

        Ok(())
    }

    async fn remove_dir_all(self) -> std::io::Result<()> {
        use aws_sdk_s3::types::{Delete, ObjectIdentifier};

        let S3FSBackend { path, key: prefix, bucket } = self;

        let s3_client = s3_client()?;

        let mut list_response = s3_client
            .list_objects_v2()
            .bucket(bucket.clone())
            .prefix(format!("{prefix}/"))
            .into_paginator()
            .send();

        while let Some(list_result) = list_response.next().await {
            let list_result = list_result
                .map_err(|e| Error::other(format!("Failed to list data folder S3 objects with prefix {path:?}/ intended for deletion: {e:?}")))?;

            let objects = list_result
                .contents
                .ok_or_else(|| Error::other(format!("Failed to list data folder S3 objects with prefix {path:?}/ intended for deletion: Missing contents")))?;

            let keys = objects.into_iter()
                .map(|object| object.key
                    .ok_or_else(|| Error::other(format!("Failed to list data folder S3 objects with prefix {path:?}/ intended for deletion: An object is missing its key")))
                )
                .collect::<std::io::Result<Vec<_>>>()?;

            let mut delete = Delete::builder().quiet(true);

            for key in keys {
                delete = delete.objects(
                    ObjectIdentifier::builder()
                        .key(key)
                        .build()
                        .map_err(|e| Error::other(format!("Failed to delete data folder S3 objects with prefix {path:?}/: {e:?}")))?
                );
            }

            let delete = delete
                .build()
                .map_err(|e| Error::other(format!("Failed to delete data folder S3 objects with prefix {path:?}/: {e:?}")))?;

            s3_client
                .delete_objects()
                .bucket(bucket.clone())
                .delete(delete)
                .send()
                .await
                .map_err(|e| Error::other(format!("Failed to delete data folder S3 objects with prefix {path:?}/: {e:?}")))?;
        }

        Ok(())
    }

    async fn last_modified(self) -> std::io::Result<SystemTime> {
        let S3FSBackend { path, key, bucket } = self;

        let response = s3_client()?
            .head_object()
            .bucket(bucket)
            .key(key)
            .send()
            .await
            .map_err(|e| match e.as_service_error() {
                Some(service_error) if service_error.is_not_found() => Error::new(ErrorKind::NotFound, format!("Failed to get metadata for data folder S3 object {path:?}: Object does not exist")),
                Some(service_error) => Error::other(format!("Failed to get metadata for data folder S3 object {path:?}: {service_error:?}")),
                None => Error::other(format!("Failed to get metadata for data folder S3 object {path:?}: {e:?}")),
            })?;

        let last_modified = response.last_modified
            .ok_or_else(|| Error::new(ErrorKind::NotFound, format!("Failed to get metadata for data folder S3 object {path:?}: Missing last modified data")))?;

        SystemTime::try_from(last_modified)
            .map_err(|e| Error::new(ErrorKind::InvalidData, format!("Failed to parse last modified date for data folder S3 object {path:?}: {e:?}")))
    }

    async fn download_url(self, _local_host: &str) -> std::io::Result<String> {
        use std::time::Duration;
        use aws_sdk_s3::presigning::PresigningConfig;

        let S3FSBackend { path, key, bucket } = self;

        s3_client()?
            .get_object()
            .bucket(bucket)
            .key(key)
            .presigned(
                PresigningConfig::expires_in(Duration::from_secs(5 * 60))
                    .map_err(|e| Error::other(
                        format!("Failed to generate presigned config for GetObject URL for data folder S3 object {path:?}: {e:?}")
                    ))?
            )
            .await
            .map(|presigned| presigned.uri().to_string())
            .map_err(|e| Error::other(format!("Failed to generate presigned URL for GetObject for data folder S3 object {path:?}: {e:?}")))
    }
}
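
The TODO in `persist_temp_file` above points at multi-part uploads as the way to stream large files without a second in-memory copy. A rough sketch of that approach with aws-sdk-s3, not part of this commit (chunking and error handling reduced to the essentials; S3 requires every part except the last to be at least 5 MiB):

    use aws_sdk_s3::{
        primitives::ByteStream,
        types::{CompletedMultipartUpload, CompletedPart},
        Client,
    };

    /// Uploads `chunks` as a single S3 object via a multi-part upload.
    async fn multipart_upload(client: &Client, bucket: &str, key: &str, chunks: Vec<Vec<u8>>) -> std::io::Result<()> {
        let upload = client.create_multipart_upload().bucket(bucket).key(key).send().await
            .map_err(|e| std::io::Error::other(format!("create_multipart_upload: {e:?}")))?;
        let upload_id = upload.upload_id().ok_or_else(|| std::io::Error::other("missing upload id"))?.to_string();

        let mut parts = Vec::new();
        for (i, chunk) in chunks.into_iter().enumerate() {
            let part_number = (i + 1) as i32; // S3 part numbers are 1-based
            let part = client.upload_part()
                .bucket(bucket).key(key).upload_id(&upload_id).part_number(part_number)
                .body(ByteStream::from(chunk))
                .send().await
                .map_err(|e| std::io::Error::other(format!("upload_part {part_number}: {e:?}")))?;
            parts.push(CompletedPart::builder()
                .part_number(part_number)
                .set_e_tag(part.e_tag().map(str::to_string))
                .build());
        }

        client.complete_multipart_upload()
            .bucket(bucket).key(key).upload_id(upload_id)
            .multipart_upload(CompletedMultipartUpload::builder().set_parts(Some(parts)).build())
            .send().await
            .map_err(|e| std::io::Error::other(format!("complete_multipart_upload: {e:?}")))?;
        Ok(())
    }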

src/util.rs

@@ -82,6 +82,12 @@ impl Fairing for AppHeaders {
             // 2FA/MFA Site check: api.2fa.directory
             // # Mail Relay: https://bitwarden.com/blog/add-privacy-and-security-using-email-aliases-with-bitwarden/
             // app.simplelogin.io, app.addy.io, api.fastmail.com, quack.duckduckgo.com
+
+            #[cfg(s3)]
+            let s3_connect_src = "https://*.amazonaws.com";
+            #[cfg(not(s3))]
+            let s3_connect_src = "";
+
             let csp = format!(
                 "default-src 'none'; \
                 font-src 'self'; \
@@ -108,6 +114,7 @@ impl Fairing for AppHeaders {
                 https://app.addy.io/api/ \
                 https://api.fastmail.com/ \
                 https://api.forwardemail.net \
+                {s3_connect_src} \
                 {allowed_connect_src};\
                 ",
                 icon_service_csp = CONFIG._icon_service_csp(),