
Merge branch 'BlackDex-sql-optimizations' into main

This commit is contained in:
Daniel García 2022-05-11 21:33:43 +02:00
Commit 7f61dd5fe3
No GPG key could be found for this signature
GPG key ID: FC8A7D14C3CD543A
38 changed files with 6084 additions and 5835 deletions

View file

@@ -46,7 +46,7 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0
+uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
 # End Checkout the repo
@@ -140,7 +140,7 @@ jobs:
 # Upload artifact to Github Actions
 - name: Upload artifact
-uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2 # v2.3.1
+uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535 # v3.0.0
 with:
 name: vaultwarden-${{ matrix.target-triple }}${{ matrix.ext }}
 path: target/${{ matrix.target-triple }}/release/vaultwarden${{ matrix.ext }}

View file

@@ -16,18 +16,18 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0
+uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
 # End Checkout the repo
-# Download hadolint
+# Download hadolint - https://github.com/hadolint/hadolint/releases
 - name: Download hadolint
 shell: bash
 run: |
 sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
 sudo chmod +x /usr/local/bin/hadolint
 env:
-HADOLINT_VERSION: 2.8.0
+HADOLINT_VERSION: 2.10.0
 # End Download hadolint
 # Test Dockerfiles

View file

@@ -31,7 +31,7 @@ jobs:
 steps:
 - name: Skip Duplicates Actions
 id: skip_check
-uses: fkirc/skip-duplicate-actions@f75dd6564bb646f95277dc8c3b80612e46a4a1ea # v3.4.1
+uses: fkirc/skip-duplicate-actions@9d116fa7e55f295019cfab7e3ab72b478bcf7fdd # v4.0.0
 with:
 cancel_others: 'true'
 # Only run this when not creating a tag
@@ -60,13 +60,13 @@ jobs:
 steps:
 # Checkout the repo
 - name: Checkout
-uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # v2.4.0
+uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2
 with:
 fetch-depth: 0
 # Login to Docker Hub
 - name: Login to Docker Hub
-uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # v1.12.0
+uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # v2.0.0
 with:
 username: ${{ secrets.DOCKERHUB_USERNAME }}
 password: ${{ secrets.DOCKERHUB_TOKEN }}

View file

@@ -1,7 +1,7 @@
 ---
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v4.1.0
+rev: v4.2.0
 hooks:
 - id: check-yaml
 - id: check-json

Cargo.lock (generated file, 231 changes)
View file

@@ -307,7 +307,7 @@ dependencies = [
 "cached_proc_macro",
 "cached_proc_macro_types",
 "futures",
-"hashbrown 0.12.0",
+"hashbrown 0.12.1",
 "lazy_static",
 "once_cell",
 "thiserror",
@@ -508,12 +508,12 @@ dependencies = [
 [[package]]
 name = "cron"
-version = "0.9.0"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e009ed0b762cf7a967a34dfdc67d5967d3f828f12901d37081432c3dd1668f8f"
+checksum = "d76219e9243e100d5a37676005f08379297f8addfebc247613299600625c734d"
 dependencies = [
 "chrono",
-"nom 4.1.1",
+"nom",
 "once_cell",
 ]
@@ -568,9 +568,9 @@ dependencies = [
 [[package]]
 name = "ctrlc"
-version = "3.2.1"
+version = "3.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a19c6cedffdc8c03a3346d723eb20bd85a13362bb96dc2ac000842c6381ec7bf"
+checksum = "b37feaa84e6861e00a1f5e5aa8da3ee56d605c9992d33e082786754828e20865"
 dependencies = [
 "nix",
 "winapi 0.3.9",
@@ -613,13 +613,13 @@ dependencies = [
 [[package]]
 name = "dashmap"
-version = "5.2.0"
+version = "5.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c8858831f7781322e539ea39e72449c46b059638250c14344fec8d0aa6e539c"
+checksum = "391b56fbd302e585b7a9494fb70e40949567b1cf9003a8e4a6041a1687c26573"
 dependencies = [
 "cfg-if 1.0.0",
-"num_cpus",
+"hashbrown 0.12.1",
-"parking_lot 0.12.0",
+"lock_api",
 ]
 [[package]]
@@ -1157,9 +1157,9 @@ checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
 [[package]]
 name = "hashbrown"
-version = "0.12.0"
+version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c21d40587b92fa6a6c6e3c1bdbf87d75511db5672f9c93175574b3a00df1758"
+checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3"
 [[package]]
 name = "heck"
@@ -1236,9 +1236,9 @@ dependencies = [
 [[package]]
 name = "http"
-version = "0.2.6"
+version = "0.2.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03"
+checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb"
 dependencies = [
 "bytes 1.1.0",
 "fnv",
@@ -1258,9 +1258,9 @@ dependencies = [
 [[package]]
 name = "httparse"
-version = "1.7.0"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6330e8a36bd8c859f3fa6d9382911fbb7147ec39807f63b923933a247240b9ba"
+checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c"
 [[package]]
 name = "httpdate"
@@ -1401,7 +1401,7 @@ checksum = "c9447923c57a8a2d5c1b0875cdf96a6324275df728b498f2ede0e5cbde088a15"
 [[package]]
 name = "job_scheduler"
 version = "1.2.1"
-source = "git+https://github.com/jjlin/job_scheduler?rev=ee023418dbba2bfe1e30a5fd7d937f9e33739806#ee023418dbba2bfe1e30a5fd7d937f9e33739806"
+source = "git+https://github.com/BlackDex/job_scheduler?rev=9100fc596a083fd9c0b560f8f11f108e0a19d07e#9100fc596a083fd9c0b560f8f11f108e0a19d07e"
 dependencies = [
 "chrono",
 "cron",
@@ -1455,9 +1455,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
 [[package]]
 name = "lettre"
-version = "0.10.0-rc.5"
+version = "0.10.0-rc.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5144148f337be14dabfc0f0d85b691a68ac6c77ef22a5c47c5504b70a7c9fcf3"
+checksum = "2f6c70001f7ee6c93b6687a06607c7a38f9a7ae460139a496c23da21e95bc289"
 dependencies = [
 "base64",
 "email-encoding",
@@ -1467,7 +1467,7 @@ dependencies = [
 "idna 0.2.3",
 "mime",
 "native-tls",
-"nom 7.1.1",
+"nom",
 "once_cell",
 "quoted_printable",
 "regex",
@@ -1477,15 +1477,15 @@ dependencies = [
 [[package]]
 name = "libc"
-version = "0.2.124"
+version = "0.2.125"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21a41fed9d98f27ab1c6d161da622a4fa35e8a54a8adc24bbf3ddd0ef70b0e50"
+checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.24"
+version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7705fc40f6ed493f73584abbb324e74f96b358ff60dfe5659a0f8fc12c590a69"
+checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c"
 dependencies = [
 "cc",
 ]
@@ -1519,9 +1519,9 @@ dependencies = [
 [[package]]
 name = "log"
-version = "0.4.16"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
 dependencies = [
 "cfg-if 1.0.0",
 ]
@@ -1594,18 +1594,9 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
 [[package]]
 name = "memchr"
-version = "2.4.1"
+version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
-[[package]]
-name = "memoffset"
-version = "0.6.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
-dependencies = [
-"autocfg",
-]
 [[package]]
 name = "migrations_internals"
@@ -1630,9 +1621,9 @@ dependencies = [
 [[package]]
 name = "mimalloc"
-version = "0.1.28"
+version = "0.1.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0dfa131390c2f6bdb3242f65ff271fcdaca5ff7b6c08f28398be7f2280e3926"
+checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698"
 dependencies = [
 "libmimalloc-sys",
 ]
@@ -1785,15 +1776,13 @@ dependencies = [
 [[package]]
 name = "nix"
-version = "0.23.1"
+version = "0.24.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f866317acbd3a240710c63f065ffb1e4fd466259045ccb504130b7f668f35c6"
+checksum = "8f17df307904acd05aa8e32e97bb20f2a0df1728bbc2d771ae8f9a90463441e9"
 dependencies = [
 "bitflags",
-"cc",
 "cfg-if 1.0.0",
 "libc",
-"memoffset",
 ]
 [[package]]
@@ -1802,15 +1791,6 @@ version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c"
-[[package]]
-name = "nom"
-version = "4.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c349f68f25f596b9f44cf0e7c69752a5c633b0550c3ff849518bfba0233774a"
-dependencies = [
-"memchr",
-]
 [[package]]
 name = "nom"
 version = "7.1.1"
@@ -1860,9 +1840,9 @@ dependencies = [
 [[package]]
 name = "num-integer"
-version = "0.1.44"
+version = "0.1.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
 dependencies = [
 "autocfg",
 "num-traits",
@@ -1870,9 +1850,9 @@ dependencies = [
 [[package]]
 name = "num-traits"
-version = "0.2.14"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
 dependencies = [
 "autocfg",
 ]
@@ -1889,9 +1869,9 @@ dependencies = [
 [[package]]
 name = "num_threads"
-version = "0.1.5"
+version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0"
+checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
 dependencies = [
 "libc",
 ]
@@ -1925,18 +1905,30 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
 [[package]]
 name = "openssl"
-version = "0.10.38"
+version = "0.10.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95"
+checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e"
 dependencies = [
 "bitflags",
 "cfg-if 1.0.0",
 "foreign-types",
 "libc",
 "once_cell",
+"openssl-macros",
 "openssl-sys",
 ]
+[[package]]
+name = "openssl-macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c"
+dependencies = [
+"proc-macro2",
+"quote",
+"syn",
+]
 [[package]]
 name = "openssl-probe"
 version = "0.1.5"
@@ -1954,9 +1946,9 @@ dependencies = [
 [[package]]
 name = "openssl-sys"
-version = "0.9.72"
+version = "0.9.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb"
+checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0"
 dependencies = [
 "autocfg",
 "cc",
@@ -2021,7 +2013,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
 dependencies = [
 "lock_api",
-"parking_lot_core 0.9.2",
+"parking_lot_core 0.9.3",
 ]
 [[package]]
@@ -2052,9 +2044,9 @@ dependencies = [
 [[package]]
 name = "parking_lot_core"
-version = "0.9.2"
+version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "995f667a6c822200b0433ac218e05582f0e2efa1b922a3fd2fbaadc5f87bab37"
+checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
 dependencies = [
 "cfg-if 1.0.0",
 "libc",
@@ -2212,9 +2204,9 @@ checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
 [[package]]
 name = "pin-project-lite"
-version = "0.2.8"
+version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
 [[package]]
 name = "pin-utils"
@@ -2263,9 +2255,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
 [[package]]
 name = "proc-macro2"
-version = "1.0.37"
+version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1"
+checksum = "9027b48e9d4c9175fa2218adf3557f91c1137021739951d4932f5f8268ac48aa"
 dependencies = [
 "unicode-xid",
 ]
@@ -2503,18 +2495,18 @@ dependencies = [
 [[package]]
 name = "ref-cast"
-version = "1.0.6"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "300f2a835d808734ee295d45007adacb9ebb29dd3ae2424acfa17930cae541da"
+checksum = "685d58625b6c2b83e4cc88a27c4bf65adb7b6b16dbdc413e515c9405b47432ab"
 dependencies = [
 "ref-cast-impl",
 ]
 [[package]]
 name = "ref-cast-impl"
-version = "1.0.6"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c38e3aecd2b21cb3959637b883bb3714bc7e43f0268b9a29d3743ee3e55cdd2"
+checksum = "a043824e29c94169374ac5183ac0ed43f5724dc4556b19568007486bd840fa1f"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -2648,7 +2640,7 @@ dependencies = [
 [[package]]
 name = "rocket"
 version = "0.5.0-rc.1"
-source = "git+https://github.com/SergioBenitez/Rocket?rev=6bdd2f818642683b3aadbda51d7573abefe045ab#6bdd2f818642683b3aadbda51d7573abefe045ab"
+source = "git+https://github.com/SergioBenitez/Rocket?rev=761ffb009ea9d35c32d3c8eecd948ec4434cd0a3#761ffb009ea9d35c32d3c8eecd948ec4434cd0a3"
 dependencies = [
 "async-stream",
 "async-trait",
@@ -2686,7 +2678,7 @@ dependencies = [
 [[package]]
 name = "rocket_codegen"
 version = "0.5.0-rc.1"
-source = "git+https://github.com/SergioBenitez/Rocket?rev=6bdd2f818642683b3aadbda51d7573abefe045ab#6bdd2f818642683b3aadbda51d7573abefe045ab"
+source = "git+https://github.com/SergioBenitez/Rocket?rev=761ffb009ea9d35c32d3c8eecd948ec4434cd0a3#761ffb009ea9d35c32d3c8eecd948ec4434cd0a3"
 dependencies = [
 "devise",
 "glob",
@@ -2701,10 +2693,11 @@ dependencies = [
 [[package]]
 name = "rocket_http"
 version = "0.5.0-rc.1"
-source = "git+https://github.com/SergioBenitez/Rocket?rev=6bdd2f818642683b3aadbda51d7573abefe045ab#6bdd2f818642683b3aadbda51d7573abefe045ab"
+source = "git+https://github.com/SergioBenitez/Rocket?rev=761ffb009ea9d35c32d3c8eecd948ec4434cd0a3#761ffb009ea9d35c32d3c8eecd948ec4434cd0a3"
 dependencies = [
 "cookie 0.16.0",
 "either",
+"futures",
 "http",
 "hyper",
 "indexmap",
@@ -2755,9 +2748,9 @@ dependencies = [
 [[package]]
 name = "rustls-pemfile"
-version = "0.3.0"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360"
+checksum = "e7522c9de787ff061458fe9a829dc790a3f5b22dc571694fc5883f448b94d9a9"
 dependencies = [
 "base64",
 ]
@@ -2864,9 +2857,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 [[package]]
 name = "serde"
-version = "1.0.136"
+version = "1.0.137"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
 dependencies = [
 "serde_derive",
 ]
@@ -2883,9 +2876,9 @@ dependencies = [
 [[package]]
 name = "serde_derive"
-version = "1.0.136"
+version = "1.0.137"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -2894,9 +2887,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.79"
+version = "1.0.81"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
 dependencies = [
 "itoa",
 "ryu",
@@ -3095,9 +3088,9 @@ dependencies = [
 [[package]]
 name = "state"
-version = "0.5.2"
+version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87cf4f5369e6d3044b5e365c9690f451516ac8f0954084622b49ea3fde2f6de5"
+checksum = "dbe866e1e51e8260c9eed836a042a5e7f6726bb2b411dffeaa712e19c388f23b"
 dependencies = [
 "loom",
 ]
@@ -3165,9 +3158,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
 [[package]]
 name = "syn"
-version = "1.0.91"
+version = "1.0.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d"
+checksum = "7ff7c592601f11445996a06f8ad0c27f094a58857c2f89e97974ab9235b92c52"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -3203,18 +3196,18 @@ dependencies = [
 [[package]]
 name = "thiserror"
-version = "1.0.30"
+version = "1.0.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
+checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
 dependencies = [
 "thiserror-impl",
 ]
 [[package]]
 name = "thiserror-impl"
-version = "1.0.30"
+version = "1.0.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
+checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -3308,9 +3301,9 @@ dependencies = [
 [[package]]
 name = "tinyvec"
-version = "1.5.1"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
 dependencies = [
 "tinyvec_macros",
 ]
@@ -3323,9 +3316,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 [[package]]
 name = "tokio"
-version = "1.17.0"
+version = "1.18.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2af73ac49756f3f7c01172e34a23e5d0216f6c32333757c2c61feb2bbff5a5ee"
+checksum = "dce653fb475565de9f6fb0614b28bca8df2c430c0cf84bcd9c843f15de5414cc"
 dependencies = [
 "bytes 1.1.0",
 "libc",
@@ -3364,9 +3357,9 @@ dependencies = [
 [[package]]
 name = "tokio-rustls"
-version = "0.23.3"
+version = "0.23.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4151fda0cf2798550ad0b34bcfc9b9dcc2a9d2471c895c68f3a8818e54f2389e"
+checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
 dependencies = [
 "rustls",
 "tokio",
@@ -3466,9 +3459,9 @@ dependencies = [
 [[package]]
 name = "tracing-attributes"
-version = "0.1.20"
+version = "0.1.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e65ce065b4b5c53e73bb28912318cb8c9e9ad3921f1d669eb0e68b4c8143a2b"
+checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -3598,9 +3591,9 @@ dependencies = [
 [[package]]
 name = "unicode-bidi"
-version = "0.3.7"
+version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
 [[package]]
 name = "unicode-normalization"
@@ -3613,9 +3606,9 @@ dependencies = [
 [[package]]
 name = "unicode-xid"
-version = "0.2.2"
+version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
 [[package]]
 name = "universal-hash"
@@ -3659,9 +3652,9 @@ dependencies = [
 [[package]]
 name = "uuid"
-version = "0.8.2"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+checksum = "8cfcd319456c4d6ea10087ed423473267e1a071f3bc0aa89f80d60997843c6f0"
 dependencies = [
 "getrandom 0.2.6",
 ]
@@ -3863,7 +3856,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90b266eccb4b32595876f5c73ea443b0516da0b1df72ca07bc08ed9ba7f96ec1"
 dependencies = [
 "base64",
-"nom 7.1.1",
+"nom",
 "openssl",
 "rand 0.8.5",
 "serde",
@@ -3936,9 +3929,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 [[package]]
 name = "windows-sys"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5acdd78cb4ba54c0045ac14f62d8f94a03d10047904ae2a40afa1e99d8f70825"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
 dependencies = [
 "windows_aarch64_msvc",
 "windows_i686_gnu",
@@ -3949,33 +3942,33 @@ dependencies = [
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17cffbe740121affb56fad0fc0e421804adf0ae00891205213b5cecd30db881d"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
 [[package]]
 name = "windows_i686_gnu"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2564fde759adb79129d9b4f54be42b32c89970c18ebf93124ca8870a498688ed"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
 [[package]]
 name = "windows_i686_msvc"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cd9d32ba70453522332c14d38814bceeb747d80b3958676007acadd7e166956"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfce6deae227ee8d356d19effc141a509cc503dfd1f850622ec4b0f84428e1f4"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.34.0"
+version = "0.36.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d19538ccc21819d01deaf88d6a17eae6596a12e9aafdbb97916fb49896d89de9"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
 [[package]]
 name = "winreg"

View file

@@ -3,7 +3,7 @@ name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
 edition = "2021"
-rust-version = "1.59"
+rust-version = "1.60"
 resolver = "2"
 repository = "https://github.com/dani-garcia/vaultwarden"
@@ -35,9 +35,10 @@ syslog = "6.0.1" # Needs to be v4 until fern is updated
 [dependencies]
 # Logging
-log = "0.4.16"
+log = "0.4.17"
 fern = { version = "0.6.1", features = ["syslog-6"] }
 tracing = { version = "0.1.34", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
 backtrace = "0.3.65" # Logging panics to logfile instead stderr only
 # A `dotenv` implementation for Rust
@@ -47,7 +48,7 @@ dotenvy = { version = "0.15.1", default-features = false }
 once_cell = "1.10.0"
 # Numerical libraries
-num-traits = "0.2.14"
+num-traits = "0.2.15"
 num-derive = "0.3.3"
 # Web framework
@@ -60,14 +61,14 @@ chashmap = "2.2.2" # Concurrent hashmap implementation
 # Async futures
 futures = "0.3.21"
-tokio = { version = "1.17.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot"] }
+tokio = { version = "1.18.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time"] }
 # A generic serialization/deserialization framework
-serde = { version = "1.0.136", features = ["derive"] }
+serde = { version = "1.0.137", features = ["derive"] }
-serde_json = "1.0.79"
+serde_json = "1.0.81"
 # A safe, extensible ORM and Query builder
-diesel = { version = "1.4.8", features = [ "chrono", "r2d2"] }
+diesel = { version = "1.4.8", features = ["chrono", "r2d2"] }
 diesel_migrations = "1.4.0"
 # Bundled SQLite
@@ -78,7 +79,7 @@ rand = "0.8.5"
 ring = "0.16.20"
 # UUID generation
-uuid = { version = "0.8.2", features = ["v4"] }
+uuid = { version = "1.0.0", features = ["v4"] }
 # Date and time libraries
 chrono = { version = "0.4.19", features = ["clock", "serde"], default-features = false }
@@ -107,8 +108,8 @@ webauthn-rs = "0.3.2"
 url = "2.2.2"
 # Email libraries
-lettre = { version = "0.10.0-rc.5", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
 idna = "0.2.3" # Punycode conversion
+lettre = { version = "0.10.0-rc.6", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false }
 percent-encoding = "2.1.0" # URL encoding library used for URL's in the emails
 # Template library
@@ -129,7 +130,7 @@ cookie = "0.16.0"
 cookie_store = "0.16.0"
 # Used by U2F, JWT and Postgres
-openssl = "0.10.38"
+openssl = "0.10.40"
 # CLI argument parsing
 pico-args = "0.4.2"
@@ -139,21 +140,22 @@ paste = "1.0.7"
 governor = "0.4.2"
 # Capture CTRL+C
-ctrlc = { version = "3.2.1", features = ["termination"] }
+ctrlc = { version = "3.2.2", features = ["termination"] }
 # Allow overriding the default memory allocator
 # Mainly used for the musl builds, since the default musl malloc is very slow
-mimalloc = { version = "0.1.28", features = ["secure"], default-features = false, optional = true }
+mimalloc = { version = "0.1.29", features = ["secure"], default-features = false, optional = true }
 [patch.crates-io]
-rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '6bdd2f818642683b3aadbda51d7573abefe045ab' }
+rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '761ffb009ea9d35c32d3c8eecd948ec4434cd0a3' }
 # The maintainer of the `job_scheduler` crate doesn't seem to have responded
 # to any issues or PRs for almost a year (as of April 2021). This hopefully
 # temporary fork updates Cargo.toml to use more up-to-date dependencies.
 # In particular, `cron` has since implemented parsing of some common syntax
 # that wasn't previously supported (https://github.com/zslayton/cron/pull/64).
-job_scheduler = { git = 'https://github.com/jjlin/job_scheduler', rev = 'ee023418dbba2bfe1e30a5fd7d937f9e33739806' }
+# 2022-05-04: Forked/Updated the job_scheduler again use the latest dependencies and some fixes.
+job_scheduler = { git = 'https://github.com/BlackDex/job_scheduler', rev = '9100fc596a083fd9c0b560f8f11f108e0a19d07e' }
 # Strip debuginfo from the release builds
 # Also enable thin LTO for some optimizations

View file

@@ -3,7 +3,7 @@
 # This file was generated using a Jinja2 template.
 # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles.
-{% set build_stage_base_image = "rust:1.59-bullseye" %}
+{% set build_stage_base_image = "rust:1.60-bullseye" %}
 {% if "alpine" in target_file %}
 {% if "amd64" in target_file %}
 {% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-stable" %}

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -27,7 +27,7 @@
 FROM vaultwarden/web-vault@sha256:ad3b47c152206f25f2d2b70a93e68650a90d5c8105b894814f9bc7599517a603 as vault
 ########################## BUILD IMAGE ##########################
-FROM rust:1.59-bullseye as build
+FROM rust:1.60-bullseye as build

View file

@@ -1,7 +1,7 @@
-#version = "One"
+# version = "Two"
 edition = "2021"
 max_width = 120
 newline_style = "Unix"
 use_small_heuristics = "Off"
-#struct_lit_single_line = false
+# struct_lit_single_line = false
-#overflow_delimited_expr = true
+# overflow_delimited_expr = true

View file

@@ -101,30 +101,36 @@ struct SyncData {
 async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> {
 let user_json = headers.user.to_json(&conn).await;
-let folders = Folder::find_by_user(&headers.user.uuid, &conn).await;
-let folders_json: Vec<Value> = folders.iter().map(Folder::to_json).collect();
-let collections_json = stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await)
+// Get all ciphers which are visible by the user
+let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await;
+let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
+// Lets generate the ciphers_json using all the gathered info
+let ciphers_json: Vec<Value> = stream::iter(ciphers)
 .then(|c| async {
 let c = c; // Move out this single variable
-c.to_json_details(&headers.user.uuid, &conn).await
+c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
 })
-.collect::<Vec<Value>>()
+.collect()
 .await;
-let policies = OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn);
-let policies_json: Vec<Value> = policies.await.iter().map(OrgPolicy::to_json).collect();
-let ciphers_json = stream::iter(Cipher::find_by_user_visible(&headers.user.uuid, &conn).await)
+let collections_json: Vec<Value> = stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await)
 .then(|c| async {
 let c = c; // Move out this single variable
-c.to_json(&headers.host, &headers.user.uuid, &conn).await
+c.to_json_details(&headers.user.uuid, Some(&cipher_sync_data), &conn).await
 })
-.collect::<Vec<Value>>()
+.collect()
 .await;
-let sends = Send::find_by_user(&headers.user.uuid, &conn);
-let sends_json: Vec<Value> = sends.await.iter().map(|s| s.to_json()).collect();
+let folders_json: Vec<Value> =
+Folder::find_by_user(&headers.user.uuid, &conn).await.iter().map(Folder::to_json).collect();
+let sends_json: Vec<Value> =
+Send::find_by_user(&headers.user.uuid, &conn).await.iter().map(Send::to_json).collect();
+let policies_json: Vec<Value> =
+OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn).await.iter().map(OrgPolicy::to_json).collect();
 let domains_json = if data.exclude_domains {
 Value::Null
@@ -147,10 +153,13 @@ async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json<Value> {
 #[get("/ciphers")]
 async fn get_ciphers(headers: Headers, conn: DbConn) -> Json<Value> {
-let ciphers_json = stream::iter(Cipher::find_by_user_visible(&headers.user.uuid, &conn).await)
+let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await;
+let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
+let ciphers_json = stream::iter(ciphers)
 .then(|c| async {
 let c = c; // Move out this single variable
-c.to_json(&headers.host, &headers.user.uuid, &conn).await
+c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
 })
 .collect::<Vec<Value>>()
 .await;
@@ -173,7 +182,7 @@ async fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult
 err!("Cipher is not owned by user")
 }
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
 }
 #[get("/ciphers/<uuid>/admin")]
@@ -303,7 +312,7 @@ async fn post_ciphers(data: JsonUpcase<CipherData>, headers: Headers, conn: DbCo
 let mut cipher = Cipher::new(data.Type, data.Name.clone());
 update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate).await?;
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
 }
 /// Enforces the personal ownership policy on user-owned ciphers, if applicable.
@@ -582,7 +591,7 @@ async fn put_cipher(
 update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate).await?;
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
 }
 #[derive(Deserialize)]
@@ -797,7 +806,7 @@ async fn share_cipher_by_uuid(
 )
 .await?;
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await))
 }
 /// v2 API for downloading an attachment. This just redirects the client to
@@ -866,7 +875,7 @@ async fn post_attachment_v2(
 "AttachmentId": attachment_id,
 "Url": url,
 "FileUploadType": FileUploadType::Direct as i32,
-response_key: cipher.to_json(&headers.host, &headers.user.uuid, &conn).await,
+response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await,
 })))
 }
@@ -1035,7 +1044,7 @@ async fn post_attachment(
 let (cipher, conn) = save_attachment(attachment, uuid, data, &headers, conn, nt).await?;
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await))
 }
 #[post("/ciphers/<uuid>/attachment-admin", format = "multipart/form-data", data = "<data>")]
@@ -1399,7 +1408,7 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, n
 cipher.save(conn).await?;
 nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await);
-Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn).await))
+Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await))
 }
 async fn _restore_multiple_ciphers(
@@ -1463,3 +1472,66 @@ async fn _delete_cipher_attachment_by_id(
 nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await);
 Ok(())
 }
+/// This will hold all the necessary data to improve a full sync of all the ciphers
+/// It can be used during the `Cipher::to_json()` call.
+/// It will prevent the so called N+1 SQL issue by running just a few queries which will hold all the data needed.
+/// This will not improve the speed of a single cipher.to_json() call that much, so better not to use it for those calls.
+pub struct CipherSyncData {
+pub cipher_attachments: HashMap<String, Vec<Attachment>>,
+pub cipher_folders: HashMap<String, String>,
+pub cipher_favorites: HashSet<String>,
+pub cipher_collections: HashMap<String, Vec<String>>,
+pub user_organizations: HashMap<String, UserOrganization>,
+pub user_collections: HashMap<String, CollectionUser>,
+}
+impl CipherSyncData {
+pub async fn new(user_uuid: &str, ciphers: &Vec<Cipher>, conn: &DbConn) -> Self {
+// Generate a list of Cipher UUID's to be used during a query filter with an eq_any.
+let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.to_string()).collect::<Vec<String>>().await;
+// Generate a list of Cipher UUID's containing a Vec with one or more Attachment records
+let mut cipher_attachments: HashMap<String, Vec<Attachment>> = HashMap::new();
+for attachment in Attachment::find_all_by_ciphers(&cipher_uuids, conn).await {
+cipher_attachments.entry(attachment.cipher_uuid.to_string()).or_default().push(attachment);
+}
+// Generate a HashMap with the Cipher UUID as key and the Folder UUID as value
+let cipher_folders: HashMap<String, String> =
+stream::iter(FolderCipher::find_by_user(user_uuid, conn).await).collect().await;
+// Generate a HashSet of all the Cipher UUID's which are marked as favorite
+let cipher_favorites: HashSet<String> =
+stream::iter(Favorite::get_all_cipher_uuid_by_user(user_uuid, conn).await).collect().await;
+// Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's
+let mut cipher_collections: HashMap<String, Vec<String>> = HashMap::new();
+for (cipher, collection) in Cipher::get_collections_with_cipher_by_user(user_uuid, conn).await {
+cipher_collections.entry(cipher).or_default().push(collection);
+}
+// Generate a HashMap with the Organization UUID as key and the UserOrganization record
+let user_organizations: HashMap<String, UserOrganization> =
+stream::iter(UserOrganization::find_by_user(user_uuid, conn).await)
+.map(|uo| (uo.org_uuid.to_string(), uo))
+.collect()
+.await;
+// Generate a HashMap with the User_Collections UUID as key and the CollectionUser record
+let user_collections: HashMap<String, CollectionUser> =
+stream::iter(CollectionUser::find_by_user(user_uuid, conn).await)
+.map(|uc| (uc.collection_uuid.to_string(), uc))
+.collect()
+.await;
+Self {
+cipher_attachments,
+cipher_folders,
+cipher_favorites,
+cipher_collections,
+user_organizations,
+user_collections,
+}
+}
+}
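The doc comment on CipherSyncData above describes the prefetch-and-index idea: run a handful of bulk queries up front, group the returned rows by their parent key, and then serve every per-cipher lookup from memory instead of issuing one query per cipher (the N+1 pattern). As a rough, self-contained sketch of that grouping step in plain Rust — the Attachment struct and the in-memory Vec below are illustrative stand-ins, not Vaultwarden's diesel models or query API:

// Illustrative sketch only: group rows fetched by one bulk query under their parent key.
use std::collections::HashMap;

struct Attachment {
    cipher_uuid: String,
    file_name: String,
}

// Build a cipher_uuid -> attachments index, mirroring the cipher_attachments map above.
fn index_by_cipher(all: Vec<Attachment>) -> HashMap<String, Vec<Attachment>> {
    let mut map: HashMap<String, Vec<Attachment>> = HashMap::new();
    for a in all {
        map.entry(a.cipher_uuid.clone()).or_default().push(a);
    }
    map
}

fn main() {
    // Pretend this Vec came back from a single `WHERE cipher_uuid IN (...)` query.
    let fetched = vec![
        Attachment { cipher_uuid: "c1".into(), file_name: "a.txt".into() },
        Attachment { cipher_uuid: "c1".into(), file_name: "b.txt".into() },
        Attachment { cipher_uuid: "c2".into(), file_name: "c.txt".into() },
    ];
    let by_cipher = index_by_cipher(fetched);
    // One in-memory lookup per cipher instead of one SQL query per cipher.
    assert_eq!(by_cipher.get("c1").map(|v| v.len()), Some(2));
    assert!(by_cipher.get("c3").is_none());
}

The same entry().or_default().push() grouping is what CipherSyncData::new uses above for cipher_attachments and cipher_collections.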

View file

@@ -5,7 +5,7 @@ use serde_json::Value;
 use std::borrow::Borrow;
 use crate::{
-api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString},
+api::{core::CipherSyncData, EmptyResult, JsonResult, JsonUpcase, NumberOrString},
 auth::{decode_emergency_access_invite, Headers},
 db::{models::*, DbConn, DbPool},
 mail, CONFIG,
@@ -595,10 +595,13 @@ async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn)
 err!("Emergency access not valid.")
 }
-let ciphers_json = stream::iter(Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await)
+let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await;
+let cipher_sync_data = CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, &conn).await;
+let ciphers_json = stream::iter(ciphers)
 .then(|c| async {
 let c = c; // Move out this single variable
-c.to_json(&host, &emergency_access.grantor_uuid, &conn).await
+c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &conn).await
 })
 .collect::<Vec<Value>>()
 .await;

View file

@@ -7,6 +7,7 @@ mod sends;
 pub mod two_factor;
 pub use ciphers::purge_trashed_ciphers;
+pub use ciphers::CipherSyncData;
 pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
 pub use sends::purge_sends;
 pub use two_factor::send_incomplete_2fa_notifications;

View file

@@ -4,7 +4,10 @@ use rocket::Route;
 use serde_json::Value;
 use crate::{
-api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType},
+api::{
+core::CipherSyncData, EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData,
+UpdateType,
+},
 auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
 db::{models::*, DbConn},
 mail, CONFIG,
@@ -483,10 +486,13 @@ struct OrgIdData {
 #[get("/ciphers/organization-details?<data..>")]
 async fn get_org_details(data: OrgIdData, headers: Headers, conn: DbConn) -> Json<Value> {
-let ciphers_json = stream::iter(Cipher::find_by_org(&data.organization_id, &conn).await)
+let ciphers = Cipher::find_by_org(&data.organization_id, &conn).await;
+let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, &conn).await;
+let ciphers_json = stream::iter(ciphers)
 .then(|c| async {
 let c = c; // Move out this single variable
-c.to_json(&headers.host, &headers.user.uuid, &conn).await
+c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await
 })
 .collect::<Vec<Value>>()
 .await;

View file

@ -206,16 +206,16 @@ macro_rules! db_run {
// Different code for each db // Different code for each db
( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{ ( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{
#[allow(unused)] use diesel::prelude::*; #[allow(unused)] use diesel::prelude::*;
#[allow(unused)] use crate::db::FromDb; #[allow(unused)] use $crate::db::FromDb;
let conn = $conn.conn.clone(); let conn = $conn.conn.clone();
let mut conn = conn.lock_owned().await; let mut conn = conn.lock_owned().await;
match conn.as_mut().expect("internal invariant broken: self.connection is Some") { match conn.as_mut().expect("internal invariant broken: self.connection is Some") {
$($( $($(
#[cfg($db)] #[cfg($db)]
crate::db::DbConnInner::$db($conn) => { $crate::db::DbConnInner::$db($conn) => {
paste::paste! { paste::paste! {
#[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *}; #[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *};
#[allow(unused)] use [<__ $db _model>]::*; #[allow(unused)] use [<__ $db _model>]::*;
} }
@ -227,16 +227,16 @@ macro_rules! db_run {
( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{ ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{
#[allow(unused)] use diesel::prelude::*; #[allow(unused)] use diesel::prelude::*;
#[allow(unused)] use crate::db::FromDb; #[allow(unused)] use $crate::db::FromDb;
let conn = $conn.conn.clone(); let conn = $conn.conn.clone();
let mut conn = conn.lock_owned().await; let mut conn = conn.lock_owned().await;
match conn.as_mut().expect("internal invariant broken: self.connection is Some") { match conn.as_mut().expect("internal invariant broken: self.connection is Some") {
$($( $($(
#[cfg($db)] #[cfg($db)]
crate::db::DbConnInner::$db($conn) => { $crate::db::DbConnInner::$db($conn) => {
paste::paste! { paste::paste! {
#[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *}; #[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *};
// @ RAW: #[allow(unused)] use [<__ $db _model>]::*; // @ RAW: #[allow(unused)] use [<__ $db _model>]::*;
} }
@ -297,7 +297,7 @@ macro_rules! db_object {
paste::paste! { paste::paste! {
#[allow(unused)] use super::*; #[allow(unused)] use super::*;
#[allow(unused)] use diesel::prelude::*; #[allow(unused)] use diesel::prelude::*;
#[allow(unused)] use crate::db::[<__ $db _schema>]::*; #[allow(unused)] use $crate::db::[<__ $db _schema>]::*;
$( #[$attr] )* $( #[$attr] )*
pub struct [<$name Db>] { $( pub struct [<$name Db>] { $(
@ -309,7 +309,7 @@ macro_rules! db_object {
#[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } } #[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } }
} }
impl crate::db::FromDb for [<$name Db>] { impl $crate::db::FromDb for [<$name Db>] {
type Output = super::$name; type Output = super::$name;
#[allow(clippy::wrong_self_convention)] #[allow(clippy::wrong_self_convention)]
#[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } } #[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } }
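Editor's note: the only change in these macros is `crate::` becoming `$crate::`. `$crate` makes the expanded paths resolve relative to the crate that defines the macro, so the expansion keeps working regardless of where the macro is invoked from. A tiny self-contained illustration of the difference (hypothetical `helper` function, unrelated to the db macros):

```rust
// `helper` lives at the root of the crate that defines the macro.
pub fn helper() -> &'static str {
    "resolved via $crate"
}

// With plain `crate::helper()` the path would be resolved in the *calling*
// crate; `$crate::helper()` always points back to this defining crate.
#[macro_export]
macro_rules! call_helper {
    () => {
        $crate::helper()
    };
}

fn main() {
    println!("{}", call_helper!());
}
```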

Datei anzeigen

@ -2,14 +2,12 @@ use std::io::ErrorKind;
use serde_json::Value; use serde_json::Value;
use super::Cipher;
use crate::CONFIG; use crate::CONFIG;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "attachments"] #[table_name = "attachments"]
#[changeset_options(treat_none_as_null="true")] #[changeset_options(treat_none_as_null="true")]
#[belongs_to(super::Cipher, foreign_key = "cipher_uuid")]
#[primary_key(id)] #[primary_key(id)]
pub struct Attachment { pub struct Attachment {
pub id: String, pub id: String,
@ -188,4 +186,15 @@ impl Attachment {
.unwrap_or(0) .unwrap_or(0)
}} }}
} }
pub async fn find_all_by_ciphers(cipher_uuids: &Vec<String>, conn: &DbConn) -> Vec<Self> {
db_run! { conn: {
attachments::table
.filter(attachments::cipher_uuid.eq_any(cipher_uuids))
.select(attachments::all_columns)
.load::<AttachmentDb>(conn)
.expect("Error loading attachments")
.from_db()
}}
}
} }
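Editor's note: the new `find_all_by_ciphers` loads the attachments for a whole batch of ciphers with a single `eq_any` query; the caller can then group the rows by `cipher_uuid` so serialization needs no further round trips. A sketch of that grouping step, with plain structs standing in for the Diesel-backed model:

```rust
use std::collections::HashMap;

// Simplified stand-in for the attachment rows returned by the batched query.
#[derive(Debug)]
struct AttachmentRow {
    cipher_uuid: String,
    file_name: String,
}

// Group one query result by cipher so each cipher's JSON can be built
// without another trip to the database.
fn group_by_cipher(rows: Vec<AttachmentRow>) -> HashMap<String, Vec<AttachmentRow>> {
    let mut map: HashMap<String, Vec<AttachmentRow>> = HashMap::new();
    for row in rows {
        map.entry(row.cipher_uuid.clone()).or_default().push(row);
    }
    map
}

fn main() {
    let rows = vec![
        AttachmentRow { cipher_uuid: "c1".into(), file_name: "a.txt".into() },
        AttachmentRow { cipher_uuid: "c1".into(), file_name: "b.txt".into() },
        AttachmentRow { cipher_uuid: "c2".into(), file_name: "c.txt".into() },
    ];
    let grouped = group_by_cipher(rows);
    println!("c1 has {} attachments", grouped["c1"].len());
}
```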

Datei anzeigen

@ -1,19 +1,17 @@
use crate::CONFIG;
use chrono::{Duration, NaiveDateTime, Utc}; use chrono::{Duration, NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use crate::CONFIG; use super::{Attachment, CollectionCipher, Favorite, FolderCipher, User, UserOrgStatus, UserOrgType, UserOrganization};
use super::{ use crate::api::core::CipherSyncData;
Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType,
UserOrganization, use std::borrow::Cow;
};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "ciphers"] #[table_name = "ciphers"]
#[changeset_options(treat_none_as_null="true")] #[changeset_options(treat_none_as_null="true")]
#[belongs_to(User, foreign_key = "user_uuid")]
#[belongs_to(Organization, foreign_key = "organization_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct Cipher { pub struct Cipher {
pub uuid: String, pub uuid: String,
@ -82,22 +80,32 @@ use crate::error::MapResult;
/// Database methods /// Database methods
impl Cipher { impl Cipher {
pub async fn to_json(&self, host: &str, user_uuid: &str, conn: &DbConn) -> Value { pub async fn to_json(
&self,
host: &str,
user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn,
) -> Value {
use crate::util::format_date; use crate::util::format_date;
let attachments = Attachment::find_by_cipher(&self.uuid, conn).await; let mut attachments_json: Value = Value::Null;
// When there are no attachments use null instead of an empty array if let Some(cipher_sync_data) = cipher_sync_data {
let attachments_json = if attachments.is_empty() { if let Some(attachments) = cipher_sync_data.cipher_attachments.get(&self.uuid) {
Value::Null attachments_json = attachments.iter().map(|c| c.to_json(host)).collect();
}
} else { } else {
attachments.iter().map(|c| c.to_json(host)).collect() let attachments = Attachment::find_by_cipher(&self.uuid, conn).await;
}; if !attachments.is_empty() {
attachments_json = attachments.iter().map(|c| c.to_json(host)).collect()
}
}
let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let password_history_json = let password_history_json =
self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null);
let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, conn).await { let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, cipher_sync_data, conn).await {
Some((ro, hp)) => (ro, hp), Some((ro, hp)) => (ro, hp),
None => { None => {
error!("Cipher ownership assertion failure"); error!("Cipher ownership assertion failure");
@ -109,7 +117,7 @@ impl Cipher {
// If not passing an empty object, mobile clients will crash. // If not passing an empty object, mobile clients will crash.
let mut type_data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({})); let mut type_data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({}));
// NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// Set the first element of the Uris array as Uri, this is needed several (mobile) clients. // Set the first element of the Uris array as Uri, this is needed several (mobile) clients.
if self.atype == 1 { if self.atype == 1 {
if type_data_json["Uris"].is_array() { if type_data_json["Uris"].is_array() {
@ -124,13 +132,23 @@ impl Cipher {
// Clone the type_data and add some default value. // Clone the type_data and add some default value.
let mut data_json = type_data_json.clone(); let mut data_json = type_data_json.clone();
// NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream
// data_json should always contain the following keys with every atype // data_json should always contain the following keys with every atype
data_json["Fields"] = json!(fields_json); data_json["Fields"] = json!(fields_json);
data_json["Name"] = json!(self.name); data_json["Name"] = json!(self.name);
data_json["Notes"] = json!(self.notes); data_json["Notes"] = json!(self.notes);
data_json["PasswordHistory"] = json!(password_history_json); data_json["PasswordHistory"] = json!(password_history_json);
let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data {
if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
Cow::from(cipher_collections)
} else {
Cow::from(Vec::with_capacity(0))
}
} else {
Cow::from(self.get_collections(user_uuid, conn).await)
};
// There are three types of cipher response models in upstream // There are three types of cipher response models in upstream
// Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order // Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order
// of increasing level of detail). vaultwarden currently only // of increasing level of detail). vaultwarden currently only
@ -144,8 +162,8 @@ impl Cipher {
"Type": self.atype, "Type": self.atype,
"RevisionDate": format_date(&self.updated_at), "RevisionDate": format_date(&self.updated_at),
"DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))), "DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))),
"FolderId": self.get_folder_uuid(user_uuid, conn).await, "FolderId": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string() ) } else { self.get_folder_uuid(user_uuid, conn).await },
"Favorite": self.is_favorite(user_uuid, conn).await, "Favorite": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_favorites.contains(&self.uuid) } else { self.is_favorite(user_uuid, conn).await },
"Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32), "Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32),
"OrganizationId": self.organization_uuid, "OrganizationId": self.organization_uuid,
"Attachments": attachments_json, "Attachments": attachments_json,
@ -154,7 +172,7 @@ impl Cipher {
"OrganizationUseTotp": true, "OrganizationUseTotp": true,
// This field is specific to the cipherDetails type. // This field is specific to the cipherDetails type.
"CollectionIds": self.get_collections(user_uuid, conn).await, "CollectionIds": collection_ids,
"Name": self.name, "Name": self.name,
"Notes": self.notes, "Notes": self.notes,
@ -318,13 +336,21 @@ impl Cipher {
} }
/// Returns whether this cipher is owned by an org in which the user has full access. /// Returns whether this cipher is owned by an org in which the user has full access.
pub async fn is_in_full_access_org(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_in_full_access_org(
&self,
user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn,
) -> bool {
if let Some(ref org_uuid) = self.organization_uuid { if let Some(ref org_uuid) = self.organization_uuid {
if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await { if let Some(cipher_sync_data) = cipher_sync_data {
if let Some(cached_user_org) = cipher_sync_data.user_organizations.get(org_uuid) {
return cached_user_org.has_full_access();
}
} else if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await {
return user_org.has_full_access(); return user_org.has_full_access();
} }
} }
false false
} }
@ -333,27 +359,32 @@ impl Cipher {
/// not in any collection the user has access to. Otherwise, the user has /// not in any collection the user has access to. Otherwise, the user has
/// access to this cipher, and Some(read_only, hide_passwords) represents /// access to this cipher, and Some(read_only, hide_passwords) represents
/// the access restrictions. /// the access restrictions.
pub async fn get_access_restrictions(&self, user_uuid: &str, conn: &DbConn) -> Option<(bool, bool)> { pub async fn get_access_restrictions(
&self,
user_uuid: &str,
cipher_sync_data: Option<&CipherSyncData>,
conn: &DbConn,
) -> Option<(bool, bool)> {
// Check whether this cipher is directly owned by the user, or is in // Check whether this cipher is directly owned by the user, or is in
// a collection that the user has full access to. If so, there are no // a collection that the user has full access to. If so, there are no
// access restrictions. // access restrictions.
if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, conn).await { if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, cipher_sync_data, conn).await {
return Some((false, false)); return Some((false, false));
} }
db_run! {conn: { let rows = if let Some(cipher_sync_data) = cipher_sync_data {
// Check whether this cipher is in any collections accessible to the let mut rows: Vec<(bool, bool)> = Vec::new();
// user. If so, retrieve the access flags for each collection. if let Some(collections) = cipher_sync_data.cipher_collections.get(&self.uuid) {
let rows = ciphers::table for collection in collections {
.filter(ciphers::uuid.eq(&self.uuid)) if let Some(uc) = cipher_sync_data.user_collections.get(collection) {
.inner_join(ciphers_collections::table.on( rows.push((uc.read_only, uc.hide_passwords));
ciphers::uuid.eq(ciphers_collections::cipher_uuid))) }
.inner_join(users_collections::table.on( }
ciphers_collections::collection_uuid.eq(users_collections::collection_uuid) }
.and(users_collections::user_uuid.eq(user_uuid)))) rows
.select((users_collections::read_only, users_collections::hide_passwords)) } else {
.load::<(bool, bool)>(conn) self.get_collections_access_flags(user_uuid, conn).await
.expect("Error getting access restrictions"); };
if rows.is_empty() { if rows.is_empty() {
// This cipher isn't in any collections accessible to the user. // This cipher isn't in any collections accessible to the user.
@ -377,18 +408,34 @@ impl Cipher {
} }
Some((read_only, hide_passwords)) Some((read_only, hide_passwords))
}
pub async fn get_collections_access_flags(&self, user_uuid: &str, conn: &DbConn) -> Vec<(bool, bool)> {
db_run! {conn: {
// Check whether this cipher is in any collections accessible to the
// user. If so, retrieve the access flags for each collection.
ciphers::table
.filter(ciphers::uuid.eq(&self.uuid))
.inner_join(ciphers_collections::table.on(
ciphers::uuid.eq(ciphers_collections::cipher_uuid)))
.inner_join(users_collections::table.on(
ciphers_collections::collection_uuid.eq(users_collections::collection_uuid)
.and(users_collections::user_uuid.eq(user_uuid))))
.select((users_collections::read_only, users_collections::hide_passwords))
.load::<(bool, bool)>(conn)
.expect("Error getting access restrictions")
}} }}
} }
pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
match self.get_access_restrictions(user_uuid, conn).await { match self.get_access_restrictions(user_uuid, None, conn).await {
Some((read_only, _hide_passwords)) => !read_only, Some((read_only, _hide_passwords)) => !read_only,
None => false, None => false,
} }
} }
pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool {
self.get_access_restrictions(user_uuid, conn).await.is_some() self.get_access_restrictions(user_uuid, None, conn).await.is_some()
} }
// Returns whether this cipher is a favorite of the specified user. // Returns whether this cipher is a favorite of the specified user.
@ -563,4 +610,32 @@ impl Cipher {
.load::<String>(conn).unwrap_or_default() .load::<String>(conn).unwrap_or_default()
}} }}
} }
/// Return a Vec with (cipher_uuid, collection_uuid)
/// This is used during a full sync so we only need one query for all collections accessible.
pub async fn get_collections_with_cipher_by_user(user_id: &str, conn: &DbConn) -> Vec<(String, String)> {
db_run! {conn: {
ciphers_collections::table
.inner_join(collections::table.on(
collections::uuid.eq(ciphers_collections::collection_uuid)
))
.inner_join(users_organizations::table.on(
users_organizations::org_uuid.eq(collections::org_uuid).and(
users_organizations::user_uuid.eq(user_id)
)
))
.left_join(users_collections::table.on(
users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and(
users_collections::user_uuid.eq(user_id)
)
))
.filter(users_collections::user_uuid.eq(user_id).or( // User has access to collection
users_organizations::access_all.eq(true).or( // User has access all
users_organizations::atype.le(UserOrgType::Admin as i32) // User is admin or owner
)
))
.select(ciphers_collections::all_columns)
.load::<(String, String)>(conn).unwrap_or_default()
}}
}
} }
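Editor's note: a pattern repeated throughout cipher.rs above is that each helper now takes an `Option<&CipherSyncData>` and answers from the prefetched maps when the caller supplied them, falling back to the original per-cipher query otherwise (e.g. `is_write_accessible_to_user` passes `None`). A reduced, synchronous sketch of that fallback shape for `get_access_restrictions` — hypothetical names and plain std maps, the real method is async and takes a `DbConn`:

```rust
use std::collections::HashMap;

// Simplified stand-ins for the prefetched sync data.
struct Cache {
    cipher_collections: HashMap<String, Vec<String>>, // cipher_uuid -> collection uuids
    user_collections: HashMap<String, (bool, bool)>,  // collection_uuid -> (read_only, hide_passwords)
}

// Collect the (read_only, hide_passwords) rows either from the cache or,
// when no cache was passed, from a per-cipher lookup (simulated here).
fn access_rows(cipher_uuid: &str, cache: Option<&Cache>) -> Vec<(bool, bool)> {
    match cache {
        Some(c) => c
            .cipher_collections
            .get(cipher_uuid)
            .map(|collections| {
                collections
                    .iter()
                    .filter_map(|col| c.user_collections.get(col).copied())
                    .collect()
            })
            .unwrap_or_default(),
        None => per_cipher_query(cipher_uuid),
    }
}

fn per_cipher_query(_cipher_uuid: &str) -> Vec<(bool, bool)> {
    Vec::new() // stand-in for the join the old code ran once per cipher
}

fn main() {
    let cache = Cache {
        cipher_collections: HashMap::from([("c1".to_string(), vec!["col1".to_string()])]),
        user_collections: HashMap::from([("col1".to_string(), (true, false))]),
    };
    assert_eq!(access_rows("c1", Some(&cache)), vec![(true, false)]);
    assert!(access_rows("c1", None).is_empty());
    println!("cache-or-query fallback ok");
}
```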

Datei anzeigen

@ -1,11 +1,10 @@
use serde_json::Value; use serde_json::Value;
use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; use super::{User, UserOrgStatus, UserOrgType, UserOrganization};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "collections"] #[table_name = "collections"]
#[belongs_to(Organization, foreign_key = "org_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct Collection { pub struct Collection {
pub uuid: String, pub uuid: String,
@ -13,10 +12,8 @@ db_object! {
pub name: String, pub name: String,
} }
#[derive(Identifiable, Queryable, Insertable, Associations)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "users_collections"] #[table_name = "users_collections"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[belongs_to(Collection, foreign_key = "collection_uuid")]
#[primary_key(user_uuid, collection_uuid)] #[primary_key(user_uuid, collection_uuid)]
pub struct CollectionUser { pub struct CollectionUser {
pub user_uuid: String, pub user_uuid: String,
@ -25,10 +22,8 @@ db_object! {
pub hide_passwords: bool, pub hide_passwords: bool,
} }
#[derive(Identifiable, Queryable, Insertable, Associations)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "ciphers_collections"] #[table_name = "ciphers_collections"]
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
#[belongs_to(Collection, foreign_key = "collection_uuid")]
#[primary_key(cipher_uuid, collection_uuid)] #[primary_key(cipher_uuid, collection_uuid)]
pub struct CollectionCipher { pub struct CollectionCipher {
pub cipher_uuid: String, pub cipher_uuid: String,
@ -57,11 +52,32 @@ impl Collection {
}) })
} }
pub async fn to_json_details(&self, user_uuid: &str, conn: &DbConn) -> Value { pub async fn to_json_details(
&self,
user_uuid: &str,
cipher_sync_data: Option<&crate::api::core::CipherSyncData>,
conn: &DbConn,
) -> Value {
let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data {
match cipher_sync_data.user_organizations.get(&self.org_uuid) {
Some(uo) if uo.has_full_access() => (false, false),
Some(_) => {
if let Some(uc) = cipher_sync_data.user_collections.get(&self.uuid) {
(uc.read_only, uc.hide_passwords)
} else {
(false, false)
}
}
_ => (true, true),
}
} else {
(!self.is_writable_by_user(user_uuid, conn).await, self.hide_passwords_for_user(user_uuid, conn).await)
};
let mut json_object = self.to_json(); let mut json_object = self.to_json();
json_object["Object"] = json!("collectionDetails"); json_object["Object"] = json!("collectionDetails");
json_object["ReadOnly"] = json!(!self.is_writable_by_user(user_uuid, conn).await); json_object["ReadOnly"] = json!(read_only);
json_object["HidePasswords"] = json!(self.hide_passwords_for_user(user_uuid, conn).await); json_object["HidePasswords"] = json!(hide_passwords);
json_object json_object
} }
} }
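Editor's note: the rewritten `to_json_details` resolves `ReadOnly`/`HidePasswords` from the cached membership rows: full-access members get no restrictions, ordinary members use the per-collection flags (or none if no row exists), and a missing membership is treated as read-only with hidden passwords. A condensed sketch of that decision with standalone types, not the real models:

```rust
// Simplified stand-ins for the cached membership rows.
struct UserOrg {
    full_access: bool,
}
struct UserCollection {
    read_only: bool,
    hide_passwords: bool,
}

// Mirrors the match in to_json_details; the tuple is (read_only, hide_passwords).
fn access_flags(user_org: Option<&UserOrg>, user_col: Option<&UserCollection>) -> (bool, bool) {
    match user_org {
        Some(uo) if uo.full_access => (false, false),
        Some(_) => user_col.map_or((false, false), |uc| (uc.read_only, uc.hide_passwords)),
        None => (true, true),
    }
}

fn main() {
    let member = UserOrg { full_access: false };
    let col = UserCollection { read_only: true, hide_passwords: false };
    assert_eq!(access_flags(Some(&member), Some(&col)), (true, false));
    assert_eq!(access_flags(None, None), (true, true));
    println!("access flags resolved");
}
```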
@ -374,6 +390,17 @@ impl CollectionUser {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
db_run! { conn: {
users_collections::table
.filter(users_collections::user_uuid.eq(user_uuid))
.select(users_collections::all_columns)
.load::<CollectionUserDb>(conn)
.expect("Error loading users_collections")
.from_db()
}}
}
pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult {
for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() { for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() {
User::update_uuid_revision(&collection.user_uuid, conn).await; User::update_uuid_revision(&collection.user_uuid, conn).await;

Datei anzeigen

@ -1,13 +1,11 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use super::User;
use crate::CONFIG; use crate::CONFIG;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "devices"] #[table_name = "devices"]
#[changeset_options(treat_none_as_null="true")] #[changeset_options(treat_none_as_null="true")]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid, user_uuid)] #[primary_key(uuid, user_uuid)]
pub struct Device { pub struct Device {
pub uuid: String, pub uuid: String,

Datei anzeigen

@ -4,10 +4,9 @@ use serde_json::Value;
use super::User; use super::User;
db_object! { db_object! {
#[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "emergency_access"] #[table_name = "emergency_access"]
#[changeset_options(treat_none_as_null="true")] #[changeset_options(treat_none_as_null="true")]
#[belongs_to(User, foreign_key = "grantor_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct EmergencyAccess { pub struct EmergencyAccess {
pub uuid: String, pub uuid: String,

Datei anzeigen

@ -1,10 +1,8 @@
use super::{Cipher, User}; use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "favorites"] #[table_name = "favorites"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
#[primary_key(user_uuid, cipher_uuid)] #[primary_key(user_uuid, cipher_uuid)]
pub struct Favorite { pub struct Favorite {
pub user_uuid: String, pub user_uuid: String,
@ -80,4 +78,16 @@ impl Favorite {
.map_res("Error removing favorites by user") .map_res("Error removing favorites by user")
}} }}
} }
/// Return a vec with (cipher_uuid) this will only contain favorite flagged ciphers
/// This is used during a full sync so we only need one query for all favorite cipher matches.
pub async fn get_all_cipher_uuid_by_user(user_uuid: &str, conn: &DbConn) -> Vec<String> {
db_run! { conn: {
favorites::table
.filter(favorites::user_uuid.eq(user_uuid))
.select(favorites::cipher_uuid)
.load::<String>(conn)
.unwrap_or_default()
}}
}
} }
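Editor's note: `get_all_cipher_uuid_by_user` pulls every favorited cipher uuid for a user in one query; the sync path can then collect that Vec into a HashSet so the `cipher_favorites.contains(&uuid)` check in `Cipher::to_json` is a constant-time lookup instead of a query per cipher. A small sketch with hypothetical uuids:

```rust
use std::collections::HashSet;

fn main() {
    // Result of the single favorites query for one user (hypothetical data).
    let favorite_uuids = vec!["c1".to_string(), "c3".to_string()];

    // Collected once, consulted many times while serializing ciphers.
    let favorites: HashSet<String> = favorite_uuids.into_iter().collect();

    for cipher_uuid in ["c1", "c2", "c3"] {
        println!("{cipher_uuid} favorite: {}", favorites.contains(cipher_uuid));
    }
}
```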

Datei anzeigen

@ -1,12 +1,11 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use super::{Cipher, User}; use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "folders"] #[table_name = "folders"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct Folder { pub struct Folder {
pub uuid: String, pub uuid: String,
@ -16,10 +15,8 @@ db_object! {
pub name: String, pub name: String,
} }
#[derive(Identifiable, Queryable, Insertable, Associations)] #[derive(Identifiable, Queryable, Insertable)]
#[table_name = "folders_ciphers"] #[table_name = "folders_ciphers"]
#[belongs_to(Cipher, foreign_key = "cipher_uuid")]
#[belongs_to(Folder, foreign_key = "folder_uuid")]
#[primary_key(cipher_uuid, folder_uuid)] #[primary_key(cipher_uuid, folder_uuid)]
pub struct FolderCipher { pub struct FolderCipher {
pub cipher_uuid: String, pub cipher_uuid: String,
@ -215,4 +212,17 @@ impl FolderCipher {
.from_db() .from_db()
}} }}
} }
/// Return a vec with (cipher_uuid, folder_uuid)
/// This is used during a full sync so we only need one query for all folder matches.
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<(String, String)> {
db_run! { conn: {
folders_ciphers::table
.inner_join(folders::table)
.filter(folders::user_uuid.eq(user_uuid))
.select(folders_ciphers::all_columns)
.load::<(String, String)>(conn)
.unwrap_or_default()
}}
}
} }

Datei anzeigen

@ -6,12 +6,11 @@ use crate::db::DbConn;
use crate::error::MapResult; use crate::error::MapResult;
use crate::util::UpCase; use crate::util::UpCase;
use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; use super::{UserOrgStatus, UserOrgType, UserOrganization};
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "org_policies"] #[table_name = "org_policies"]
#[belongs_to(Organization, foreign_key = "org_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct OrgPolicy { pub struct OrgPolicy {
pub uuid: String, pub uuid: String,

Datei anzeigen

@ -547,6 +547,15 @@ impl UserOrganization {
}} }}
} }
pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<Self> {
db_run! { conn: {
users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid))
.load::<UserOrganizationDb>(conn)
.expect("Error loading user organizations").from_db()
}}
}
pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec<Self> { pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec<Self> {
db_run! { conn: { db_run! { conn: {
users_organizations::table users_organizations::table

Datei anzeigen

@ -1,14 +1,12 @@
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use serde_json::Value; use serde_json::Value;
use super::{Organization, User}; use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "sends"] #[table_name = "sends"]
#[changeset_options(treat_none_as_null="true")] #[changeset_options(treat_none_as_null="true")]
#[belongs_to(User, foreign_key = "user_uuid")]
#[belongs_to(Organization, foreign_key = "organization_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct Send { pub struct Send {
pub uuid: String, pub uuid: String,

Datei anzeigen

@ -2,12 +2,9 @@ use serde_json::Value;
use crate::{api::EmptyResult, db::DbConn, error::MapResult}; use crate::{api::EmptyResult, db::DbConn, error::MapResult};
use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "twofactor"] #[table_name = "twofactor"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(uuid)] #[primary_key(uuid)]
pub struct TwoFactor { pub struct TwoFactor {
pub uuid: String, pub uuid: String,

Datei anzeigen

@ -2,12 +2,9 @@ use chrono::{NaiveDateTime, Utc};
use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONFIG}; use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONFIG};
use super::User;
db_object! { db_object! {
#[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] #[derive(Identifiable, Queryable, Insertable, AsChangeset)]
#[table_name = "twofactor_incomplete"] #[table_name = "twofactor_incomplete"]
#[belongs_to(User, foreign_key = "user_uuid")]
#[primary_key(user_uuid, device_uuid)] #[primary_key(user_uuid, device_uuid)]
pub struct TwoFactorIncomplete { pub struct TwoFactorIncomplete {
pub user_uuid: String, pub user_uuid: String,

Datei anzeigen

@ -214,20 +214,20 @@ impl<'r> Responder<'r, 'static> for Error {
macro_rules! err { macro_rules! err {
($msg:expr) => {{ ($msg:expr) => {{
error!("{}", $msg); error!("{}", $msg);
return Err(crate::error::Error::new($msg, $msg)); return Err($crate::error::Error::new($msg, $msg));
}}; }};
($usr_msg:expr, $log_value:expr) => {{ ($usr_msg:expr, $log_value:expr) => {{
error!("{}. {}", $usr_msg, $log_value); error!("{}. {}", $usr_msg, $log_value);
return Err(crate::error::Error::new($usr_msg, $log_value)); return Err($crate::error::Error::new($usr_msg, $log_value));
}}; }};
} }
macro_rules! err_silent { macro_rules! err_silent {
($msg:expr) => {{ ($msg:expr) => {{
return Err(crate::error::Error::new($msg, $msg)); return Err($crate::error::Error::new($msg, $msg));
}}; }};
($usr_msg:expr, $log_value:expr) => {{ ($usr_msg:expr, $log_value:expr) => {{
return Err(crate::error::Error::new($usr_msg, $log_value)); return Err($crate::error::Error::new($usr_msg, $log_value));
}}; }};
} }
@ -235,11 +235,11 @@ macro_rules! err_silent {
macro_rules! err_code { macro_rules! err_code {
($msg:expr, $err_code: expr) => {{ ($msg:expr, $err_code: expr) => {{
error!("{}", $msg); error!("{}", $msg);
return Err(crate::error::Error::new($msg, $msg).with_code($err_code)); return Err($crate::error::Error::new($msg, $msg).with_code($err_code));
}}; }};
($usr_msg:expr, $log_value:expr, $err_code: expr) => {{ ($usr_msg:expr, $log_value:expr, $err_code: expr) => {{
error!("{}. {}", $usr_msg, $log_value); error!("{}. {}", $usr_msg, $log_value);
return Err(crate::error::Error::new($usr_msg, $log_value).with_code($err_code)); return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
}}; }};
} }
@ -247,11 +247,11 @@ macro_rules! err_code {
macro_rules! err_discard { macro_rules! err_discard {
($msg:expr, $data:expr) => {{ ($msg:expr, $data:expr) => {{
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
return Err(crate::error::Error::new($msg, $msg)); return Err($crate::error::Error::new($msg, $msg));
}}; }};
($usr_msg:expr, $log_value:expr, $data:expr) => {{ ($usr_msg:expr, $log_value:expr, $data:expr) => {{
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
return Err(crate::error::Error::new($usr_msg, $log_value)); return Err($crate::error::Error::new($usr_msg, $log_value));
}}; }};
} }

Datei anzeigen

@ -377,12 +377,13 @@ async fn schedule_jobs(pool: db::DbPool) {
return; return;
} }
let runtime = tokio::runtime::Handle::current(); let runtime = tokio::runtime::Runtime::new().unwrap();
thread::Builder::new() thread::Builder::new()
.name("job-scheduler".to_string()) .name("job-scheduler".to_string())
.spawn(move || { .spawn(move || {
use job_scheduler::{Job, JobScheduler}; use job_scheduler::{Job, JobScheduler};
let _runtime_guard = runtime.enter();
let mut sched = JobScheduler::new(); let mut sched = JobScheduler::new();
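Editor's note: the scheduler thread previously borrowed the server's runtime via `Handle::current()`; it now owns a dedicated Tokio runtime and enters it, so jobs that spawn async work have a reactor available from this plain OS thread. A reduced sketch of that pattern — it assumes the `tokio` crate with its default multi-threaded runtime, and omits the JobScheduler loop itself:

```rust
use std::thread;

fn main() {
    // Build a runtime owned by the scheduler rather than borrowing the
    // server's runtime handle.
    let runtime = tokio::runtime::Runtime::new().unwrap();

    let worker = thread::Builder::new()
        .name("job-scheduler".to_string())
        .spawn(move || {
            // Entering the runtime makes tokio::spawn (and timers) usable
            // from this otherwise synchronous thread.
            let _runtime_guard = runtime.enter();
            tokio::spawn(async {
                println!("scheduled job ran on the scheduler's runtime");
            });
            // Give the spawned task a moment before the runtime is dropped.
            thread::sleep(std::time::Duration::from_millis(100));
        })
        .unwrap();

    worker.join().unwrap();
}
```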

Datei anzeigen

@ -1,5 +1,5 @@
/*! /*!
* Native JavaScript for Bootstrap v4.1.0 (https://thednp.github.io/bootstrap.native/) * Native JavaScript for Bootstrap v4.1.2 (https://thednp.github.io/bootstrap.native/)
* Copyright 2015-2022 © dnp_theme * Copyright 2015-2022 © dnp_theme
* Licensed under MIT (https://github.com/thednp/bootstrap.native/blob/master/LICENSE) * Licensed under MIT (https://github.com/thednp/bootstrap.native/blob/master/LICENSE)
*/ */
@ -545,7 +545,7 @@
return normalOps; return normalOps;
} }
var version = "4.1.0"; var version = "4.1.2";
const Version = version; const Version = version;
@ -2814,6 +2814,29 @@
} }
} }
/**
* This is a shortie for `document.createElement` method
* which allows you to create a new `HTMLElement` for a given `tagName`
* or based on an object with specific non-readonly attributes:
* `id`, `className`, `textContent`, `style`, etc.
* @see https://developer.mozilla.org/en-US/docs/Web/API/Document/createElement
*
* @param {Record<string, string> | string} param `tagName` or object
* @return {HTMLElement | Element} a new `HTMLElement` or `Element`
*/
function createElement(param) {
if (typeof param === 'string') {
return getDocument().createElement(param);
}
const { tagName } = param;
const attr = { ...param };
const newElement = createElement(tagName);
delete attr.tagName;
ObjectAssign(newElement, attr);
return newElement;
}
/** @type {string} */ /** @type {string} */
const offcanvasString = 'offcanvas'; const offcanvasString = 'offcanvas';
@ -2824,7 +2847,7 @@
const offcanvasActiveSelector = `.${offcanvasString}.${showClass}`; const offcanvasActiveSelector = `.${offcanvasString}.${showClass}`;
// any document would suffice // any document would suffice
const overlay = getDocument().createElement('div'); const overlay = createElement('div');
/** /**
* Returns the current active modal / offcanvas element. * Returns the current active modal / offcanvas element.
@ -2863,9 +2886,11 @@
* Shows the overlay to the user. * Shows the overlay to the user.
*/ */
function showOverlay() { function showOverlay() {
if (!hasClass(overlay, showClass)) {
addClass(overlay, showClass); addClass(overlay, showClass);
reflow(overlay); reflow(overlay);
} }
}
/** /**
* Hides the overlay from the user. * Hides the overlay from the user.
@ -2949,7 +2974,7 @@
if (!modalOverflow && scrollbarWidth) { if (!modalOverflow && scrollbarWidth) {
const pad = isRTL(element) ? 'paddingLeft' : 'paddingRight'; const pad = isRTL(element) ? 'paddingLeft' : 'paddingRight';
// @ts-ignore // @ts-ignore -- cannot use `setElementStyle`
element.style[pad] = `${scrollbarWidth}px`; element.style[pad] = `${scrollbarWidth}px`;
} }
setScrollbar(element, (modalOverflow || clientHeight !== scrollHeight)); setScrollbar(element, (modalOverflow || clientHeight !== scrollHeight));
@ -2989,15 +3014,16 @@
* @param {Modal} self the `Modal` instance * @param {Modal} self the `Modal` instance
*/ */
function afterModalHide(self) { function afterModalHide(self) {
const { triggers, element } = self; const { triggers, element, relatedTarget } = self;
removeOverlay(element); removeOverlay(element);
// @ts-ignore setElementStyle(element, { paddingRight: '' });
element.style.paddingRight = ''; toggleModalDismiss(self);
if (triggers.length) { const focusElement = showModalEvent.relatedTarget || triggers.find(isVisible);
const visibleTrigger = triggers.find((x) => isVisible(x)); if (focusElement) focus(focusElement);
if (visibleTrigger) focus(visibleTrigger);
} hiddenModalEvent.relatedTarget = relatedTarget;
dispatchEvent(element, hiddenModalEvent);
} }
/** /**
@ -3019,12 +3045,11 @@
*/ */
function beforeModalShow(self) { function beforeModalShow(self) {
const { element, hasFade } = self; const { element, hasFade } = self;
// @ts-ignore setElementStyle(element, { display: 'block' });
element.style.display = 'block';
setModalScrollbar(self); setModalScrollbar(self);
if (!getCurrentOpen(element)) { if (!getCurrentOpen(element)) {
getDocumentBody(element).style.overflow = 'hidden'; setElementStyle(getDocumentBody(element), { overflow: 'hidden' });
} }
addClass(element, showClass); addClass(element, showClass);
@ -3042,11 +3067,10 @@
*/ */
function beforeModalHide(self, force) { function beforeModalHide(self, force) {
const { const {
element, options, relatedTarget, hasFade, element, options, hasFade,
} = self; } = self;
// @ts-ignore setElementStyle(element, { display: '' });
element.style.display = '';
// force can also be the transitionEvent object, we wanna make sure it's not // force can also be the transitionEvent object, we wanna make sure it's not
// call is not forced and overlay is visible // call is not forced and overlay is visible
@ -3057,11 +3081,6 @@
} else { } else {
afterModalHide(self); afterModalHide(self);
} }
toggleModalDismiss(self);
hiddenModalEvent.relatedTarget = relatedTarget;
dispatchEvent(element, hiddenModalEvent);
} }
// MODAL EVENT HANDLERS // MODAL EVENT HANDLERS
@ -3243,14 +3262,15 @@
} }
if (backdrop) { if (backdrop) {
if (!currentOpen && !hasClass(overlay, showClass)) { if (!container.contains(overlay)) {
appendOverlay(container, hasFade, true); appendOverlay(container, hasFade, true);
} else { } else {
toggleOverlayType(true); toggleOverlayType(true);
} }
overlayDelay = getElementTransitionDuration(overlay); overlayDelay = getElementTransitionDuration(overlay);
if (!hasClass(overlay, showClass)) showOverlay(); showOverlay();
setTimeout(() => beforeModalShow(self), overlayDelay); setTimeout(() => beforeModalShow(self), overlayDelay);
} else { } else {
beforeModalShow(self); beforeModalShow(self);
@ -3398,13 +3418,12 @@
if (!options.scroll) { if (!options.scroll) {
setOffCanvasScrollbar(self); setOffCanvasScrollbar(self);
getDocumentBody(element).style.overflow = 'hidden'; setElementStyle(getDocumentBody(element), { overflow: 'hidden' });
} }
addClass(element, offcanvasTogglingClass); addClass(element, offcanvasTogglingClass);
addClass(element, showClass); addClass(element, showClass);
// @ts-ignore setElementStyle(element, { visibility: 'visible' });
element.style.visibility = 'visible';
emulateTransitionEnd(element, () => showOffcanvasComplete(self)); emulateTransitionEnd(element, () => showOffcanvasComplete(self));
} }
@ -3509,17 +3528,13 @@
* @param {Offcanvas} self the `Offcanvas` instance * @param {Offcanvas} self the `Offcanvas` instance
*/ */
function showOffcanvasComplete(self) { function showOffcanvasComplete(self) {
const { element, triggers } = self; const { element } = self;
removeClass(element, offcanvasTogglingClass); removeClass(element, offcanvasTogglingClass);
removeAttribute(element, ariaHidden); removeAttribute(element, ariaHidden);
setAttribute(element, ariaModal, 'true'); setAttribute(element, ariaModal, 'true');
setAttribute(element, 'role', 'dialog'); setAttribute(element, 'role', 'dialog');
if (triggers.length) {
triggers.forEach((btn) => setAttribute(btn, ariaExpanded, 'true'));
}
dispatchEvent(element, shownOffcanvasEvent); dispatchEvent(element, shownOffcanvasEvent);
toggleOffCanvasDismiss(self, true); toggleOffCanvasDismiss(self, true);
@ -3537,14 +3552,10 @@
setAttribute(element, ariaHidden, 'true'); setAttribute(element, ariaHidden, 'true');
removeAttribute(element, ariaModal); removeAttribute(element, ariaModal);
removeAttribute(element, 'role'); removeAttribute(element, 'role');
// @ts-ignore setElementStyle(element, { visibility: '' });
element.style.visibility = '';
if (triggers.length) { const visibleTrigger = showOffcanvasEvent.relatedTarget || triggers.find((x) => isVisible(x));
triggers.forEach((btn) => setAttribute(btn, ariaExpanded, 'false'));
const visibleTrigger = triggers.find((x) => isVisible(x));
if (visibleTrigger) focus(visibleTrigger); if (visibleTrigger) focus(visibleTrigger);
}
removeOverlay(element); removeOverlay(element);
@ -3634,13 +3645,14 @@
} }
if (options.backdrop) { if (options.backdrop) {
if (!currentOpen) { if (!container.contains(overlay)) {
appendOverlay(container, true); appendOverlay(container, true);
} else { } else {
toggleOverlayType(); toggleOverlayType();
} }
overlayDelay = getElementTransitionDuration(overlay); overlayDelay = getElementTransitionDuration(overlay);
if (!hasClass(overlay, showClass)) showOverlay(); showOverlay();
setTimeout(() => beforeOffcanvasShow(self), overlayDelay); setTimeout(() => beforeOffcanvasShow(self), overlayDelay);
} else { } else {
@ -4055,7 +4067,8 @@
*/ */
const mousehoverEvent = 'hover'; const mousehoverEvent = 'hover';
let elementUID = 1; let elementUID = 0;
let elementMapUID = 0;
const elementIDMap = new Map(); const elementIDMap = new Map();
/** /**
@ -4066,27 +4079,25 @@
* @returns {number} an existing or new unique ID * @returns {number} an existing or new unique ID
*/ */
function getUID(element, key) { function getUID(element, key) {
elementUID += 1; let result = key ? elementUID : elementMapUID;
let elMap = elementIDMap.get(element);
let result = elementUID;
if (key && key.length) { if (key) {
if (elMap) { const elID = getUID(element);
const elMapId = elMap.get(key); const elMap = elementIDMap.get(elID) || new Map();
if (!Number.isNaN(elMapId)) { if (!elementIDMap.has(elID)) {
result = elMapId; elementIDMap.set(elID, elMap);
} else {
elMap.set(key, result);
} }
} else { if (!elMap.has(key)) {
elementIDMap.set(element, new Map());
elMap = elementIDMap.get(element);
elMap.set(key, result); elMap.set(key, result);
} elementUID += 1;
} else if (!Number.isNaN(elMap)) { } else result = elMap.get(key);
result = elMap;
} else { } else {
elementIDMap.set(element, result); const elkey = element.id || element;
if (!elementIDMap.has(elkey)) {
elementIDMap.set(elkey, result);
elementMapUID += 1;
} else result = elementIDMap.get(elkey);
} }
return result; return result;
} }
@ -5098,6 +5109,8 @@
const hiddenTabEvent = OriginalEvent(`hidden.bs.${tabString}`); const hiddenTabEvent = OriginalEvent(`hidden.bs.${tabString}`);
/** /**
* Stores the current active tab and its content
* for a given `.nav` element.
* @type {Map<(HTMLElement | Element), any>} * @type {Map<(HTMLElement | Element), any>}
*/ */
const tabPrivate = new Map(); const tabPrivate = new Map();
@ -5111,7 +5124,7 @@
function triggerTabEnd(self) { function triggerTabEnd(self) {
const { tabContent, nav } = self; const { tabContent, nav } = self;
if (tabContent) { if (tabContent && hasClass(tabContent, collapsingClass)) {
// @ts-ignore // @ts-ignore
tabContent.style.height = ''; tabContent.style.height = '';
removeClass(tabContent, collapsingClass); removeClass(tabContent, collapsingClass);
@ -5125,11 +5138,13 @@
* @param {Tab} self the `Tab` instance * @param {Tab} self the `Tab` instance
*/ */
function triggerTabShow(self) { function triggerTabShow(self) {
const { element, tabContent, nav } = self; const {
const { currentHeight, nextHeight } = tabPrivate.get(element); element, tabContent, content: nextContent, nav,
} = self;
const { tab } = nav && tabPrivate.get(nav); const { tab } = nav && tabPrivate.get(nav);
if (tabContent) { // height animation if (tabContent && hasClass(nextContent, fadeClass)) { // height animation
const { currentHeight, nextHeight } = tabPrivate.get(element);
if (currentHeight === nextHeight) { if (currentHeight === nextHeight) {
triggerTabEnd(self); triggerTabEnd(self);
} else { } else {
@ -5141,6 +5156,7 @@
}, 50); }, 50);
} }
} else if (nav) Timer.clear(nav); } else if (nav) Timer.clear(nav);
shownTabEvent.relatedTarget = tab; shownTabEvent.relatedTarget = tab;
dispatchEvent(element, shownTabEvent); dispatchEvent(element, shownTabEvent);
} }
@ -5156,9 +5172,11 @@
const { tab, content } = nav && tabPrivate.get(nav); const { tab, content } = nav && tabPrivate.get(nav);
let currentHeight = 0; let currentHeight = 0;
if (tabContent) { if (tabContent && hasClass(nextContent, fadeClass)) {
[content, nextContent].forEach((c) => addClass(c, 'overflow-hidden')); [content, nextContent].forEach((c) => {
currentHeight = content.scrollHeight; addClass(c, 'overflow-hidden');
});
currentHeight = content.scrollHeight || 0;
} }
// update relatedTarget and dispatch event // update relatedTarget and dispatch event
@ -5170,7 +5188,7 @@
addClass(nextContent, activeClass); addClass(nextContent, activeClass);
removeClass(content, activeClass); removeClass(content, activeClass);
if (tabContent) { if (tabContent && hasClass(nextContent, fadeClass)) {
const nextHeight = nextContent.scrollHeight; const nextHeight = nextContent.scrollHeight;
tabPrivate.set(element, { currentHeight, nextHeight }); tabPrivate.set(element, { currentHeight, nextHeight });
@ -5178,7 +5196,9 @@
// @ts-ignore -- height animation // @ts-ignore -- height animation
tabContent.style.height = `${currentHeight}px`; tabContent.style.height = `${currentHeight}px`;
reflow(tabContent); reflow(tabContent);
[content, nextContent].forEach((c) => removeClass(c, 'overflow-hidden')); [content, nextContent].forEach((c) => {
removeClass(c, 'overflow-hidden');
});
} }
if (nextContent && hasClass(nextContent, fadeClass)) { if (nextContent && hasClass(nextContent, fadeClass)) {
@ -5187,8 +5207,11 @@
emulateTransitionEnd(nextContent, () => { emulateTransitionEnd(nextContent, () => {
triggerTabShow(self); triggerTabShow(self);
}); });
}, 17); }, 1);
} else { triggerTabShow(self); } } else {
addClass(nextContent, showClass);
triggerTabShow(self);
}
dispatchEvent(tab, hiddenTabEvent); dispatchEvent(tab, hiddenTabEvent);
} }
@ -5217,6 +5240,16 @@
return { tab, content }; return { tab, content };
} }
/**
* Returns a parent dropdown.
* @param {HTMLElement | Element} element the `Tab` element
* @returns {(HTMLElement | Element)?} the parent dropdown
*/
function getParentDropdown(element) {
const dropdown = closest(element, `.${dropdownMenuClasses.join(',.')}`);
return dropdown ? querySelector(`.${dropdownMenuClasses[0]}-toggle`, dropdown) : null;
}
/** /**
* Toggles on/off the `click` event listener. * Toggles on/off the `click` event listener.
* @param {Tab} self the `Tab` instance * @param {Tab} self the `Tab` instance
@ -5273,7 +5306,22 @@
// event targets // event targets
/** @type {(HTMLElement | Element)?} */ /** @type {(HTMLElement | Element)?} */
self.dropdown = nav && querySelector(`.${dropdownMenuClasses[0]}-toggle`, nav); self.dropdown = getParentDropdown(element);
// show first Tab instance of none is shown
// suggested on #432
const { tab } = getActiveTab(self);
if (nav && !tab) {
const firstTab = querySelector(tabSelector, nav);
const firstTabContent = firstTab && getTargetElement(firstTab);
if (firstTabContent) {
addClass(firstTab, activeClass);
addClass(firstTabContent, showClass);
addClass(firstTabContent, activeClass);
setAttribute(element, ariaSelected, 'true');
}
}
// add event listener // add event listener
toggleTabHandler(self, true); toggleTabHandler(self, true);
@ -5301,20 +5349,24 @@
// update relatedTarget and dispatch // update relatedTarget and dispatch
hideTabEvent.relatedTarget = element; hideTabEvent.relatedTarget = element;
dispatchEvent(tab, hideTabEvent); dispatchEvent(tab, hideTabEvent);
if (hideTabEvent.defaultPrevented) return; if (hideTabEvent.defaultPrevented) return;
if (nav) Timer.set(nav, () => {}, 17);
removeClass(tab, activeClass);
setAttribute(tab, ariaSelected, 'false');
addClass(element, activeClass); addClass(element, activeClass);
setAttribute(element, ariaSelected, 'true'); setAttribute(element, ariaSelected, 'true');
if (dropdown) { const activeDropdown = getParentDropdown(tab);
// @ts-ignore if (activeDropdown && hasClass(activeDropdown, activeClass)) {
if (!hasClass(element.parentNode, dropdownMenuClass)) { removeClass(activeDropdown, activeClass);
if (hasClass(dropdown, activeClass)) removeClass(dropdown, activeClass); }
} else if (!hasClass(dropdown, activeClass)) addClass(dropdown, activeClass);
if (nav) {
Timer.set(nav, () => {
removeClass(tab, activeClass);
setAttribute(tab, ariaSelected, 'false');
if (dropdown && !hasClass(dropdown, activeClass)) addClass(dropdown, activeClass);
}, 1);
} }
if (hasClass(content, fadeClass)) { if (hasClass(content, fadeClass)) {

Datei anzeigen

@ -4,10 +4,10 @@
* *
* To rebuild or modify this file with the latest versions of the included * To rebuild or modify this file with the latest versions of the included
* software please visit: * software please visit:
* https://datatables.net/download/#bs5/dt-1.11.4 * https://datatables.net/download/#bs5/dt-1.11.5
* *
* Included libraries: * Included libraries:
* DataTables 1.11.4 * DataTables 1.11.5
*/ */
@charset "UTF-8"; @charset "UTF-8";

Datei anzeigen

@ -4,20 +4,20 @@
* *
* To rebuild or modify this file with the latest versions of the included * To rebuild or modify this file with the latest versions of the included
* software please visit: * software please visit:
* https://datatables.net/download/#bs5/dt-1.11.4 * https://datatables.net/download/#bs5/dt-1.11.5
* *
* Included libraries: * Included libraries:
* DataTables 1.11.4 * DataTables 1.11.5
*/ */
/*! DataTables 1.11.4 /*! DataTables 1.11.5
* ©2008-2021 SpryMedia Ltd - datatables.net/license * ©2008-2021 SpryMedia Ltd - datatables.net/license
*/ */
/** /**
* @summary DataTables * @summary DataTables
* @description Paginate, search and order HTML tables * @description Paginate, search and order HTML tables
* @version 1.11.4 * @version 1.11.5
* @file jquery.dataTables.js * @file jquery.dataTables.js
* @author SpryMedia Ltd * @author SpryMedia Ltd
* @contact www.datatables.net * @contact www.datatables.net
@ -71,38 +71,7 @@
(function( $, window, document, undefined ) { (function( $, window, document, undefined ) {
"use strict"; "use strict";
/**
* DataTables is a plug-in for the jQuery Javascript library. It is a highly
* flexible tool, based upon the foundations of progressive enhancement,
* which will add advanced interaction controls to any HTML table. For a
* full list of features please refer to
* [DataTables.net](href="http://datatables.net).
*
* Note that the `DataTable` object is not a global variable but is aliased
* to `jQuery.fn.DataTable` and `jQuery.fn.dataTable` through which it may
* be accessed.
*
* @class
* @param {object} [init={}] Configuration object for DataTables. Options
* are defined by {@link DataTable.defaults}
* @requires jQuery 1.7+
*
* @example
* // Basic initialisation
* $(document).ready( function {
* $('#example').dataTable();
* } );
*
* @example
* // Initialisation with configuration options - in this case, disable
* // pagination and sorting.
* $(document).ready( function {
* $('#example').dataTable( {
* "paginate": false,
* "sort": false
* } );
* } );
*/
var DataTable = function ( selector, options ) var DataTable = function ( selector, options )
{ {
// When creating with `new`, create a new DataTable, returning the API instance // When creating with `new`, create a new DataTable, returning the API instance
@ -5449,7 +5418,7 @@
nToSize.style.width = headerWidths[i]; nToSize.style.width = headerWidths[i];
}, headerTrgEls ); }, headerTrgEls );
$(headerSrcEls).height(0); $(headerSrcEls).css('height', 0);
/* Same again with the footer if we have one */ /* Same again with the footer if we have one */
if ( footer ) if ( footer )
@ -8344,22 +8313,35 @@
$(document).on('plugin-init.dt', function (e, context) { $(document).on('plugin-init.dt', function (e, context) {
var api = new _Api( context ); var api = new _Api( context );
api.on( 'stateSaveParams', function ( e, settings, data ) {
var indexes = api.rows().iterator( 'row', function ( settings, idx ) {
return settings.aoData[idx]._detailsShow ? idx : undefined;
});
data.childRows = api.rows( indexes ).ids( true ).toArray(); api.on( 'stateSaveParams', function ( e, settings, d ) {
// This could be more compact with the API, but it is a lot faster as a simple
// internal loop
var idFn = settings.rowIdFn;
var data = settings.aoData;
var ids = [];
for (var i=0 ; i<data.length ; i++) {
if (data[i]._detailsShow) {
ids.push( '#' + idFn(data[i]._aData) );
}
}
d.childRows = ids;
}) })
var loaded = api.state.loaded(); var loaded = api.state.loaded();
if ( loaded && loaded.childRows ) { if ( loaded && loaded.childRows ) {
api.rows( loaded.childRows ).every( function () { api
.rows( $.map(loaded.childRows, function (id){
return id.replace(/:/g, '\\:')
}) )
.every( function () {
_fnCallbackFire( context, null, 'requestChild', [ this ] ) _fnCallbackFire( context, null, 'requestChild', [ this ] )
}) });
} }
}) });
var __details_add = function ( ctx, row, data, klass ) var __details_add = function ( ctx, row, data, klass )
{ {
@ -8406,6 +8388,15 @@
}; };
// Make state saving of child row details async to allow them to be batch processed
var __details_state = DataTable.util.throttle(
function (ctx) {
_fnSaveState( ctx[0] )
},
500
);
var __details_remove = function ( api, idx ) var __details_remove = function ( api, idx )
{ {
var ctx = api.context; var ctx = api.context;
@ -8419,7 +8410,7 @@
row._detailsShow = undefined; row._detailsShow = undefined;
row._details = undefined; row._details = undefined;
$( row.nTr ).removeClass( 'dt-hasChild' ); $( row.nTr ).removeClass( 'dt-hasChild' );
_fnSaveState( ctx[0] ); __details_state( ctx );
} }
} }
}; };
@ -8446,7 +8437,7 @@
_fnCallbackFire( ctx[0], null, 'childRow', [ show, api.row( api[0] ) ] ) _fnCallbackFire( ctx[0], null, 'childRow', [ show, api.row( api[0] ) ] )
__details_events( ctx[0] ); __details_events( ctx[0] );
_fnSaveState( ctx[0] ); __details_state( ctx );
} }
} }
}; };
@ -9657,7 +9648,7 @@
* @type string * @type string
* @default Version number * @default Version number
*/ */
DataTable.version = "1.11.4"; DataTable.version = "1.11.5";
/** /**
* Private data store, containing all of the settings objects that are * Private data store, containing all of the settings objects that are
@ -14082,7 +14073,7 @@
* *
* @type string * @type string
*/ */
build:"bs5/dt-1.11.4", build:"bs5/dt-1.11.5",
/** /**