Mirror of https://github.com/dani-garcia/vaultwarden.git, synced 2025-03-13 16:57:01 +01:00

Merge remote-tracking branch 'dani/main' into sso-support
Commit a0662bb0db

11 changed files with 247 additions and 129 deletions

76  .github/workflows/build.yml  (vendored)
@@ -1,4 +1,5 @@
 name: Build
+permissions: {}
 
 on:
   push:
@@ -13,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
 
   pull_request:
     paths:
+      - ".github/workflows/build.yml"
@@ -28,13 +30,17 @@ on:
 
 jobs:
   build:
+    name: Build and Test ${{ matrix.channel }}
+    permissions:
+      actions: write
+      contents: read
     # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers
     runs-on: ubuntu-22.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
     # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes.
     env:
-      RUSTFLAGS: "-D warnings"
+      RUSTFLAGS: "-Dwarnings"
     strategy:
       fail-fast: false
       matrix:
@@ -42,20 +48,19 @@ jobs:
           - "rust-toolchain" # The version defined in rust-toolchain
           - "msrv" # The supported MSRV
 
-    name: Build and Test ${{ matrix.channel }}
-
     steps:
-      # Checkout the repo
-      - name: "Checkout"
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo
-
       # Install dependencies
       - name: "Install dependencies Ubuntu"
         run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config
       # End Install dependencies
 
+      # Checkout the repo
+      - name: "Checkout"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+      # End Checkout the repo
+
       # Determine rust-toolchain version
       - name: Init Variables
@@ -75,7 +80,7 @@ jobs:
 
       # Only install the clippy and rustfmt components on the default rust-toolchain
       - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+        uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
         if: ${{ matrix.channel == 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -85,7 +90,7 @@ jobs:
 
       # Install the any other channel to be used for which we do not execute clippy and rustfmt
       - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1
+        uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1
         if: ${{ matrix.channel != 'rust-toolchain' }}
         with:
           toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -93,11 +98,13 @@ jobs:
 
       # Set the current matrix toolchain version as default
      - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default"
+        env:
+          RUST_TOOLCHAIN: ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
         run: |
           # Remove the rust-toolchain.toml
           rm rust-toolchain.toml
           # Set the default
-          rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}}
+          rustup default "${RUST_TOOLCHAIN}"
 
       # Show environment
       - name: "Show environment"
@@ -161,7 +168,7 @@ jobs:
         id: clippy
         if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
         run: |
-          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings
+          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
       # End Run cargo clippy
 
 
@@ -178,22 +185,31 @@ jobs:
       # This is useful so all test/clippy/fmt actions are done, and they can all be addressed
       - name: "Some checks failed"
         if: ${{ failure() }}
+        env:
+          TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}
+          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
+          TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
+          TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
+          TEST_MYSQL: ${{ steps.test_mysql.outcome }}
+          TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }}
+          CLIPPY: ${{ steps.clippy.outcome }}
+          FMT: ${{ steps.formatting.outcome }}
         run: |
-          echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY
-          echo "|---|------|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
-          echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
+          echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
           exit 1
 
 
@@ -202,5 +218,5 @@ jobs:
       - name: "All checks passed"
         if: ${{ success() }}
         run: |
-          echo "### :tada: Checks Passed!" >> $GITHUB_STEP_SUMMARY
-          echo "" >> $GITHUB_STEP_SUMMARY
+          echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}"
+          echo "" >> "${GITHUB_STEP_SUMMARY}"
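
Note: the recurring pattern in this workflow diff is that values produced by `${{ … }}` expressions are no longer spliced directly into `run:` scripts but are passed through `env:` and expanded as quoted shell variables. The expression result then reaches the shell as data instead of becoming part of the script text, which is the usual GitHub Actions script-injection hardening. A minimal sketch of the pattern (step and variable names are illustrative, not taken from this workflow):

    steps:
      - name: Use an expression safely
        env:
          # Evaluated by the runner and exported as an environment variable;
          # the value never becomes part of the script source.
          BRANCH_NAME: ${{ github.head_ref }}
        run: |
          # Quoted expansion: a crafted branch name cannot inject shell syntax.
          echo "Building ${BRANCH_NAME}" >> "${GITHUB_STEP_SUMMARY}"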

20  .github/workflows/hadolint.yml  (vendored)
@@ -1,21 +1,17 @@
 name: Hadolint
+permissions: {}
 
-on: [
-  push,
-  pull_request
-]
+on: [ push, pull_request ]
 
 jobs:
   hadolint:
     name: Validate Dockerfile syntax
+    permissions:
+      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30
     steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-      # End Checkout the repo
-
       # Start Docker Buildx
       - name: Setup Docker Buildx
         uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0
@@ -37,6 +33,12 @@ jobs:
         env:
           HADOLINT_VERSION: 2.12.0
       # End Download hadolint
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
+      # End Checkout the repo
 
       # Test Dockerfiles with hadolint
       - name: Run hadolint
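
Note: the relocated checkout steps now pass `persist-credentials: false`. By default `actions/checkout` writes the job's `GITHUB_TOKEN` into the workspace's git config so later steps can push or fetch; a lint job that only reads the sources does not need that, so disabling it keeps the token out of the checked-out tree. A minimal sketch (the version tag is illustrative, the workflows above pin a full commit SHA instead):

    - name: Checkout
      uses: actions/checkout@v4
      with:
        # Keep the GITHUB_TOKEN out of .git/config; this job never pushes.
        persist-credentials: false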

88  .github/workflows/release.yml  (vendored)
@@ -1,4 +1,5 @@
 name: Release
+permissions: {}
 
 on:
   push:
@@ -6,17 +7,23 @@ on:
       - main
 
     tags:
-      - '*'
+      # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
+      - '[1-2].[0-9]+.[0-9]+'
 
 jobs:
+  # https://github.com/marketplace/actions/skip-duplicate-actions
   # Some checks to determine if we need to continue with building a new docker.
   # We will skip this check if we are creating a tag, because that has the same hash as a previous run already.
   skip_check:
-    runs-on: ubuntu-24.04
+    # Only run this in the upstream repo and not on forks
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Cancel older jobs when running
+    permissions:
+      actions: write
+    runs-on: ubuntu-24.04
     outputs:
       should_skip: ${{ steps.skip_check.outputs.should_skip }}
 
     steps:
       - name: Skip Duplicates Actions
         id: skip_check
@@ -27,6 +34,9 @@ jobs:
         if: ${{ github.ref_type == 'branch' }}
 
   docker-build:
+    needs: skip_check
+    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     name: Build Vaultwarden containers
     permissions:
       packages: write
+      contents: read
@@ -34,8 +44,6 @@ jobs:
       id-token: write
     runs-on: ubuntu-24.04
     timeout-minutes: 120
-    needs: skip_check
-    if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }}
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
     services:
       registry:
@@ -61,12 +69,6 @@ jobs:
         base_image: ["debian","alpine"]
 
     steps:
-      # Checkout the repo
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          fetch-depth: 0
-
       - name: Initialize QEMU binfmt support
         uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
         with:
@@ -78,20 +80,31 @@ jobs:
           # https://github.com/moby/buildkit/issues/3969
           # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
         with:
           cache-binary: false
           buildkitd-config-inline: |
             [worker.oci]
               max-parallelism = 2
           driver-opts: |
             network=host
 
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        # We need fetch-depth of 0 so we also get all the tag metadata
+        with:
+          persist-credentials: false
+          fetch-depth: 0
+
       # Determine Base Tags and Source Version
       - name: Determine Base Tags and Source Version
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
           fi
 
@@ -116,8 +129,10 @@ jobs:
       - name: Add registry for DockerHub
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
         shell: bash
+        env:
+          DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
 
       # Login to GitHub Container Registry
       - name: Login to GitHub Container Registry
@@ -131,8 +146,10 @@ jobs:
       - name: Add registry for ghcr.io
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
 
       # Login to Quay.io
       - name: Login to Quay.io
@@ -146,17 +163,22 @@ jobs:
       - name: Add registry for Quay.io
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
         shell: bash
+        env:
+          QUAY_REPO: ${{ vars.QUAY_REPO }}
         run: |
-          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}"
+          echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
 
       - name: Configure build cache from/to
         shell: bash
+        env:
+          GHCR_REPO: ${{ vars.GHCR_REPO }}
+          BASE_IMAGE: ${{ matrix.base_image }}
         run: |
           #
           # Check if there is a GitHub Container Registry Login and use it for caching
           if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
-            echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}"
-            echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
+            echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
           else
             echo "BAKE_CACHE_FROM="
             echo "BAKE_CACHE_TO="
@@ -170,7 +192,7 @@ jobs:
 
       - name: Bake ${{ matrix.base_image }} containers
         id: bake_vw
-        uses: docker/bake-action@5ca506d06f70338a4968df87fd8bfee5cbfb84c7 # v6.0.0
+        uses: docker/bake-action@7bff531c65a5cda33e52e43950a795b91d450f63 # v6.3.0
         env:
           BASE_TAGS: "${{ env.BASE_TAGS }}"
           SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
@@ -189,14 +211,16 @@ jobs:
 
       - name: Extract digest SHA
         shell: bash
+        env:
+          BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
         run: |
-          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< '${{ steps.bake_vw.outputs.metadata }}')"
+          GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
           echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
 
       # Attest container images
       - name: Attest - docker.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.DOCKERHUB_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -204,7 +228,7 @@ jobs:
 
       - name: Attest - ghcr.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.GHCR_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -212,7 +236,7 @@ jobs:
 
       - name: Attest - quay.io - ${{ matrix.base_image }}
         if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ vars.QUAY_REPO }}
           subject-digest: ${{ env.DIGEST_SHA }}
@@ -222,11 +246,13 @@ jobs:
       # Extract the Alpine binaries from the containers
       - name: Extract binaries
         shell: bash
+        env:
+          REF_TYPE: ${{ github.ref_type }}
         run: |
-          # Check which main tag we are going to build determined by github.ref_type
-          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+          # Check which main tag we are going to build determined by ref_type
+          if [[ "${REF_TYPE}" == "tag" ]]; then
             EXTRACT_TAG="latest"
-          elif [[ "${{ github.ref_type }}" == "branch" ]]; then
+          elif [[ "${REF_TYPE}" == "branch" ]]; then
             EXTRACT_TAG="testing"
           fi
 
@@ -264,31 +290,31 @@ jobs:
 
       # Upload artifacts to Github Actions and Attest the binaries
       - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
           path: vaultwarden-amd64-${{ matrix.base_image }}
 
       - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
           path: vaultwarden-arm64-${{ matrix.base_image }}
 
       - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
           path: vaultwarden-armv7-${{ matrix.base_image }}
 
       - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
         with:
           name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
           path: vaultwarden-armv6-${{ matrix.base_image }}
 
       - name: "Attest artifacts ${{ matrix.base_image }}"
-        uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-path: vaultwarden-*
       # End Upload artifacts to Github Actions
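
Note: the three `Add registry for …` steps build `CONTAINER_REGISTRIES` up one registry at a time. The bash expansion `${VAR:+${VAR},}` yields the current value followed by a comma when the variable is already set, and nothing when it is unset, so the list stays comma-separated with no leading comma. A standalone sketch of that expansion (registry names are illustrative):

    - name: Accumulate registries
      shell: bash
      run: |
        CONTAINER_REGISTRIES="docker.io/example"   # first entry, variable was empty
        CONTAINER_REGISTRIES="${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}ghcr.io/example"
        CONTAINER_REGISTRIES="${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}quay.io/example"
        echo "${CONTAINER_REGISTRIES}"   # docker.io/example,ghcr.io/example,quay.io/example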

6  .github/workflows/releasecache-cleanup.yml  (vendored)
@@ -1,3 +1,6 @@
+name: Cleanup
+permissions: {}
+
 on:
   workflow_dispatch:
     inputs:
@@ -9,10 +12,11 @@ on:
   schedule:
     - cron: '0 1 * * FRI'
 
-name: Cleanup
 jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
+    permissions:
+      packages: write
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30

25  .github/workflows/trivy.yml  (vendored)
@@ -1,34 +1,39 @@
-name: trivy
+name: Trivy
+permissions: {}
 
 on:
   push:
     branches:
       - main
 
     tags:
       - '*'
 
   pull_request:
-    branches: [ "main" ]
+    branches:
+      - main
 
   schedule:
     - cron: '08 11 * * *'
 
-permissions:
-  contents: read
-
 jobs:
   trivy-scan:
-    # Only run this in the master repo and not on forks
+    # Only run this in the upstream repo and not on forks
     # When all forks run this at the same time, it is causing `Too Many Requests` issues
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
-    name: Check
-    runs-on: ubuntu-24.04
-    timeout-minutes: 30
+    name: Trivy Scan
     permissions:
       contents: read
-      security-events: write
       actions: read
+      security-events: write
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
 
     steps:
       - name: Checkout code
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
+        with:
+          persist-credentials: false
 
       - name: Run Trivy vulnerability scanner
         uses: aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0
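
Note: every workflow in this commit now opens with a top-level `permissions: {}`, which strips all default `GITHUB_TOKEN` scopes, and each job re-grants only the scopes it actually uses (the Trivy job above: `contents: read`, `actions: read`, `security-events: write`). A minimal skeleton of that least-privilege layout (job content illustrative):

    name: Example
    permissions: {}   # the token has no scopes unless a job asks for them

    jobs:
      scan:
        permissions:
          contents: read          # needed to check out the sources
          security-events: write  # needed to upload SARIF scan results
        runs-on: ubuntu-24.04
        steps:
          - run: echo "scan here"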

8  Cargo.lock  (generated)
@@ -884,18 +884,18 @@ dependencies = [
 
 [[package]]
 name = "derive_more"
-version = "1.0.0"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
+checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
 dependencies = [
  "derive_more-impl",
 ]
 
 [[package]]
 name = "derive_more-impl"
-version = "1.0.0"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
+checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
 dependencies = [
  "proc-macro2",
  "quote",

Cargo.toml

@@ -82,7 +82,7 @@ diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] }
 diesel_migrations = "2.2.0"
 diesel_logger = { version = "0.4.0", optional = true }
 
-derive_more = { version = "1.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
+derive_more = { version = "2.0.0", features = ["from", "into", "as_ref", "deref", "display"] }
 diesel-derive-newtype = "2.1.2"
 
 # Bundled/Static SQLite
@@ -122,7 +122,7 @@ webauthn-rs = "0.3.2"
 url = "2.5.4"
 
 # Email libraries
-lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
+lettre = { version = "0.11.12", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false }
 percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails
 email_address = "0.2.9"
 
@@ -137,7 +137,7 @@ hickory-resolver = "0.24.2"
 html5gum = "0.7.0"
 regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false }
 data-url = "0.3.1"
-bytes = "1.9.0"
+bytes = "1.10.0"
 
 # Cache function results (Used for version check and favicon fetching)
 cached = { version = "0.54.0", features = ["async"] }
@@ -147,7 +147,7 @@ cookie = "0.18.1"
 cookie_store = "0.21.1"
 
 # Used by U2F, JWT and PostgreSQL
-openssl = "0.10.69"
+openssl = "0.10.70"
 
 # CLI argument parsing
 pico-args = "0.5.0"

macros/Cargo.toml

@@ -10,4 +10,4 @@ proc-macro = true
 
 [dependencies]
 quote = "1.0.38"
-syn = "2.0.96"
+syn = "2.0.98"

src/api/core/organizations.rs

@@ -38,6 +38,7 @@ pub fn routes() -> Vec<Route> {
         post_organization_collections,
         delete_organization_collection_member,
         post_organization_collection_delete_member,
+        post_bulk_access_collections,
         post_organization_collection_update,
         put_organization_collection_update,
         delete_organization_collection,
@@ -132,17 +133,17 @@ struct OrganizationUpdateData {
 
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionData {
+struct FullCollectionData {
     name: String,
-    groups: Vec<NewCollectionGroupData>,
-    users: Vec<NewCollectionMemberData>,
+    groups: Vec<CollectionGroupData>,
+    users: Vec<CollectionMembershipData>,
     id: Option<CollectionId>,
     external_id: Option<String>,
 }
 
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionGroupData {
+struct CollectionGroupData {
     hide_passwords: bool,
     id: GroupId,
     read_only: bool,
@@ -151,7 +152,7 @@ struct NewCollectionGroupData {
 
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
-struct NewCollectionMemberData {
+struct CollectionMembershipData {
     hide_passwords: bool,
     id: MembershipId,
     read_only: bool,
@@ -443,13 +444,13 @@ async fn _get_org_collections(org_id: &OrganizationId, conn: &mut DbConn) -> Val
 async fn post_organization_collections(
     org_id: OrganizationId,
     headers: ManagerHeadersLoose,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     mut conn: DbConn,
 ) -> JsonResult {
     if org_id != headers.membership.org_uuid {
         err!("Organization not found", "Organization id's do not match");
     }
-    let data: NewCollectionData = data.into_inner();
+    let data: FullCollectionData = data.into_inner();
 
     let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else {
         err!("Can't find organization details")
@@ -502,29 +503,104 @@ async fn post_organization_collections(
     Ok(Json(collection.to_json_details(&headers.membership.user_uuid, None, &mut conn).await))
 }
 
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct BulkCollectionAccessData {
+    collection_ids: Vec<CollectionId>,
+    groups: Vec<CollectionGroupData>,
+    users: Vec<CollectionMembershipData>,
+}
+
+#[post("/organizations/<org_id>/collections/bulk-access", data = "<data>", rank = 1)]
+async fn post_bulk_access_collections(
+    org_id: OrganizationId,
+    headers: ManagerHeadersLoose,
+    data: Json<BulkCollectionAccessData>,
+    mut conn: DbConn,
+) -> EmptyResult {
+    if org_id != headers.membership.org_uuid {
+        err!("Organization not found", "Organization id's do not match");
+    }
+    let data: BulkCollectionAccessData = data.into_inner();
+
+    if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
+        err!("Can't find organization details")
+    };
+
+    for col_id in data.collection_ids {
+        let Some(collection) = Collection::find_by_uuid_and_org(&col_id, &org_id, &mut conn).await else {
+            err!("Collection not found")
+        };
+
+        // update collection modification date
+        collection.save(&mut conn).await?;
+
+        log_event(
+            EventType::CollectionUpdated as i32,
+            &collection.uuid,
+            &org_id,
+            &headers.user.uuid,
+            headers.device.atype,
+            &headers.ip.ip,
+            &mut conn,
+        )
+        .await;
+
+        CollectionGroup::delete_all_by_collection(&col_id, &mut conn).await?;
+        for group in &data.groups {
+            CollectionGroup::new(col_id.clone(), group.id.clone(), group.read_only, group.hide_passwords, group.manage)
+                .save(&mut conn)
+                .await?;
+        }
+
+        CollectionUser::delete_all_by_collection(&col_id, &mut conn).await?;
+        for user in &data.users {
+            let Some(member) = Membership::find_by_uuid_and_org(&user.id, &org_id, &mut conn).await else {
+                err!("User is not part of organization")
+            };
+
+            if member.access_all {
+                continue;
+            }
+
+            CollectionUser::save(
+                &member.user_uuid,
+                &col_id,
+                user.read_only,
+                user.hide_passwords,
+                user.manage,
+                &mut conn,
+            )
+            .await?;
+        }
+    }
+
+    Ok(())
+}
+
 #[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
 async fn put_organization_collection_update(
     org_id: OrganizationId,
     col_id: CollectionId,
     headers: ManagerHeaders,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     conn: DbConn,
 ) -> JsonResult {
     post_organization_collection_update(org_id, col_id, headers, data, conn).await
 }
 
-#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
+#[post("/organizations/<org_id>/collections/<col_id>", data = "<data>", rank = 2)]
 async fn post_organization_collection_update(
     org_id: OrganizationId,
     col_id: CollectionId,
     headers: ManagerHeaders,
-    data: Json<NewCollectionData>,
+    data: Json<FullCollectionData>,
     mut conn: DbConn,
 ) -> JsonResult {
     if org_id != headers.org_id {
         err!("Organization not found", "Organization id's do not match");
     }
-    let data: NewCollectionData = data.into_inner();
+    let data: FullCollectionData = data.into_inner();
 
     if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() {
         err!("Can't find organization details")
@@ -795,7 +871,7 @@ async fn get_collection_users(
 async fn put_collection_users(
     org_id: OrganizationId,
     col_id: CollectionId,
-    data: Json<Vec<MembershipData>>,
+    data: Json<Vec<CollectionMembershipData>>,
     headers: ManagerHeaders,
     mut conn: DbConn,
 ) -> EmptyResult {
@@ -939,24 +1015,6 @@ async fn post_org_keys(
     })))
 }
 
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct CollectionData {
-    id: CollectionId,
-    read_only: bool,
-    hide_passwords: bool,
-    manage: bool,
-}
-
-#[derive(Deserialize)]
-#[serde(rename_all = "camelCase")]
-struct MembershipData {
-    id: MembershipId,
-    read_only: bool,
-    hide_passwords: bool,
-    manage: bool,
-}
-
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
 struct InviteData {
@@ -1780,7 +1838,7 @@ use super::ciphers::CipherData;
 #[serde(rename_all = "camelCase")]
 struct ImportData {
     ciphers: Vec<CipherData>,
-    collections: Vec<NewCollectionData>,
+    collections: Vec<FullCollectionData>,
     collection_relationships: Vec<RelationsData>,
 }
 
@@ -2591,7 +2649,7 @@ struct GroupRequest {
     #[serde(default)]
     access_all: bool,
     external_id: Option<String>,
-    collections: Vec<SelectedCollection>,
+    collections: Vec<CollectionData>,
     users: Vec<MembershipId>,
 }
 
@@ -2612,14 +2670,14 @@ impl GroupRequest {
 
 #[derive(Deserialize, Serialize)]
 #[serde(rename_all = "camelCase")]
-struct SelectedCollection {
+struct CollectionData {
     id: CollectionId,
     read_only: bool,
     hide_passwords: bool,
     manage: bool,
 }
 
-impl SelectedCollection {
+impl CollectionData {
     pub fn to_collection_group(&self, groups_uuid: GroupId) -> CollectionGroup {
         CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords, self.manage)
     }
@@ -2702,7 +2760,7 @@ async fn put_group(
 
 async fn add_update_group(
     mut group: Group,
-    collections: Vec<SelectedCollection>,
+    collections: Vec<CollectionData>,
     members: Vec<MembershipId>,
     org_id: OrganizationId,
     headers: &AdminHeaders,
|
|||
// Build Regex only once since this takes a lot of time.
|
||||
static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
|
||||
|
||||
// The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
|
||||
// It is used to prevent sending a specific header which breaks icon downloads.
|
||||
// If this function needs to be renamed, also adjust the code in `util.rs`
|
||||
#[get("/<domain>/icon.png")]
|
||||
fn icon_external(domain: &str) -> Option<Redirect> {
|
||||
if !is_valid_domain(domain) {
|
||||
|
|

12  src/util.rs
@@ -56,13 +56,17 @@ impl Fairing for AppHeaders {
         res.set_raw_header("X-Content-Type-Options", "nosniff");
         res.set_raw_header("X-Robots-Tag", "noindex, nofollow");
 
-        if !res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/")) {
-            res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
-        }
-
         // Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP
         res.set_raw_header("X-XSS-Protection", "0");
 
+        // The `Cross-Origin-Resource-Policy` header should not be set on images or on the `icon_external` route.
+        // Otherwise some clients, like the Bitwarden Desktop, will fail to download the icons
+        if !(res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/"))
+            || req.route().is_some_and(|v| v.name.as_deref() == Some("icon_external")))
+        {
+            res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin");
+        }
+
         // Do not send the Content-Security-Policy (CSP) Header and X-Frame-Options for the *-connector.html files.
         // This can cause issues when some MFA requests needs to open a popup or page within the clients like WebAuthn, or Duo.
         // This is the same behavior as upstream Bitwarden.