From a02fb0fd243de354a9da2561388e23da844496ae Mon Sep 17 00:00:00 2001 From: Mathijs van Veluw Date: Tue, 4 Feb 2025 00:33:43 +0100 Subject: [PATCH 1/4] Update workflows and enhance security (#5537) This commit updates the workflow files and also fixes some security issues reported by zizmor (https://github.com/woodruffw/zizmor) Signed-off-by: BlackDex --- .github/workflows/build.yml | 76 +++++++++++-------- .github/workflows/hadolint.yml | 20 ++--- .github/workflows/release.yml | 88 ++++++++++++++-------- .github/workflows/releasecache-cleanup.yml | 6 +- .github/workflows/trivy.yml | 25 +++--- 5 files changed, 134 insertions(+), 81 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0202e681..86e5213f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,4 +1,5 @@ name: Build +permissions: {} on: push: @@ -13,6 +14,7 @@ on: - "diesel.toml" - "docker/Dockerfile.j2" - "docker/DockerSettings.yaml" + pull_request: paths: - ".github/workflows/build.yml" @@ -28,13 +30,17 @@ on: jobs: build: + name: Build and Test ${{ matrix.channel }} + permissions: + actions: write + contents: read # We use Ubuntu 22.04 here because this matches the library versions used within the Debian docker containers runs-on: ubuntu-22.04 timeout-minutes: 120 # Make warnings errors; this is to prevent warnings from slipping through. # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes. env: - RUSTFLAGS: "-D warnings" + RUSTFLAGS: "-Dwarnings" strategy: fail-fast: false matrix: @@ -42,20 +48,19 @@ jobs: channel: - "rust-toolchain" # The version defined in rust-toolchain - "msrv" # The supported MSRV - name: Build and Test ${{ matrix.channel }} - steps: - # Checkout the repo - - name: "Checkout" - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - # End Checkout the repo - - # Install dependencies - name: "Install dependencies Ubuntu" run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config # End Install dependencies + # Checkout the repo + - name: "Checkout" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + with: + persist-credentials: false + fetch-depth: 0 + # End Checkout the repo # Determine rust-toolchain version - name: Init Variables @@ -75,7 +80,7 @@ # Only install the clippy and rustfmt components on the default rust-toolchain - name: "Install rust-toolchain version" - uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1 + uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1 if: ${{ matrix.channel == 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" @@ -85,7 +90,7 @@ # Install any other channel to be used, for which we do not execute clippy and rustfmt - name: "Install MSRV version" - uses: dtolnay/rust-toolchain@a54c7afa936fefeb4456b2dd8068152669aa8203 # master @ Dec 14, 2024, 5:49 AM GMT+1 + uses: dtolnay/rust-toolchain@c5a29ddb4d9d194e7c84ec8c3fba61b1c31fee8c # master @ Jan 30, 2025, 8:16 PM GMT+1 if: ${{ matrix.channel != 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" # Set the current matrix toolchain version as default - name: "Set toolchain ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} as default" + env: + RUST_TOOLCHAIN: 
${{steps.toolchain.outputs.RUST_TOOLCHAIN}} run: | # Remove the rust-toolchain.toml rm rust-toolchain.toml # Set the default - rustup default ${{steps.toolchain.outputs.RUST_TOOLCHAIN}} + rustup default "${RUST_TOOLCHAIN}" # Show environment - name: "Show environment" @@ -161,7 +168,7 @@ jobs: id: clippy if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} run: | - cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings + cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc # End Run cargo clippy @@ -178,22 +185,31 @@ jobs: # This is useful so all test/clippy/fmt actions are done, and they can all be addressed - name: "Some checks failed" if: ${{ failure() }} + env: + TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }} + TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }} + TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }} + TEST_SQLITE: ${{ steps.test_sqlite.outcome }} + TEST_MYSQL: ${{ steps.test_mysql.outcome }} + TEST_POSTGRESQL: ${{ steps.test_postgresql.outcome }} + CLIPPY: ${{ steps.clippy.outcome }} + FMT: ${{ steps.formatting.outcome }} run: | - echo "### :x: Checks Failed!" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY - echo "|---|------|" >> $GITHUB_STEP_SUMMARY - echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY + echo "### :x: Checks Failed!" >> "${GITHUB_STEP_SUMMARY}" + echo "" >> "${GITHUB_STEP_SUMMARY}" + echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}" + echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}" + echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}" + echo "" >> "${GITHUB_STEP_SUMMARY}" + echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}" + echo "" >> "${GITHUB_STEP_SUMMARY}" exit 1 @@ -202,5 +218,5 @@ jobs: - name: "All checks passed" if: ${{ success() }} run: | - echo "### :tada: Checks Passed!" 
>> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY + echo "### :tada: Checks Passed!" >> "${GITHUB_STEP_SUMMARY}" + echo "" >> "${GITHUB_STEP_SUMMARY}" diff --git a/.github/workflows/hadolint.yml b/.github/workflows/hadolint.yml index 787feeec..240d6dbf 100644 --- a/.github/workflows/hadolint.yml +++ b/.github/workflows/hadolint.yml @@ -1,21 +1,17 @@ name: Hadolint +permissions: {} -on: [ - push, - pull_request - ] +on: [ push, pull_request ] jobs: hadolint: name: Validate Dockerfile syntax + permissions: + contents: read runs-on: ubuntu-24.04 timeout-minutes: 30 - steps: - # Checkout the repo - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - # End Checkout the repo + steps: # Start Docker Buildx - name: Setup Docker Buildx uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 @@ -37,6 +33,12 @@ jobs: env: HADOLINT_VERSION: 2.12.0 # End Download hadolint + # Checkout the repo + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + with: + persist-credentials: false + # End Checkout the repo # Test Dockerfiles with hadolint - name: Run hadolint diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b760f207..d155c159 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,5 @@ name: Release +permissions: {} on: push: branches: - main tags: - - '*' + # https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet + - '[1-2].[0-9]+.[0-9]+' jobs: # https://github.com/marketplace/actions/skip-duplicate-actions # Some checks to determine if we need to continue with building a new Docker image. # We will skip this check if we are creating a tag, because that has the same hash as a previous run already. 
skip_check: - runs-on: ubuntu-24.04 + # Only run this in the upstream repo and not on forks if: ${{ github.repository == 'dani-garcia/vaultwarden' }} + name: Cancel older jobs when running + permissions: + actions: write + runs-on: ubuntu-24.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: - name: Skip Duplicates Actions id: skip_check @@ -27,6 +34,9 @@ jobs: if: ${{ github.ref_type == 'branch' }} docker-build: + needs: skip_check + if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }} + name: Build Vaultwarden containers permissions: packages: write contents: read @@ -34,8 +44,6 @@ jobs: id-token: write runs-on: ubuntu-24.04 timeout-minutes: 120 - needs: skip_check - if: ${{ needs.skip_check.outputs.should_skip != 'true' && github.repository == 'dani-garcia/vaultwarden' }} # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them services: registry: @@ -61,12 +69,6 @@ jobs: base_image: ["debian","alpine"] steps: - # Checkout the repo - - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - with: - fetch-depth: 0 - - name: Initialize QEMU binfmt support uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 with: @@ -78,20 +80,31 @@ jobs: # https://github.com/moby/buildkit/issues/3969 # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills with: + cache-binary: false buildkitd-config-inline: | [worker.oci] max-parallelism = 2 driver-opts: | network=host + # Checkout the repo + - name: Checkout + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + # We need fetch-depth of 0 so we also get all the tag metadata + with: + persist-credentials: false + fetch-depth: 0 + # Determine Base Tags and Source Version - name: Determine Base Tags and Source Version shell: bash + env: + REF_TYPE: ${{ github.ref_type }} run: | - # Check which main tag we are going to build determined by github.ref_type - if [[ "${{ github.ref_type }}" == "tag" ]]; then + # Check which main tag we are going to build determined by ref_type + if [[ "${REF_TYPE}" == "tag" ]]; then echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}" - elif [[ "${{ github.ref_type }}" == "branch" ]]; then + elif [[ "${REF_TYPE}" == "branch" ]]; then echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}" fi @@ -116,8 +129,10 @@ jobs: - name: Add registry for DockerHub if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} shell: bash + env: + DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} run: | - echo "CONTAINER_REGISTRIES=${{ vars.DOCKERHUB_REPO }}" | tee -a "${GITHUB_ENV}" + echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}" # Login to GitHub Container Registry - name: Login to GitHub Container Registry @@ -131,8 +146,10 @@ jobs: - name: Add registry for ghcr.io if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} shell: bash + env: + GHCR_REPO: ${{ vars.GHCR_REPO }} run: | - echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}" + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}" # Login to Quay.io - name: Login to Quay.io @@ -146,17 +163,22 @@ jobs: - name: Add registry for Quay.io if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} shell: bash + env: + QUAY_REPO: ${{ vars.QUAY_REPO }} run: | - echo 
"CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.QUAY_REPO }}" | tee -a "${GITHUB_ENV}" + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" - name: Configure build cache from/to shell: bash + env: + GHCR_REPO: ${{ vars.GHCR_REPO }} + BASE_IMAGE: ${{ matrix.base_image }} run: | # # Check if there is a GitHub Container Registry Login and use it for caching if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then - echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}" - echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" else echo "BAKE_CACHE_FROM=" echo "BAKE_CACHE_TO=" @@ -170,7 +192,7 @@ jobs: - name: Bake ${{ matrix.base_image }} containers id: bake_vw - uses: docker/bake-action@5ca506d06f70338a4968df87fd8bfee5cbfb84c7 # v6.0.0 + uses: docker/bake-action@7bff531c65a5cda33e52e43950a795b91d450f63 # v6.3.0 env: BASE_TAGS: "${{ env.BASE_TAGS }}" SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" @@ -189,14 +211,16 @@ jobs: - name: Extract digest SHA shell: bash + env: + BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} run: | - GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< '${{ steps.bake_vw.outputs.metadata }}')" + GET_DIGEST_SHA="$(jq -r '.["${{ matrix.base_image }}-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" # Attest container images - name: Attest - docker.io - ${{ matrix.base_image }} if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 with: subject-name: ${{ vars.DOCKERHUB_REPO }} subject-digest: ${{ env.DIGEST_SHA }} @@ -204,7 +228,7 @@ jobs: - name: Attest - ghcr.io - ${{ matrix.base_image }} if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 with: subject-name: ${{ vars.GHCR_REPO }} subject-digest: ${{ env.DIGEST_SHA }} @@ -212,7 +236,7 @@ jobs: - name: Attest - quay.io - ${{ matrix.base_image }} if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 with: subject-name: ${{ vars.QUAY_REPO }} subject-digest: ${{ env.DIGEST_SHA }} @@ -222,11 +246,13 @@ jobs: # Extract the Alpine binaries from the containers - name: Extract binaries shell: bash + env: + REF_TYPE: ${{ github.ref_type }} run: | - # Check which main tag we are going to build determined by github.ref_type - if [[ "${{ github.ref_type }}" == "tag" ]]; then + # Check which main tag we are going to build determined by ref_type + if [[ "${REF_TYPE}" == "tag" ]]; then EXTRACT_TAG="latest" - elif [[ "${{ github.ref_type }}" == 
"branch" ]]; then + elif [[ "${REF_TYPE}" == "branch" ]]; then EXTRACT_TAG="testing" fi @@ -264,31 +290,31 @@ jobs: # Upload artifacts to Github Actions and Attest the binaries - name: "Upload amd64 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} path: vaultwarden-amd64-${{ matrix.base_image }} - name: "Upload arm64 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} path: vaultwarden-arm64-${{ matrix.base_image }} - name: "Upload armv7 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} path: vaultwarden-armv7-${{ matrix.base_image }} - name: "Upload armv6 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b #v4.5.0 + uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0 with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} path: vaultwarden-armv6-${{ matrix.base_image }} - name: "Attest artifacts ${{ matrix.base_image }}" - uses: actions/attest-build-provenance@7668571508540a607bdfd90a87a560489fe372eb # v2.1.0 + uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 with: subject-path: vaultwarden-* # End Upload artifacts to Github Actions diff --git a/.github/workflows/releasecache-cleanup.yml b/.github/workflows/releasecache-cleanup.yml index 6fd880bb..f62fccd3 100644 --- a/.github/workflows/releasecache-cleanup.yml +++ b/.github/workflows/releasecache-cleanup.yml @@ -1,3 +1,6 @@ +name: Cleanup +permissions: {} + on: workflow_dispatch: inputs: @@ -9,10 +12,11 @@ on: schedule: - cron: '0 1 * * FRI' -name: Cleanup jobs: releasecache-cleanup: name: Releasecache Cleanup + permissions: + packages: write runs-on: ubuntu-24.04 continue-on-error: true timeout-minutes: 30 diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml index 4481ec6a..6cba5df4 100644 --- a/.github/workflows/trivy.yml +++ b/.github/workflows/trivy.yml @@ -1,34 +1,39 @@ -name: trivy +name: Trivy +permissions: {} on: push: branches: - main + tags: - '*' + pull_request: - branches: [ "main" ] + branches: + - main + schedule: - cron: '08 11 * * *' -permissions: - contents: read - jobs: trivy-scan: - # Only run this in the master repo and not on forks + # Only run this in the upstream repo and not on forks # When all forks run this at the same time, it is causing `Too Many Requests` issues if: ${{ github.repository == 'dani-garcia/vaultwarden' }} - name: Check - runs-on: ubuntu-24.04 - timeout-minutes: 30 + name: Trivy Scan permissions: contents: read - security-events: write actions: read + security-events: write + runs-on: ubuntu-24.04 + timeout-minutes: 30 + steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 + with: + persist-credentials: false - name: Run Trivy vulnerability scanner uses: 
aquasecurity/trivy-action@18f2510ee396bbf400402947b394f2dd8c87dbb0 # v0.29.0 From d2b36642a6d5732c1630b391e1d6dd30b245e918 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Feb 2025 02:01:06 +0200 Subject: [PATCH 2/4] Update crates & fix CVE-2025-24898 (#5538) --- Cargo.lock | 32 ++++++++++++++++---------------- Cargo.toml | 8 ++++---- macros/Cargo.toml | 2 +- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d220bea..77eabf5b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -447,9 +447,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "cached" @@ -754,18 +754,18 @@ dependencies = [ [[package]] name = "derive_more" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +checksum = "71158d5e914dec8a242751a3fc516b03ed3e6772ce9de79e1aeea6420663cad4" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = "1.0.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" +checksum = "9e04e066e440d7973a852a3acdc25b0ae712bb6d311755fbf773d6a4518b2226" dependencies = [ "proc-macro2", "quote", @@ -1928,9 +1928,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lettre" -version = "0.11.11" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab4c9a167ff73df98a5ecc07e8bf5ce90b583665da3d1762eb1f775ad4d0d6f5" +checksum = "e882e1489810a45919477602194312b1a7df0e5acc30a6188be7b520268f63f8" dependencies = [ "async-std", "async-trait", @@ -2322,9 +2322,9 @@ checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "openssl" -version = "0.10.69" +version = "0.10.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5e534d133a060a3c19daec1eb3e98ec6f4685978834f2dbadfe2ec215bab64e" +checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6" dependencies = [ "bitflags", "cfg-if", @@ -2363,9 +2363,9 @@ dependencies = [ [[package]] name = "openssl-sys" -version = "0.9.104" +version = "0.9.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" +checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc" dependencies = [ "cc", "libc", @@ -3520,9 +3520,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.96" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -4572,9 +4572,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index 867a05d1..2542e3c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -82,7 +82,7 @@ diesel = { version = "2.2.7", features = ["chrono", "r2d2", "numeric"] } diesel_migrations = "2.2.0" diesel_logger = { version = "0.4.0", optional = true } -derive_more = { version = "1.0.0", features = ["from", "into", "as_ref", "deref", "display"] } +derive_more = { version = "2.0.0", features = ["from", "into", "as_ref", "deref", "display"] } diesel-derive-newtype = "2.1.2" # Bundled/Static SQLite @@ -122,7 +122,7 @@ webauthn-rs = "0.3.2" url = "2.5.4" # Email libraries -lettre = { version = "0.11.11", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } +lettre = { version = "0.11.12", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails email_address = "0.2.9" @@ -137,7 +137,7 @@ hickory-resolver = "0.24.2" html5gum = "0.7.0" regex = { version = "1.11.1", features = ["std", "perf", "unicode-perl"], default-features = false } data-url = "0.3.1" -bytes = "1.9.0" +bytes = "1.10.0" # Cache function results (Used for version check and favicon fetching) cached = { version = "0.54.0", features = ["async"] } @@ -147,7 +147,7 @@ cookie = "0.18.1" cookie_store = "0.21.1" # Used by U2F, JWT and PostgreSQL -openssl = "0.10.69" +openssl = "0.10.70" # CLI argument parsing pico-args = "0.5.0" diff --git a/macros/Cargo.toml b/macros/Cargo.toml index 3beea5d8..323f198d 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -10,4 +10,4 @@ proc-macro = true [dependencies] quote = "1.0.38" -syn = "2.0.96" +syn = "2.0.98" From 3b6bccde973d4de86dc7d31d109256911cc64f67 Mon Sep 17 00:00:00 2001 From: Stefan Melmuk <509385+stefan0xC@users.noreply.github.com> Date: Tue, 4 Feb 2025 09:42:02 +0100 Subject: [PATCH 3/4] add bulk-access endpoint for collections (#5542) --- src/api/core/organizations.rs | 128 ++++++++++++++++++++++++---------- 1 file changed, 93 insertions(+), 35 deletions(-) diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 624ff590..aabcc5e2 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -38,6 +38,7 @@ pub fn routes() -> Vec { post_organization_collections, delete_organization_collection_member, post_organization_collection_delete_member, + post_bulk_access_collections, post_organization_collection_update, put_organization_collection_update, delete_organization_collection, @@ -129,17 +130,17 @@ struct OrganizationUpdateData { #[derive(Deserialize)] #[serde(rename_all = "camelCase")] -struct NewCollectionData { +struct FullCollectionData { name: String, - groups: Vec, - users: Vec, + groups: Vec, + users: Vec, id: Option, external_id: Option, } #[derive(Deserialize)] #[serde(rename_all = "camelCase")] -struct NewCollectionGroupData { +struct CollectionGroupData { hide_passwords: bool, id: GroupId, read_only: bool, @@ -148,7 +149,7 @@ struct NewCollectionGroupData { #[derive(Deserialize)] #[serde(rename_all = "camelCase")] -struct NewCollectionMemberData { +struct CollectionMembershipData { hide_passwords: bool, id: MembershipId, read_only: bool, @@ -429,13 
+430,13 @@ async fn _get_org_collections(org_id: &OrganizationId, conn: &mut DbConn) -> Val async fn post_organization_collections( org_id: OrganizationId, headers: ManagerHeadersLoose, - data: Json, + data: Json, mut conn: DbConn, ) -> JsonResult { if org_id != headers.membership.org_uuid { err!("Organization not found", "Organization id's do not match"); } - let data: NewCollectionData = data.into_inner(); + let data: FullCollectionData = data.into_inner(); let Some(org) = Organization::find_by_uuid(&org_id, &mut conn).await else { err!("Can't find organization details") @@ -488,29 +489,104 @@ async fn post_organization_collections( Ok(Json(collection.to_json_details(&headers.membership.user_uuid, None, &mut conn).await)) } +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct BulkCollectionAccessData { + collection_ids: Vec, + groups: Vec, + users: Vec, +} + +#[post("/organizations//collections/bulk-access", data = "", rank = 1)] +async fn post_bulk_access_collections( + org_id: OrganizationId, + headers: ManagerHeadersLoose, + data: Json, + mut conn: DbConn, +) -> EmptyResult { + if org_id != headers.membership.org_uuid { + err!("Organization not found", "Organization id's do not match"); + } + let data: BulkCollectionAccessData = data.into_inner(); + + if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() { + err!("Can't find organization details") + }; + + for col_id in data.collection_ids { + let Some(collection) = Collection::find_by_uuid_and_org(&col_id, &org_id, &mut conn).await else { + err!("Collection not found") + }; + + // update collection modification date + collection.save(&mut conn).await?; + + log_event( + EventType::CollectionUpdated as i32, + &collection.uuid, + &org_id, + &headers.user.uuid, + headers.device.atype, + &headers.ip.ip, + &mut conn, + ) + .await; + + CollectionGroup::delete_all_by_collection(&col_id, &mut conn).await?; + for group in &data.groups { + CollectionGroup::new(col_id.clone(), group.id.clone(), group.read_only, group.hide_passwords, group.manage) + .save(&mut conn) + .await?; + } + + CollectionUser::delete_all_by_collection(&col_id, &mut conn).await?; + for user in &data.users { + let Some(member) = Membership::find_by_uuid_and_org(&user.id, &org_id, &mut conn).await else { + err!("User is not part of organization") + }; + + if member.access_all { + continue; + } + + CollectionUser::save( + &member.user_uuid, + &col_id, + user.read_only, + user.hide_passwords, + user.manage, + &mut conn, + ) + .await?; + } + } + + Ok(()) +} + #[put("/organizations//collections/", data = "")] async fn put_organization_collection_update( org_id: OrganizationId, col_id: CollectionId, headers: ManagerHeaders, - data: Json, + data: Json, conn: DbConn, ) -> JsonResult { post_organization_collection_update(org_id, col_id, headers, data, conn).await } -#[post("/organizations//collections/", data = "")] +#[post("/organizations//collections/", data = "", rank = 2)] async fn post_organization_collection_update( org_id: OrganizationId, col_id: CollectionId, headers: ManagerHeaders, - data: Json, + data: Json, mut conn: DbConn, ) -> JsonResult { if org_id != headers.org_id { err!("Organization not found", "Organization id's do not match"); } - let data: NewCollectionData = data.into_inner(); + let data: FullCollectionData = data.into_inner(); if Organization::find_by_uuid(&org_id, &mut conn).await.is_none() { err!("Can't find organization details") @@ -781,7 +857,7 @@ async fn get_collection_users( async fn put_collection_users( org_id: 
OrganizationId, col_id: CollectionId, - data: Json>, + data: Json>, headers: ManagerHeaders, mut conn: DbConn, ) -> EmptyResult { @@ -913,24 +989,6 @@ async fn post_org_keys( }))) } -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct CollectionData { - id: CollectionId, - read_only: bool, - hide_passwords: bool, - manage: bool, -} - -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct MembershipData { - id: MembershipId, - read_only: bool, - hide_passwords: bool, - manage: bool, -} - #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct InviteData { @@ -1754,7 +1812,7 @@ use super::ciphers::CipherData; #[serde(rename_all = "camelCase")] struct ImportData { ciphers: Vec, - collections: Vec, + collections: Vec, collection_relationships: Vec, } @@ -2549,7 +2607,7 @@ struct GroupRequest { #[serde(default)] access_all: bool, external_id: Option, - collections: Vec, + collections: Vec, users: Vec, } @@ -2570,14 +2628,14 @@ impl GroupRequest { #[derive(Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -struct SelectedCollection { +struct CollectionData { id: CollectionId, read_only: bool, hide_passwords: bool, manage: bool, } -impl SelectedCollection { +impl CollectionData { pub fn to_collection_group(&self, groups_uuid: GroupId) -> CollectionGroup { CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords, self.manage) } @@ -2660,7 +2718,7 @@ async fn put_group( async fn add_update_group( mut group: Group, - collections: Vec, + collections: Vec, members: Vec, org_id: OrganizationId, headers: &AdminHeaders, From 8d1df08b81e1e0eea28e480de236dc0501674edc Mon Sep 17 00:00:00 2001 From: Mathijs van Veluw Date: Tue, 4 Feb 2025 13:20:32 +0100 Subject: [PATCH 4/4] Fix icon redirect not working on desktop (#5536) * Fix icon redirect not working on desktop We also need to exclude the header in case we do an external_icon call. Fixes #5535 Signed-off-by: BlackDex * Add informational comments to the icon_external function Signed-off-by: BlackDex * Fix spelling/grammar Signed-off-by: BlackDex --------- Signed-off-by: BlackDex --- src/api/icons.rs | 3 +++ src/util.rs | 12 ++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/src/api/icons.rs b/src/api/icons.rs index fc4e0ccf..0b437d53 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -63,6 +63,9 @@ static CLIENT: Lazy = Lazy::new(|| { // Build Regex only once since this takes a lot of time. static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); +// The function name `icon_external` is checked in the `on_response` function in `AppHeaders` +// It is used to prevent sending a specific header which breaks icon downloads. 
+// If this function needs to be renamed, also adjust the code in `util.rs` #[get("/<domain>/icon.png")] fn icon_external(domain: &str) -> Option<Redirect> { if !is_valid_domain(domain) { diff --git a/src/util.rs b/src/util.rs index ecd079cf..1f8d1c27 100644 --- a/src/util.rs +++ b/src/util.rs @@ -56,13 +56,17 @@ impl Fairing for AppHeaders { res.set_raw_header("X-Content-Type-Options", "nosniff"); res.set_raw_header("X-Robots-Tag", "noindex, nofollow"); - if !res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/")) { - res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin"); - } - // Obsolete in modern browsers, unsafe (XS-Leak), and largely replaced by CSP res.set_raw_header("X-XSS-Protection", "0"); + // The `Cross-Origin-Resource-Policy` header should not be set on images or on the `icon_external` route. + // Otherwise some clients, like the Bitwarden Desktop, will fail to download the icons + if !(res.headers().get_one("Content-Type").is_some_and(|v| v.starts_with("image/")) + || req.route().is_some_and(|v| v.name.as_deref() == Some("icon_external"))) + { + res.set_raw_header("Cross-Origin-Resource-Policy", "same-origin"); + } + // Do not send the Content-Security-Policy (CSP) Header and X-Frame-Options for the *-connector.html files. // This can cause issues when some MFA requests, like WebAuthn or Duo, need to open a popup or page within the clients. // This is the same behavior as upstream Bitwarden.
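A note on the pattern behind the workflow hardening in PATCH 1/4: zizmor flags GitHub Actions expressions (`${{ ... }}`) that are expanded directly inside `run:` scripts, because the expansion happens textually before the shell executes, so attacker-influenced values such as branch names or PR titles can be interpreted as shell code. The fix applied throughout these workflows is to route each value through an `env:` block and quote it in the script. A minimal before/after sketch of the pattern (hypothetical step names, not taken from the diffs above):

```yaml
# Unsafe: the expression is substituted into the script before the
# shell runs, so a crafted ref name can inject arbitrary commands.
- name: Show ref (unsafe)
  run: echo "Building ${{ github.ref_name }}"

# Safe: the value reaches the shell only as an environment variable,
# and the quoted expansion keeps it inert data rather than code.
- name: Show ref (safe)
  env:
    REF_NAME: ${{ github.ref_name }}
  run: echo "Building ${REF_NAME}"
```

The same rationale drives the other recurring edits in that patch: a top-level `permissions: {}` with explicit per-job grants narrows the default token scope, and `persist-credentials: false` on `actions/checkout` keeps the workflow token out of the checked-out repository's git config, where later steps could otherwise read it.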