From bb41f64c0ad45a142c1cac7b83422ab30c7955d7 Mon Sep 17 00:00:00 2001 From: Daniel Date: Sun, 23 Nov 2025 22:48:23 +0200 Subject: [PATCH 1/5] Switch to multiple runners per arch (#6472) - now uses arm64 native runners for faster compilation --- .github/workflows/release.yml | 300 +++++++++++++++++++++++----------- docker/DockerSettings.yaml | 1 - docker/Dockerfile.alpine | 12 +- docker/Dockerfile.j2 | 6 +- 4 files changed, 215 insertions(+), 104 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 35c6d3ac..95220f42 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,6 +16,23 @@ concurrency: # Don't cancel other runs when creating a tag cancel-in-progress: ${{ github.ref_type == 'branch' }} +defaults: + run: + shell: bash + +env: + # The *_REPO variables need to be configured as repository variables + # Append `/settings/variables/actions` to your repo url + # DOCKERHUB_REPO needs to be 'index.docker.io//' + # Check for Docker hub credentials in secrets + HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }} + # GHCR_REPO needs to be 'ghcr.io//' + # Check for Github credentials in secrets + HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }} + # QUAY_REPO needs to be 'quay.io//' + # Check for Quay.io credentials in secrets + HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }} + jobs: docker-build: name: Build Vaultwarden containers @@ -25,7 +42,7 @@ jobs: contents: read attestations: write # Needed to generate an artifact attestation for a build id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate - runs-on: ubuntu-24.04 + runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }} timeout-minutes: 120 # Start a local docker registry to extract 
the compiled binaries to upload as artifacts and attest them services: @@ -36,20 +53,12 @@ jobs: env: SOURCE_COMMIT: ${{ github.sha }} SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}" - # The *_REPO variables need to be configured as repository variables - # Append `/settings/variables/actions` to your repo url - # DOCKERHUB_REPO needs to be 'index.docker.io//' - # Check for Docker hub credentials in secrets - HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }} - # GHCR_REPO needs to be 'ghcr.io//' - # Check for Github credentials in secrets - HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }} - # QUAY_REPO needs to be 'quay.io//' - # Check for Quay.io credentials in secrets - HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }} strategy: matrix: + arch: ["amd64", "arm64", "arm/v7", "arm/v6"] base_image: ["debian","alpine"] + outputs: + base-tags: ${{ steps.determine-version.outputs.BASE_TAGS }} steps: - name: Initialize QEMU binfmt support @@ -78,17 +87,26 @@ jobs: persist-credentials: false fetch-depth: 0 + # Normalize the architecture string for use in paths and cache keys + - name: Normalize architecture string + env: + MATRIX_ARCH: ${{ matrix.arch }} + run: | + # Replace slashes with nothing to create a safe string for paths/cache keys + NORMALIZED_ARCH="${MATRIX_ARCH//\/}" + echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}" + # Determine Base Tags and Source Version - name: Determine Base Tags and Source Version - shell: bash + id: determine-version env: REF_TYPE: ${{ github.ref_type }} run: | # Check which main tag we are going to build determined by ref_type if [[ "${REF_TYPE}" == "tag" ]]; then - echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}" + echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_OUTPUT}" 
elif [[ "${REF_TYPE}" == "branch" ]]; then - echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}" + echo "BASE_TAGS=testing" | tee -a "${GITHUB_OUTPUT}" fi # Get the Source Version for this release @@ -111,7 +129,6 @@ jobs: - name: Add registry for DockerHub if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} - shell: bash env: DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} run: | @@ -128,7 +145,6 @@ jobs: - name: Add registry for ghcr.io if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} - shell: bash env: GHCR_REPO: ${{ vars.GHCR_REPO }} run: | @@ -145,23 +161,22 @@ jobs: - name: Add registry for Quay.io if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} - shell: bash env: QUAY_REPO: ${{ vars.QUAY_REPO }} run: | echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" - name: Configure build cache from/to - shell: bash env: GHCR_REPO: ${{ vars.GHCR_REPO }} BASE_IMAGE: ${{ matrix.base_image }} + NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }} run: | # # Check if there is a GitHub Container Registry Login and use it for caching if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then - echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}" - echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" else echo "BAKE_CACHE_FROM=" echo "BAKE_CACHE_TO=" @@ -169,31 +184,45 @@ jobs: # - name: Add localhost registry - shell: bash run: | echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}" + - name: Generate tags + id: tags + env: + CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" + run: | + # 
Convert comma-separated list to newline-separated set commands + TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|") + + # Output for use in next step + { + echo "TAGS<> "$GITHUB_ENV" + - name: Bake ${{ matrix.base_image }} containers id: bake_vw uses: docker/bake-action@3acf805d94d93a86cce4ca44798a76464a75b88c # v6.9.0 env: - BASE_TAGS: "${{ env.BASE_TAGS }}" + BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}" SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" SOURCE_VERSION: "${{ env.SOURCE_VERSION }}" SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}" - CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" with: pull: true - push: true source: . files: docker/docker-bake.hcl targets: "${{ matrix.base_image }}-multi" set: | *.cache-from=${{ env.BAKE_CACHE_FROM }} *.cache-to=${{ env.BAKE_CACHE_TO }} + *.platform=linux/${{ matrix.arch }} + ${{ env.TAGS }} + *.output=type=image,push-by-digest=true,name-canonical=true,push=true,compression=zstd - name: Extract digest SHA - shell: bash env: BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} BASE_IMAGE: ${{ matrix.base_image }} @@ -201,38 +230,30 @@ jobs: GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" - # Attest container images - - name: Attest - docker.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 - with: - subject-name: ${{ vars.DOCKERHUB_REPO }} - subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true - - - name: Attest - ghcr.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 - with: - subject-name: ${{ vars.GHCR_REPO }} - 
subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true + - name: Export digest + env: + DIGEST_SHA: ${{ env.DIGEST_SHA }} + RUNNER_TEMP: ${{ runner.temp }} + run: | + mkdir -p "${RUNNER_TEMP}"/digests + digest="${DIGEST_SHA}" + touch "${RUNNER_TEMP}/digests/${digest#sha256:}" - - name: Attest - quay.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + - name: Upload digest + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: - subject-name: ${{ vars.QUAY_REPO }} - subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true - + name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }} + path: ${{ runner.temp }}/digests/* + if-no-files-found: error + retention-days: 1 # Extract the Alpine binaries from the containers - name: Extract binaries - shell: bash env: REF_TYPE: ${{ github.ref_type }} BASE_IMAGE: ${{ matrix.base_image }} + DIGEST_SHA: ${{ env.DIGEST_SHA }} + NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }} run: | # Check which main tag we are going to build determined by ref_type if [[ "${REF_TYPE}" == "tag" ]]; then @@ -246,60 +267,151 @@ jobs: EXTRACT_TAG="${EXTRACT_TAG}-alpine" fi - # After each extraction the image is removed. 
- # This is needed because using different platforms doesn't trigger a new pull/download - - # Extract amd64 binary - docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE} - docker rm --force amd64 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract arm64 binary - docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE} - docker rm --force arm64 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract armv7 binary - docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE} - docker rm --force armv7 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract armv6 binary - docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE} - docker rm --force armv6 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" + CONTAINER_ID="$(docker create "localhost:5000/vaultwarden/server:${EXTRACT_TAG}@${DIGEST_SHA}")" + + # Copy the binary + docker cp "$CONTAINER_ID":/vaultwarden vaultwarden-"${NORMALIZED_ARCH}" + + # Clean up + docker rm "$CONTAINER_ID" # Upload artifacts to Github Actions and Attest the binaries - - name: "Upload amd64 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + - name: Attest binaries + uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} - path: vaultwarden-amd64-${{ matrix.base_image }} + subject-path: vaultwarden-${{ 
env.NORMALIZED_ARCH }} - - name: "Upload arm64 artifact ${{ matrix.base_image }}" + - name: Upload binaries as artifacts uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} - path: vaultwarden-arm64-${{ matrix.base_image }} + name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }} + path: vaultwarden-${{ env.NORMALIZED_ARCH }} - - name: "Upload armv7 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + merge-manifests: + name: Merge manifests + runs-on: ubuntu-latest + needs: docker-build + + env: + BASE_TAGS: ${{ needs.docker-build.outputs.base-tags }} + + permissions: + packages: write # Needed to upload packages and artifacts + attestations: write # Needed to generate an artifact attestation for a build + id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate + + strategy: + matrix: + base_image: ["debian","alpine"] + + steps: + - name: Download digests + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} - path: vaultwarden-armv7-${{ matrix.base_image }} + path: ${{ runner.temp }}/digests + pattern: digests-*-${{ matrix.base_image }} + merge-multiple: true - - name: "Upload armv6 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + # Login to Docker Hub + - name: Login to Docker Hub + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} - path: vaultwarden-armv6-${{ matrix.base_image }} + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + if: ${{ 
env.HAVE_DOCKERHUB_LOGIN == 'true' }} - - name: "Attest artifacts ${{ matrix.base_image }}" + - name: Add registry for DockerHub + if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} + env: + DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}" + + # Login to GitHub Container Registry + - name: Login to GitHub Container Registry + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} + + - name: Add registry for ghcr.io + if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} + env: + GHCR_REPO: ${{ vars.GHCR_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}" + + # Login to Quay.io + - name: Login to Quay.io + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: quay.io + username: ${{ secrets.QUAY_USERNAME }} + password: ${{ secrets.QUAY_TOKEN }} + if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} + + - name: Add registry for Quay.io + if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} + env: + QUAY_REPO: ${{ vars.QUAY_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" + + - name: Create manifest list, push it and extract digest SHA + working-directory: ${{ runner.temp }}/digests + env: + BASE_IMAGE: "${{ matrix.base_image }}" + BASE_TAGS: "${{ env.BASE_TAGS }}" + CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" + run: | + set +e + IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}" + for img in "${IMAGES[@]}"; do + echo "Creating manifest for $img:${BASE_TAGS}-${BASE_IMAGE}" + + OUTPUT=$(docker buildx imagetools create \ + -t "$img:${BASE_TAGS}-${BASE_IMAGE}" \ + $(printf "$img:${BASE_TAGS}-${BASE_IMAGE}@sha256:%s " *) 2>&1) + STATUS=$? 
+ + if [ $STATUS -ne 0 ]; then + echo "Manifest creation failed for $img" + echo "$OUTPUT" + exit $STATUS + fi + + echo "Manifest created for $img" + echo "$OUTPUT" + done + set -e + + # Extract digest SHA for subsequent steps + GET_DIGEST_SHA="$(echo "$OUTPUT" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)" + echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" + + # Attest container images + - name: Attest - docker.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}} + uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + with: + subject-name: ${{ vars.DOCKERHUB_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true + + - name: Attest - ghcr.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}} uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 with: - subject-path: vaultwarden-* - # End Upload artifacts to Github Actions + subject-name: ${{ vars.GHCR_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true + + - name: Attest - quay.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}} + uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + with: + subject-name: ${{ vars.QUAY_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true diff --git a/docker/DockerSettings.yaml b/docker/DockerSettings.yaml index 1707affe..119dbf9f 100644 --- a/docker/DockerSettings.yaml +++ b/docker/DockerSettings.yaml @@ -17,7 +17,6 @@ build_stage_image: platform: "$BUILDPLATFORM" alpine: image: "build_${TARGETARCH}${TARGETVARIANT}" - platform: "linux/amd64" # The Alpine build images only have linux/amd64 images arch_image: amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}" arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}" diff --git 
a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine index 0b0a7c10..f593e279 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -30,16 +30,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault ########################## ALPINE BUILD IMAGES ########################## -## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 +## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64 ## And for Alpine we define all build images here, they will only be loaded when actually used -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6 ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build +FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build ARG TARGETARCH ARG TARGETVARIANT ARG TARGETPLATFORM diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 4816dacb..0501b3ff 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -36,16 +36,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_diges
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx {% elif base == "alpine" %} ########################## ALPINE BUILD IMAGES ########################## -## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 +## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64 ## And for Alpine we define all build images here, they will only be loaded when actually used {% for arch in build_stage_image[base].arch_image %} -FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }} +FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }} {% endfor %} {% endif %} ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build +FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build {% if base == "debian" %} COPY --from=xx / / {% endif %} From 7f7b412220822d1e6a396e0174d1f577cfce839f Mon Sep 17 00:00:00 2001 From: Mathijs van Veluw Date: Sun, 23 Nov 2025 21:50:31 +0100 Subject: [PATCH 2/5] Fix icon redirect caching (#6487) As reported in #6477, redirection of favicons didn't allow caching. This commit fixes this by adding the `Cached` wrapper around the response. It will use the same TTLs used for downloading icons locally. Also removed `_` as a valid domain character; these should not be used in FQDNs at all. Those only serve as special chars used in domain labels, mostly used in SRV or TXT records.
Fixes #6477 Signed-off-by: BlackDex --- src/api/icons.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/api/icons.rs b/src/api/icons.rs index 4e2aef1c..5003a421 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -82,19 +82,19 @@ static ICON_SIZE_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"(?x)(\d+ // It is used to prevent sending a specific header which breaks icon downloads. // If this function needs to be renamed, also adjust the code in `util.rs` #[get("//icon.png")] -fn icon_external(domain: &str) -> Option { +fn icon_external(domain: &str) -> Cached> { if !is_valid_domain(domain) { warn!("Invalid domain: {domain}"); - return None; + return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); } if should_block_address(domain) { warn!("Blocked address: {domain}"); - return None; + return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); } let url = CONFIG._icon_service_url().replace("{}", domain); - match CONFIG.icon_redirect_code() { + let redir = match CONFIG.icon_redirect_code() { 301 => Some(Redirect::moved(url)), // legacy permanent redirect 302 => Some(Redirect::found(url)), // legacy temporary redirect 307 => Some(Redirect::temporary(url)), @@ -103,7 +103,8 @@ fn icon_external(domain: &str) -> Option { error!("Unexpected redirect code {}", CONFIG.icon_redirect_code()); None } - } + }; + Cached::ttl(redir, CONFIG.icon_cache_ttl(), true) } #[get("//icon.png")] @@ -141,7 +142,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { /// This does some manual checks and makes use of Url to do some basic checking. /// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255. fn is_valid_domain(domain: &str) -> bool { - const ALLOWED_CHARS: &str = "_-."; + const ALLOWED_CHARS: &str = "-."; // If parsing the domain fails using Url, it will not work with reqwest. 
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) { From 35e1a306f3399e3caf33791a759f16e47d3339e1 Mon Sep 17 00:00:00 2001 From: Timshel Date: Sun, 23 Nov 2025 21:54:37 +0100 Subject: [PATCH 3/5] Fix around singleorg policy (#6247) Co-authored-by: Timshel --- src/api/admin.rs | 21 +----- src/api/core/mod.rs | 23 +----- src/api/core/organizations.rs | 75 ++++--------------- src/db/models/mod.rs | 2 +- src/db/models/org_policy.rs | 60 +++++++-------- src/db/models/organization.rs | 7 +- .../send_single_org_removed_from_org.hbs | 4 +- .../send_single_org_removed_from_org.html.hbs | 4 +- 8 files changed, 63 insertions(+), 133 deletions(-) diff --git a/src/api/admin.rs b/src/api/admin.rs index 8b6101fb..d36da8f9 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -23,7 +23,7 @@ use crate::{ backup_sqlite, get_sql_server_version, models::{ Attachment, Cipher, Collection, Device, Event, EventType, Group, Invitation, Membership, MembershipId, - MembershipType, OrgPolicy, OrgPolicyErr, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId, + MembershipType, OrgPolicy, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId, }, DbConn, DbConnType, ACTIVE_DB_TYPE, }, @@ -556,23 +556,9 @@ async fn update_membership_type(data: Json, token: AdminToke } } + member_to_edit.atype = new_type; // This check is also done at api::organizations::{accept_invite, _confirm_invite, _activate_member, edit_member}, update_membership_type - // It returns different error messages per function. 
- if new_type < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &member_to_edit.org_uuid, true, &conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?; - } else { - err!("You cannot modify this user to this type because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot modify this user to this type because it is a member of an organization which forbids it"); - } - } - } + OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?; log_event( EventType::OrganizationUserUpdated as i32, @@ -585,7 +571,6 @@ async fn update_membership_type(data: Json, token: AdminToke ) .await; - member_to_edit.atype = new_type; member_to_edit.save(&conn).await } diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index d5ca0cc9..173a06b6 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -53,7 +53,7 @@ use crate::{ api::{EmptyResult, JsonResult, Notify, UpdateType}, auth::Headers, db::{ - models::{Membership, MembershipStatus, MembershipType, OrgPolicy, OrgPolicyErr, Organization, User}, + models::{Membership, MembershipStatus, OrgPolicy, Organization, User}, DbConn, }, error::Error, @@ -269,27 +269,12 @@ async fn accept_org_invite( err!("User already accepted the invitation"); } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. 
- if member.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if crate::CONFIG.email_2fa_auto_fallback() { - two_factor::email::activate_email_2fa(user, conn).await?; - } else { - err!("You cannot join this organization until you enable two-step login on your user account"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot join this organization because you are a member of an organization which forbids it"); - } - } - } - member.status = MembershipStatus::Accepted as i32; member.reset_password_key = reset_password_key; + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + OrgPolicy::check_user_allowed(&member, "join", conn).await?; + member.save(conn).await?; if crate::CONFIG.mail_enabled() { diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index b8715ab7..e8cca467 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -15,7 +15,7 @@ use crate::{ models::{ Cipher, CipherId, Collection, CollectionCipher, CollectionGroup, CollectionId, CollectionUser, EventType, Group, GroupId, GroupUser, Invitation, Membership, MembershipId, MembershipStatus, MembershipType, - OrgPolicy, OrgPolicyErr, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId, + OrgPolicy, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId, }, DbConn, }, @@ -1463,27 +1463,12 @@ async fn _confirm_invite( err!("User in invalid state") } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. 
- if member_to_confirm.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_confirm.user_uuid, org_id, true, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_confirm.user_uuid, conn).await?; - } else { - err!("You cannot confirm this user because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot confirm this user because they are a member of an organization which forbids it"); - } - } - } - member_to_confirm.status = MembershipStatus::Confirmed as i32; member_to_confirm.akey = key.to_string(); + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + OrgPolicy::check_user_allowed(&member_to_confirm, "confirm", conn).await?; + log_event( EventType::OrganizationUserConfirmed as i32, &member_to_confirm.uuid, @@ -1631,27 +1616,13 @@ async fn edit_member( } } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. 
- if new_type < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &org_id, true, &conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?; - } else { - err!("You cannot modify this user to this type because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot modify this user to this type because they are a member of an organization which forbids it"); - } - } - } - member_to_edit.access_all = access_all; member_to_edit.atype = new_type as i32; + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + // We need to perform the check after changing the type since `admin` is exempt. + OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?; + // Delete all the odd collections for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &member_to_edit.user_uuid, &conn).await { c.delete(&conn).await?; @@ -2154,14 +2125,14 @@ async fn put_policy( // When enabling the SingleOrg policy, remove this org's members that are members of other orgs if pol_type_enum == OrgPolicyType::SingleOrg && data.enabled { - for member in Membership::find_by_org(&org_id, &conn).await.into_iter() { + for mut member in Membership::find_by_org(&org_id, &conn).await.into_iter() { // Policy only applies to non-Owner/non-Admin members who have accepted joining the org // Exclude invited and revoked users when checking for this policy. // Those users will not be allowed to accept or be activated because of the policy checks done there. - // We check if the count is larger then 1, because it includes this organization also. 
if member.atype < MembershipType::Admin && member.status != MembershipStatus::Invited as i32 - && Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &conn).await > 1 + && Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &member.org_uuid, &conn).await + > 0 { if CONFIG.mail_enabled() { let org = Organization::find_by_uuid(&member.org_uuid, &conn).await.unwrap(); @@ -2181,7 +2152,8 @@ async fn put_policy( ) .await; - member.delete(&conn).await?; + member.revoke(); + member.save(&conn).await?; } } } @@ -2628,25 +2600,10 @@ async fn _restore_member( err!("Only owners can restore other owners") } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. - if member.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member.user_uuid, org_id, false, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member.user_uuid, conn).await?; - } else { - err!("You cannot restore this user because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot restore this user because they are a member of an organization which forbids it"); - } - } - } - member.restore(); + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + // This check need to be done after restoring to work with the correct status + OrgPolicy::check_user_allowed(&member, "restore", conn).await?; member.save(conn).await?; log_event( diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index a9406ed0..75c58626 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -27,7 +27,7 @@ pub use self::event::{Event, EventType}; pub use self::favorite::Favorite; pub use self::folder::{Folder, FolderCipher, FolderId}; pub use 
self::group::{CollectionGroup, Group, GroupId, GroupUser}; -pub use self::org_policy::{OrgPolicy, OrgPolicyErr, OrgPolicyId, OrgPolicyType}; +pub use self::org_policy::{OrgPolicy, OrgPolicyId, OrgPolicyType}; pub use self::organization::{ Membership, MembershipId, MembershipStatus, MembershipType, OrgApiKeyId, Organization, OrganizationApiKey, OrganizationId, diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 92665574..9b4c8b34 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -2,10 +2,12 @@ use derive_more::{AsRef, From}; use serde::Deserialize; use serde_json::Value; +use crate::api::core::two_factor; use crate::api::EmptyResult; use crate::db::schema::{org_policies, users_organizations}; use crate::db::DbConn; use crate::error::MapResult; +use crate::CONFIG; use diesel::prelude::*; use super::{Membership, MembershipId, MembershipStatus, MembershipType, OrganizationId, TwoFactor, UserId}; @@ -58,14 +60,6 @@ pub struct ResetPasswordDataModel { pub auto_enroll_enabled: bool, } -pub type OrgPolicyResult = Result<(), OrgPolicyErr>; - -#[derive(Debug)] -pub enum OrgPolicyErr { - TwoFactorMissing, - SingleOrgEnforced, -} - /// Local methods impl OrgPolicy { pub fn new(org_uuid: OrganizationId, atype: OrgPolicyType, enabled: bool, data: String) -> Self { @@ -280,31 +274,35 @@ impl OrgPolicy { false } - pub async fn is_user_allowed( - user_uuid: &UserId, - org_uuid: &OrganizationId, - exclude_current_org: bool, - conn: &DbConn, - ) -> OrgPolicyResult { - // Enforce TwoFactor/TwoStep login - if TwoFactor::find_by_user(user_uuid, conn).await.is_empty() { - match Self::find_by_org_and_type(org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await { - Some(p) if p.enabled => { - return Err(OrgPolicyErr::TwoFactorMissing); + pub async fn check_user_allowed(m: &Membership, action: &str, conn: &DbConn) -> EmptyResult { + if m.atype < MembershipType::Admin && m.status > (MembershipStatus::Invited as i32) { + // Enforce 
TwoFactor/TwoStep login + if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await + { + if p.enabled && TwoFactor::find_by_user(&m.user_uuid, conn).await.is_empty() { + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&m.user_uuid, conn).await?; + } else { + err!(format!("Cannot {} because 2FA is required (membership {})", action, m.uuid)); + } } - _ => {} - }; - } + } + + // Check if the user is part of another Organization with SingleOrg activated + if Self::is_applicable_to_user(&m.user_uuid, OrgPolicyType::SingleOrg, Some(&m.org_uuid), conn).await { + err!(format!( + "Cannot {} because another organization policy forbids it (membership {})", + action, m.uuid + )); + } - // Enforce Single Organization Policy of other organizations user is a member of - // This check here needs to exclude this current org-id, else an accepted user can not be confirmed. - let exclude_org = if exclude_current_org { - Some(org_uuid) - } else { - None - }; - if Self::is_applicable_to_user(user_uuid, OrgPolicyType::SingleOrg, exclude_org, conn).await { - return Err(OrgPolicyErr::SingleOrgEnforced); + if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::SingleOrg, conn).await { + if p.enabled + && Membership::count_accepted_and_confirmed_by_user(&m.user_uuid, &m.org_uuid, conn).await > 0 + { + err!(format!("Cannot {} because the organization policy forbids being part of other organization (membership {})", action, m.uuid)); + } + } } Ok(()) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 640e47e7..0b722ef6 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -883,10 +883,15 @@ impl Membership { }} } - pub async fn count_accepted_and_confirmed_by_user(user_uuid: &UserId, conn: &DbConn) -> i64 { + pub async fn count_accepted_and_confirmed_by_user( + user_uuid: &UserId, + excluded_org: &OrganizationId, + conn: &DbConn, 
+ ) -> i64 { db_run! { conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::org_uuid.ne(excluded_org)) .filter(users_organizations::status.eq(MembershipStatus::Accepted as i32).or(users_organizations::status.eq(MembershipStatus::Confirmed as i32))) .count() .first::(conn) diff --git a/src/static/templates/email/send_single_org_removed_from_org.hbs b/src/static/templates/email/send_single_org_removed_from_org.hbs index ec77cf63..5fe93902 100644 --- a/src/static/templates/email/send_single_org_removed_from_org.hbs +++ b/src/static/templates/email/send_single_org_removed_from_org.hbs @@ -1,4 +1,4 @@ -You have been removed from {{{org_name}}} +Your access to {{{org_name}}} has been revoked -Your user account has been removed from the *{{org_name}}* organization because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before you can re-join this organization you need to leave all other organizations or join with a different account. +Your access to the *{{org_name}}* organization has been revoked because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before your access can be restored you need to leave all other organizations or join with a different account. {{> email/email_footer_text }} diff --git a/src/static/templates/email/send_single_org_removed_from_org.html.hbs b/src/static/templates/email/send_single_org_removed_from_org.html.hbs index e4026628..39527f4e 100644 --- a/src/static/templates/email/send_single_org_removed_from_org.html.hbs +++ b/src/static/templates/email/send_single_org_removed_from_org.html.hbs @@ -1,10 +1,10 @@ -You have been removed from {{{org_name}}} +Your access to {{{org_name}}} has been revoked {{> email/email_header }}
- Your user account has been removed from the {{org_name}} organization because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before you can re-join this organization you need to leave all other organizations or join with a different account. + Your access to the {{org_name}} organization has been revoked because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before your access can be restored you need to leave all other organizations or join with a different account.
From aad1f19b45073d7f670a8310689c3245940d667a Mon Sep 17 00:00:00 2001 From: Stefan Melmuk <509385+stefan0xC@users.noreply.github.com> Date: Sun, 23 Nov 2025 21:55:20 +0100 Subject: [PATCH 4/5] fix email as 2fa provider (#6473) --- src/api/core/two_factor/email.rs | 21 ++++++++++----------- src/db/models/user.rs | 14 +------------- 2 files changed, 11 insertions(+), 24 deletions(-) diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 63e4508b..cc6909af 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -10,7 +10,7 @@ use crate::{ auth::Headers, crypto, db::{ - models::{DeviceId, EventType, TwoFactor, TwoFactorType, User, UserId}, + models::{EventType, TwoFactor, TwoFactorType, User, UserId}, DbConn, }, error::{Error, MapResult}, @@ -24,16 +24,10 @@ pub fn routes() -> Vec { #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct SendEmailLoginData { - #[serde(alias = "DeviceIdentifier")] - device_identifier: DeviceId, - - #[allow(unused)] #[serde(alias = "Email")] - email: Option, - - #[allow(unused)] + email: String, #[serde(alias = "MasterPasswordHash")] - master_password_hash: Option, + master_password_hash: String, } /// User is trying to login and wants to use email 2FA. @@ -45,14 +39,19 @@ async fn send_email_login(data: Json, conn: DbConn) -> Empty use crate::db::models::User; // Get the user - let Some(user) = User::find_by_device_id(&data.device_identifier, &conn).await else { - err!("Cannot find user. Try again.") + let Some(user) = User::find_by_mail(&data.email, &conn).await else { + err!("Username or password is incorrect. Try again.") }; if !CONFIG._enable_email_2fa() { err!("Email 2FA is disabled") } + // Check password + if !user.check_valid_password(&data.master_password_hash) { + err!("Username or password is incorrect. 
Try again.") + } + send_token(&user.uuid, &conn).await?; Ok(()) diff --git a/src/db/models/user.rs b/src/db/models/user.rs index e14c4218..c7f4e1bc 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -1,4 +1,4 @@ -use crate::db::schema::{devices, invitations, sso_users, users}; +use crate::db::schema::{invitations, sso_users, users}; use chrono::{NaiveDateTime, TimeDelta, Utc}; use derive_more::{AsRef, Deref, Display, From}; use diesel::prelude::*; @@ -10,7 +10,6 @@ use super::{ use crate::{ api::EmptyResult, crypto, - db::models::DeviceId, db::DbConn, error::MapResult, sso::OIDCIdentifier, @@ -387,17 +386,6 @@ impl User { }} } - pub async fn find_by_device_id(device_uuid: &DeviceId, conn: &DbConn) -> Option { - db_run! { conn: { - users::table - .inner_join(devices::table.on(devices::user_uuid.eq(users::uuid))) - .filter(devices::uuid.eq(device_uuid)) - .select(users::all_columns) - .first::(conn) - .ok() - }} - } - pub async fn get_all(conn: &DbConn) -> Vec<(Self, Option)> { db_run! 
{ conn: { users::table From 7c7f4f5d4fab0d2b6b3c241bfcd787934f971f98 Mon Sep 17 00:00:00 2001 From: Mathijs van Veluw Date: Sun, 23 Nov 2025 22:03:30 +0100 Subject: [PATCH 5/5] Update crates and Rust version (#6485) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update crates and Rust version - Update all crates (where possible) Adjusted code where needed - Fixed some nightly clippy lints Signed-off-by: BlackDex * Fix some issues/comments Signed-off-by: BlackDex * Update some crates Signed-off-by: BlackDex --------- Signed-off-by: BlackDex Co-authored-by: Daniel GarcĂ­a --- Cargo.lock | 299 ++++++++++++++++++++++-------------- Cargo.toml | 14 +- docker/DockerSettings.yaml | 2 +- docker/Dockerfile.alpine | 8 +- docker/Dockerfile.debian | 2 +- macros/Cargo.toml | 4 +- rust-toolchain.toml | 2 +- src/api/core/sends.rs | 2 +- src/db/models/attachment.rs | 2 +- src/mail.rs | 6 +- src/main.rs | 4 +- src/sso.rs | 28 +++- 12 files changed, 231 insertions(+), 142 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c8ed4a25..fd11484e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -161,9 +161,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.33" +version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2" +checksum = "0e86f6d3dc9dc4352edeea6b8e499e13e3f5dc3b964d7ca5fd411415a3498473" dependencies = [ "compression-codecs", "compression-core", @@ -361,9 +361,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "aws-config" -version = "1.8.10" +version = "1.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1856b1b48b65f71a4dd940b1c0931f9a7b646d4a924b9828ffefc1454714668a" +checksum = "a0149602eeaf915158e14029ba0c78dedb8c08d554b024d54c8f239aab46511d" dependencies = [ "aws-credential-types", "aws-runtime", @@ -391,9 +391,9 @@ 
dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.9" +version = "1.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86590e57ea40121d47d3f2e131bfd873dea15d78dc2f4604f4734537ad9e56c4" +checksum = "b01c9521fa01558f750d183c8c68c81b0155b9d193a4ba7f84c36bd1b6d04a06" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -403,9 +403,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.14" +version = "1.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe0fd441565b0b318c76e7206c8d1d0b0166b3e986cf30e890b61feb6192045" +checksum = "7ce527fb7e53ba9626fc47824f25e256250556c40d8f81d27dd92aa38239d632" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -427,9 +427,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.89.0" +version = "1.90.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c1b1af02288f729e95b72bd17988c009aa72e26dcb59b3200f86d7aea726c9" +checksum = "4f18e53542c522459e757f81e274783a78f8c81acdfc8d1522ee8a18b5fb1c66" dependencies = [ "aws-credential-types", "aws-runtime", @@ -449,9 +449,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.91.0" +version = "1.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e8122301558dc7c6c68e878af918880b82ff41897a60c8c4e18e4dc4d93e9f1" +checksum = "532f4d866012ffa724a4385c82e8dd0e59f0ca0e600f3f22d4c03b6824b34e4a" dependencies = [ "aws-credential-types", "aws-runtime", @@ -471,9 +471,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.91.0" +version = "1.94.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f8090151d4d1e971269957b10dbf287bba551ab812e591ce0516b1c73b75d27" +checksum = "1be6fbbfa1a57724788853a623378223fe828fc4c09b146c992f0c95b6256174" dependencies = [ "aws-credential-types", "aws-runtime", @@ -712,7 +712,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "215ee31f8a88f588c349ce2d20108b2ed96089b96b9c2b03775dc35dd72938e8" dependencies = [ "base64 0.21.7", - "pastey", + "pastey 0.1.1", "serde", ] @@ -828,9 +828,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "bytes-utils" @@ -920,9 +920,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.45" +version = "1.2.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35900b6c8d709fb1d854671ae27aeaa9eec2f8b01b364e1619a40da3e6fe2afe" +checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07" dependencies = [ "find-msvc-tools", "jobserver", @@ -994,9 +994,9 @@ checksum = "b9e769b5c8c8283982a987c6e948e540254f1058d5a74b8794914d4ef5fc2a24" [[package]] name = "compression-codecs" -version = "0.4.32" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b" +checksum = "302266479cb963552d11bd042013a58ef1adc56768016c8b82b4199488f2d4ad" dependencies = [ "brotli", "compression-core", @@ -1008,9 +1008,9 @@ dependencies = [ [[package]] name = "compression-core" -version = "0.4.30" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9b614a5787ef0c8802a55766480563cb3a93b435898c422ed2a359cf811582" +checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" [[package]] name = "concurrent-queue" @@ -1828,9 +1828,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.4" +version = "0.1.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "flate2" @@ -1854,6 +1854,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -2057,9 +2063,9 @@ dependencies = [ [[package]] name = "governor" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444405bbb1a762387aa22dd569429533b54a1d8759d35d3b64cb39b0293eaa19" +checksum = "6e23d5986fd4364c2fb7498523540618b4b8d92eec6c36a02e565f66748e2f79" dependencies = [ "cfg-if", "dashmap 6.1.0", @@ -2067,7 +2073,7 @@ dependencies = [ "futures-timer", "futures-util", "getrandom 0.3.4", - "hashbrown 0.15.5", + "hashbrown 0.16.1", "nonzero_ext", "parking_lot", "portable-atomic", @@ -2085,7 +2091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d9e3df7f0222ce5184154973d247c591d9aadc28ce7a73c6cd31100c9facff6" dependencies = [ "codemap", - "indexmap 2.12.0", + "indexmap 2.12.1", "lasso", "once_cell", "phf 0.11.3", @@ -2114,7 +2120,7 @@ dependencies = [ "futures-core", "futures-sink", "http 1.3.1", - "indexmap 2.12.0", + "indexmap 2.12.1", "slab", "tokio", "tokio-util", @@ -2173,14 +2179,19 @@ checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.1.5", ] [[package]] name = "hashbrown" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", +] [[package]] name = "heck" @@ -2386,9 +2397,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", @@ -2413,7 +2424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ "http 1.3.1", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "rustls 0.23.35", "rustls-native-certs", @@ -2432,7 +2443,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "native-tls", "tokio", @@ -2442,9 +2453,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" +checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ "base64 0.22.1", "bytes", @@ -2453,7 +2464,7 @@ dependencies = [ "futures-util", "http 1.3.1", "http-body 1.0.1", - "hyper 1.7.0", + "hyper 1.8.1", "ipnet", "libc", "percent-encoding", @@ -2611,12 +2622,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" 
dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown 0.16.1", "serde", "serde_core", ] @@ -2697,6 +2708,47 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e" +[[package]] +name = "jiff" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35" +dependencies = [ + "jiff-static", + "jiff-tzdb-platform", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", + "windows-sys 0.61.2", +] + +[[package]] +name = "jiff-static" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "jiff-tzdb" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1283705eb0a21404d2bfd6eef2a7593d240bc42a0bdb39db0ad6fa2ec026524" + +[[package]] +name = "jiff-tzdb-platform" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" +dependencies = [ + "jiff-tzdb", +] + [[package]] name = "job_scheduler_ng" version = "2.4.0" @@ -2743,6 +2795,29 @@ dependencies = [ "simple_asn1", ] +[[package]] +name = "jsonwebtoken" +version = "10.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c76e1c7d7df3e34443b3621b459b066a7b79644f059fc8b2db7070c825fd417e" +dependencies = [ + "base64 0.22.1", + "ed25519-dalek", + "getrandom 0.2.16", + "hmac", + "js-sys", + "p256", + "p384", + "pem", + "rand 0.8.5", + "rsa", + "serde", + "serde_json", + "sha2", + "signature", + "simple_asn1", +] + [[package]] name = "kv-log-macro" version = "1.0.7" @@ -3114,9 +3189,9 @@ dependencies = [ [[package]] name = 
"num-bigint-dig" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82c79c15c05d4bf82b6f5ef163104cc81a760d8e874d38ac50ab67c8877b647b" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ "lazy_static", "libm", @@ -3259,29 +3334,30 @@ dependencies = [ [[package]] name = "opendal" -version = "0.54.1" +version = "0.55.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42afda58fa2cf50914402d132cc1caacff116a85d10c72ab2082bb7c50021754" +checksum = "d075ab8a203a6ab4bc1bce0a4b9fe486a72bf8b939037f4b78d95386384bc80a" dependencies = [ "anyhow", "backon", "base64 0.22.1", "bytes", - "chrono", "crc32c", "futures", "getrandom 0.2.16", "http 1.3.1", "http-body 1.0.1", + "jiff", "log", "md-5", "percent-encoding", - "quick-xml 0.38.3", + "quick-xml 0.38.4", "reqsign", "reqwest", "serde", "serde_json", "tokio", + "url", "uuid", ] @@ -3471,6 +3547,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" +[[package]] +name = "pastey" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d6c094ee800037dff99e02cab0eaf3142826586742a270ab3d7a62656bd27a" + [[package]] name = "pbkdf2" version = "0.12.2" @@ -3531,9 +3613,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" +checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" dependencies = [ "memchr", "ucd-trie", @@ -3541,9 +3623,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" +checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" dependencies = [ "pest", "pest_generator", @@ -3551,9 +3633,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" +checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" dependencies = [ "pest", "pest_meta", @@ -3564,9 +3646,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" +checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" dependencies = [ "pest", "sha2", @@ -3725,6 +3807,15 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" +[[package]] +name = "portable-atomic-util" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +dependencies = [ + "portable-atomic", +] + [[package]] name = "potential_utf" version = "0.1.4" @@ -3861,9 +3952,9 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.38.3" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" +checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" dependencies = [ "memchr", "serde", @@ -4115,7 +4206,7 @@ dependencies = [ "hmac", "home", "http 1.3.1", - "jsonwebtoken", + "jsonwebtoken 9.3.1", "log", "once_cell", "percent-encoding", @@ -4151,7 +4242,7 @@ dependencies = [ "http 1.3.1", 
"http-body 1.0.1", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-rustls", "hyper-tls", "hyper-util", @@ -4186,9 +4277,9 @@ dependencies = [ [[package]] name = "resolv-conf" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3789b30bd25ba102de4beabd95d21ac45b69b1be7d14522bab988c526d6799" +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" [[package]] name = "rfc6979" @@ -4249,7 +4340,7 @@ dependencies = [ "either", "figment", "futures", - "indexmap 2.12.0", + "indexmap 2.12.1", "log", "memchr", "multer", @@ -4281,7 +4372,7 @@ checksum = "575d32d7ec1a9770108c879fc7c47815a80073f96ca07ff9525a94fcede1dd46" dependencies = [ "devise", "glob", - "indexmap 2.12.0", + "indexmap 2.12.1", "proc-macro2", "quote", "rocket_http", @@ -4301,7 +4392,7 @@ dependencies = [ "futures", "http 0.2.12", "hyper 0.14.32", - "indexmap 2.12.0", + "indexmap 2.12.1", "log", "memchr", "pear", @@ -4343,9 +4434,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" dependencies = [ "const-oid", "digest", @@ -4778,15 +4869,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +checksum = "10574371d41b0d9b2cff89418eda27da52bcaff2cc8741db26382a77c29131f1" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.12.0", + "indexmap 2.12.1", "schemars 0.9.0", "schemars 1.1.0", "serde_core", @@ -4797,9 +4888,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.15.1" +version = "3.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +checksum = "08a72d8216842fdd57820dc78d840bef99248e35fb2554ff923319e60f2d686b" dependencies = [ "darling 0.21.3", "proc-macro2", @@ -4856,9 +4947,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.6" +version = "1.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" dependencies = [ "libc", ] @@ -4971,9 +5062,9 @@ dependencies = [ [[package]] name = "sqlite-wasm-rs" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e4348c16a3d2e2a45437eff67efc5462b60443de76f61b5d0ed9111c626d9d" +checksum = "35c6d746902bca4ddf16592357eacf0473631ea26b36072f0dd0b31fa5ccd1f4" dependencies = [ "js-sys", "once_cell", @@ -5049,9 +5140,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.110" +version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" +checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", @@ -5399,7 +5490,7 @@ version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.12.0", + "indexmap 2.12.1", "serde", "serde_spanned 0.6.9", "toml_datetime 0.6.11", @@ -5677,9 +5768,9 @@ checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "value-bag" -version = "1.11.1" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" 
+checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" [[package]] name = "vaultwarden" @@ -5715,7 +5806,7 @@ dependencies = [ "html5gum", "http 1.3.1", "job_scheduler_ng", - "jsonwebtoken", + "jsonwebtoken 10.2.0", "lettre", "libsqlite3-sys", "log", @@ -5727,7 +5818,7 @@ dependencies = [ "opendal", "openidconnect", "openssl", - "pastey", + "pastey 0.2.0", "percent-encoding", "pico-args", "rand 0.9.2", @@ -6045,8 +6136,8 @@ dependencies = [ "windows-implement", "windows-interface", "windows-link 0.2.1", - "windows-result 0.4.1", - "windows-strings 0.5.1", + "windows-result", + "windows-strings", ] [[package]] @@ -6085,22 +6176,13 @@ checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" -dependencies = [ - "windows-link 0.1.3", - "windows-result 0.3.4", - "windows-strings 0.4.2", -] - -[[package]] -name = "windows-result" -version = "0.3.4" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.1.3", + "windows-link 0.2.1", + "windows-result", + "windows-strings", ] [[package]] @@ -6112,15 +6194,6 @@ dependencies = [ "windows-link 0.2.1", ] -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.3", -] - [[package]] name = "windows-strings" version = "0.5.1" @@ -6486,18 +6559,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.27" +version = "0.8.28" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +checksum = "43fa6694ed34d6e57407afbccdeecfa268c470a7d2a5b0cf49ce9fcc345afb90" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.27" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +checksum = "c640b22cd9817fae95be82f0d2f90b11f7605f6c319d16705c459b27ac2cbc26" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index d3e1b52d..2f88aaea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -116,7 +116,7 @@ job_scheduler_ng = "2.4.0" data-encoding = "2.9.0" # JWT library -jsonwebtoken = "9.3.1" +jsonwebtoken = { version = "10.2.0", features = ["use_pem", "rust_crypto"], default-features = false } # TOTP library totp-lite = "2.0.1" @@ -150,7 +150,7 @@ hickory-resolver = "0.25.2" html5gum = "0.8.0" regex = { version = "1.12.2", features = ["std", "perf", "unicode-perl"], default-features = false } data-url = "0.3.2" -bytes = "1.10.1" +bytes = "1.11.0" svg-hush = "0.9.5" # Cache function results (Used for version check and favicon fetching) @@ -167,8 +167,8 @@ openssl = "0.10.75" pico-args = "0.5.0" # Macro ident concatenation -pastey = "0.1.1" -governor = "0.10.1" +pastey = "0.2.0" +governor = "0.10.2" # OIDC for SSO openidconnect = { version = "4.0.1", features = ["reqwest", "native-tls"] } @@ -193,12 +193,12 @@ rpassword = "7.4.0" grass_compiler = { version = "0.13.4", default-features = false } # File are accessed through Apache OpenDAL -opendal = { version = "0.54.1", features = ["services-fs"], default-features = false } +opendal = { version = "0.55.0", features = ["services-fs"], default-features = false } # For retrieving AWS credentials, including temporary SSO credentials anyhow = { version = "1.0.100", optional = true } -aws-config = { version = "1.8.10", 
features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } -aws-credential-types = { version = "1.2.9", optional = true } +aws-config = { version = "1.8.11", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } +aws-credential-types = { version = "1.2.10", optional = true } aws-smithy-runtime-api = { version = "1.9.2", optional = true } http = { version = "1.3.1", optional = true } reqsign = { version = "0.16.5", optional = true } diff --git a/docker/DockerSettings.yaml b/docker/DockerSettings.yaml index 119dbf9f..50d4bd37 100644 --- a/docker/DockerSettings.yaml +++ b/docker/DockerSettings.yaml @@ -5,7 +5,7 @@ vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6" -rust_version: 1.91.0 # Rust version to be used +rust_version: 1.91.1 # Rust version to be used debian_version: trixie # Debian release name to be used alpine_version: "3.22" # Alpine version to be used # For which platforms/architectures will we try to build images diff --git a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine index f593e279..1c135e68 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908 ########################## ALPINE BUILD IMAGES ########################## ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 and linux/arm64 ## And for Alpine we define all build images here, they will only be loaded when actually used -FROM --platform=$BUILDPLATFORM 
ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64 -FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64 -FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7 -FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.1 AS build_amd64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.1 AS build_arm64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.1 AS build_armv7 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.1 AS build_armv6 ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 diff --git a/docker/Dockerfile.debian b/docker/Dockerfile.debian index 0c67431c..58a3d349 100644 --- a/docker/Dockerfile.debian +++ b/docker/Dockerfile.debian @@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838 ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.0-slim-trixie AS build +FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.1-slim-trixie AS build COPY --from=xx / / ARG TARGETARCH ARG TARGETVARIANT diff --git a/macros/Cargo.toml b/macros/Cargo.toml index ef5a6480..4468ec2a 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -13,8 +13,8 @@ path = "src/lib.rs" proc-macro = true [dependencies] -quote = "1.0.41" -syn = "2.0.108" +quote = "1.0.42" +syn = "2.0.110" [lints] workspace = true diff --git a/rust-toolchain.toml b/rust-toolchain.toml index a6c92998..0992ce9d 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.91.0" +channel = "1.91.1" components = [ "rustfmt", "clippy" ] profile = 
"minimal" diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index debd697b..10bf85be 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -568,7 +568,7 @@ async fn post_access_file( async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?; - if operator.info().scheme() == opendal::Scheme::Fs { + if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token = crate::auth::encode_jwt(&token_claims); diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 60b10d23..4273c22a 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -46,7 +46,7 @@ impl Attachment { pub async fn get_url(&self, host: &str) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?; - if operator.info().scheme() == opendal::Scheme::Fs { + if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone())); Ok(format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id)) } else { diff --git a/src/mail.rs b/src/mail.rs index 4cf52d38..270a839e 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -705,7 +705,7 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult { } async fn send_email(address: &str, subject: &str, body_html: String, body_text: String) -> EmptyResult { - let smtp_from = &CONFIG.smtp_from(); + let smtp_from = Address::from_str(&CONFIG.smtp_from())?; let body = if CONFIG.smtp_embed_images() { let logo_gray_body = Body::new(crate::api::static_files("logo-gray.png").unwrap().1.to_vec()); @@ -727,9 +727,9 @@ async fn send_email(address: &str, subject: &str, body_html: String, body_text: }; let email = Message::builder() - .message_id(Some(format!("<{}@{}>", 
crate::util::get_uuid(), smtp_from.split('@').collect::>()[1]))) + .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.domain()))) .to(Mailbox::new(None, Address::from_str(address)?)) - .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?)) + .from(Mailbox::new(Some(CONFIG.smtp_from_name()), smtp_from)) .subject(subject) .multipart(body)?; diff --git a/src/main.rs b/src/main.rs index 86f7714a..b431e493 100644 --- a/src/main.rs +++ b/src/main.rs @@ -246,8 +246,8 @@ fn init_logging() -> Result { .split(',') .collect::>() .into_iter() - .flat_map(|s| match s.split('=').collect::>()[..] { - [log, lvl_str] => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)), + .flat_map(|s| match s.split_once('=') { + Some((log, lvl_str)) => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)), _ => None, }) .collect() diff --git a/src/sso.rs b/src/sso.rs index 90e4d677..789f0a3b 100644 --- a/src/sso.rs +++ b/src/sso.rs @@ -132,6 +132,12 @@ struct BasicTokenClaims { exp: i64, } +#[derive(Deserialize)] +struct BasicTokenClaimsValidation { + exp: u64, + iss: String, +} + impl BasicTokenClaims { fn nbf(&self) -> i64 { self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) @@ -139,13 +145,23 @@ impl BasicTokenClaims { } fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { - let mut validation = jsonwebtoken::Validation::default(); - validation.set_issuer(&[CONFIG.sso_authority()]); - validation.insecure_disable_signature_validation(); - validation.validate_aud = false; + // We need to manually validate this token, since `insecure_decode` does not do this + match jsonwebtoken::dangerous::insecure_decode::(token) { + Ok(btcv) => { + let now = jsonwebtoken::get_current_timestamp(); + let validate_claim = btcv.claims; + // Validate the exp in the claim with a leeway of 60 seconds, same as jsonwebtoken does + if validate_claim.exp < now - 60 { + err_silent!(format!("Expired Signature for base token 
claim from {token_name}")) + } + if validate_claim.iss.ne(&CONFIG.sso_authority()) { + err_silent!(format!("Invalid Issuer for base token claim from {token_name}")) + } - match jsonwebtoken::decode(token, &jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { - Ok(btc) => Ok(btc.claims), + // All is validated and ok, lets decode again using the wanted struct + let btc = jsonwebtoken::dangerous::insecure_decode::(token).unwrap(); + Ok(btc.claims) + } Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), } }