Browse Source

Merge branch 'main' into remediations/audit-2025-11-09

pull/6727/head
kalvinparker 1 week ago
committed by GitHub
parent
commit
f86060e883
No known key found for this signature in database GPG Key ID: B5690EEEBB952194
  1. 12
      .env.template
  2. 8
      .github/workflows/build.yml
  3. 2
      .github/workflows/check-templates.yml
  4. 4
      .github/workflows/hadolint.yml
  5. 324
      .github/workflows/release.yml
  6. 4
      .github/workflows/trivy.yml
  7. 4
      .github/workflows/typos.yml
  8. 4
      .github/workflows/zizmor.yml
  9. 2
      .pre-commit-config.yaml
  10. 738
      Cargo.lock
  11. 52
      Cargo.toml
  12. 13
      docker/DockerSettings.yaml
  13. 26
      docker/Dockerfile.alpine
  14. 19
      docker/Dockerfile.debian
  15. 15
      docker/Dockerfile.j2
  16. 4
      macros/Cargo.toml
  17. 15
      migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql
  18. 9
      migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql
  19. 12
      migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql
  20. 9
      migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql
  21. 12
      migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql
  22. 9
      migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql
  23. 12
      migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql
  24. 6
      playwright/README.md
  25. 15
      playwright/compose/warden/build.sh
  26. 5
      playwright/docker-compose.yml
  27. 22
      playwright/global-utils.ts
  28. 328
      playwright/package-lock.json
  29. 8
      playwright/package.json
  30. 5
      playwright/test.env
  31. 3
      playwright/tests/login.smtp.spec.ts
  32. 14
      playwright/tests/organization.smtp.spec.ts
  33. 2
      playwright/tests/setups/2fa.ts
  34. 14
      playwright/tests/setups/sso.ts
  35. 9
      playwright/tests/setups/user.ts
  36. 15
      playwright/tests/sso_organization.smtp.spec.ts
  37. 2
      rust-toolchain.toml
  38. 99
      src/api/admin.rs
  39. 85
      src/api/core/accounts.rs
  40. 38
      src/api/core/ciphers.rs
  41. 23
      src/api/core/emergency_access.rs
  42. 32
      src/api/core/mod.rs
  43. 86
      src/api/core/organizations.rs
  44. 2
      src/api/core/sends.rs
  45. 40
      src/api/core/two_factor/email.rs
  46. 20
      src/api/icons.rs
  47. 189
      src/api/identity.rs
  48. 1
      src/api/mod.rs
  49. 2
      src/api/push.rs
  50. 18
      src/auth.rs
  51. 21
      src/config.rs
  52. 40
      src/db/mod.rs
  53. 2
      src/db/models/attachment.rs
  54. 58
      src/db/models/device.rs
  55. 6
      src/db/models/mod.rs
  56. 64
      src/db/models/org_policy.rs
  57. 7
      src/db/models/organization.rs
  58. 134
      src/db/models/sso_auth.rs
  59. 87
      src/db/models/sso_nonce.rs
  60. 24
      src/db/models/user.rs
  61. 7
      src/db/schema.rs
  62. 5
      src/http_client.rs
  63. 6
      src/mail.rs
  64. 14
      src/main.rs
  65. 274
      src/sso.rs
  66. 39
      src/sso_client.rs
  67. 17
      src/static/scripts/admin.css
  68. 15
      src/static/scripts/admin.js
  69. 12
      src/static/scripts/admin_diagnostics.js
  70. 2
      src/static/scripts/admin_users.js
  71. 7
      src/static/scripts/bootstrap.bundle.js
  72. 7
      src/static/scripts/bootstrap.css
  73. 19
      src/static/scripts/datatables.css
  74. 614
      src/static/scripts/datatables.js
  75. 33
      src/static/templates/admin/base.hbs
  76. 4
      src/static/templates/admin/diagnostics.hbs
  77. 4
      src/static/templates/email/send_single_org_removed_from_org.hbs
  78. 4
      src/static/templates/email/send_single_org_removed_from_org.html.hbs
  79. 2
      src/util.rs

12
.env.template

@ -183,9 +183,9 @@
## Defaults to every minute. Set blank to disable this job.
# DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
#
## Cron schedule of the job that cleans sso nonce from incomplete flow
## Cron schedule of the job that cleans sso auth from incomplete flow
## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *"
# PURGE_INCOMPLETE_SSO_AUTH="0 20 0 * * *"
########################
### General settings ###
@ -348,7 +348,7 @@
## Default: 2592000 (30 days)
# ICON_CACHE_TTL=2592000
## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
## Default: 2592000 (3 days)
## Default: 259200 (3 days)
# ICON_CACHE_NEGTTL=259200
## Icon download timeout
@ -376,6 +376,7 @@
## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
## - "pm-25373-windows-biometrics-v2": Enable the new implementation of biometrics on Windows. (Needs desktop >= 2025.11.0)
## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
@ -471,6 +472,11 @@
## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
# ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false
## Prefer IPv6 (AAAA) resolving
## This setting configures the DNS resolver to resolve IPv6 first, and if not available, try IPv4
## This could be useful in IPv6-only environments.
# DNS_PREFER_IPV6=false
#####################################
### SSO settings (OpenID Connect) ###
#####################################

8
.github/workflows/build.yml

@ -54,7 +54,7 @@ jobs:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
fetch-depth: 0
@ -80,7 +80,7 @@ jobs:
# Only install the clippy and rustfmt components on the default rust-toolchain
- name: "Install rust-toolchain version"
uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1
if: ${{ matrix.channel == 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@ -90,7 +90,7 @@ jobs:
# Install the any other channel to be used for which we do not execute clippy and rustfmt
- name: "Install MSRV version"
uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1
if: ${{ matrix.channel != 'rust-toolchain' }}
with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@ -115,7 +115,7 @@ jobs:
# Enable Rust Caching
- name: Rust Caching
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
# Like changing the build host from Ubuntu 20.04 to 22.04 for example.

2
.github/workflows/check-templates.yml

@ -12,7 +12,7 @@ jobs:
steps:
# Checkout the repo
- name: "Checkout"
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
# End Checkout the repo

4
.github/workflows/hadolint.yml

@ -13,7 +13,7 @@ jobs:
steps:
# Start Docker Buildx
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
# https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with:
@ -34,7 +34,7 @@ jobs:
# End Download hadolint
# Checkout the repo
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
# End Checkout the repo

324
.github/workflows/release.yml

@ -16,6 +16,23 @@ concurrency:
# Don't cancel other runs when creating a tag
cancel-in-progress: ${{ github.ref_type == 'branch' }}
defaults:
run:
shell: bash
env:
# The *_REPO variables need to be configured as repository variables
# Append `/settings/variables/actions` to your repo url
# DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
# Check for Docker hub credentials in secrets
HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
# GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
# Check for Github credentials in secrets
HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
# QUAY_REPO needs to be 'quay.io/<user>/<repo>'
# Check for Quay.io credentials in secrets
HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
jobs:
docker-build:
name: Build Vaultwarden containers
@ -25,41 +42,25 @@ jobs:
contents: read
attestations: write # Needed to generate an artifact attestation for a build
id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
runs-on: ubuntu-24.04
runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }}
timeout-minutes: 120
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
services:
registry:
image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0
ports:
- 5000:5000
env:
SOURCE_COMMIT: ${{ github.sha }}
SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}"
# The *_REPO variables need to be configured as repository variables
# Append `/settings/variables/actions` to your repo url
# DOCKERHUB_REPO needs to be 'index.docker.io/<user>/<repo>'
# Check for Docker hub credentials in secrets
HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }}
# GHCR_REPO needs to be 'ghcr.io/<user>/<repo>'
# Check for Github credentials in secrets
HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }}
# QUAY_REPO needs to be 'quay.io/<user>/<repo>'
# Check for Quay.io credentials in secrets
HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }}
strategy:
matrix:
arch: ["amd64", "arm64", "arm/v7", "arm/v6"]
base_image: ["debian","alpine"]
steps:
- name: Initialize QEMU binfmt support
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
with:
platforms: "arm64,arm"
# Start Docker Buildx
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
# https://github.com/moby/buildkit/issues/3969
# Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills
with:
@ -72,25 +73,24 @@ jobs:
# Checkout the repo
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
# We need fetch-depth of 0 so we also get all the tag metadata
with:
persist-credentials: false
fetch-depth: 0
# Determine Base Tags and Source Version
- name: Determine Base Tags and Source Version
shell: bash
# Normalize the architecture string for use in paths and cache keys
- name: Normalize architecture string
env:
REF_TYPE: ${{ github.ref_type }}
MATRIX_ARCH: ${{ matrix.arch }}
run: |
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}"
elif [[ "${REF_TYPE}" == "branch" ]]; then
echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}"
fi
# Replace slashes with nothing to create a safe string for paths/cache keys
NORMALIZED_ARCH="${MATRIX_ARCH//\/}"
echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
# Determine Source Version
- name: Determine Source Version
run: |
# Get the Source Version for this release
GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || true)"
if [[ -n "${GIT_EXACT_TAG}" ]]; then
@ -99,7 +99,6 @@ jobs:
GIT_LAST_TAG="$(git describe --tags --abbrev=0)"
echo "SOURCE_VERSION=${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}" | tee -a "${GITHUB_ENV}"
fi
# End Determine Base Tags
# Login to Docker Hub
- name: Login to Docker Hub
@ -111,7 +110,6 @@ jobs:
- name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
shell: bash
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: |
@ -128,7 +126,6 @@ jobs:
- name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: |
@ -145,55 +142,65 @@ jobs:
- name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
shell: bash
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
- name: Configure build cache from/to
shell: bash
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
BASE_IMAGE: ${{ matrix.base_image }}
NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
run: |
#
# Check if there is a GitHub Container Registry Login and use it for caching
if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}"
echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}"
else
echo "BAKE_CACHE_FROM="
echo "BAKE_CACHE_TO="
fi
#
- name: Add localhost registry
shell: bash
- name: Generate tags
id: tags
env:
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}"
# Convert comma-separated list to newline-separated set commands
TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|")
# Output for use in next step
{
echo "TAGS<<EOF"
echo "$TAGS"
echo "EOF"
} >> "$GITHUB_ENV"
- name: Bake ${{ matrix.base_image }} containers
id: bake_vw
uses: docker/bake-action@3acf805d94d93a86cce4ca44798a76464a75b88c # v6.9.0
uses: docker/bake-action@5be5f02ff8819ecd3092ea6b2e6261c31774f2b4 # v6.10.0
env:
BASE_TAGS: "${{ env.BASE_TAGS }}"
BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}"
SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}"
SOURCE_VERSION: "${{ env.SOURCE_VERSION }}"
SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}"
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
with:
pull: true
push: true
source: .
files: docker/docker-bake.hcl
targets: "${{ matrix.base_image }}-multi"
set: |
*.cache-from=${{ env.BAKE_CACHE_FROM }}
*.cache-to=${{ env.BAKE_CACHE_TO }}
*.platform=linux/${{ matrix.arch }}
${{ env.TAGS }}
*.output=type=local,dest=./output
*.output=type=image,push-by-digest=true,name-canonical=true,push=true
- name: Extract digest SHA
shell: bash
env:
BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }}
BASE_IMAGE: ${{ matrix.base_image }}
@ -201,105 +208,174 @@ jobs:
GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
- name: Export digest
env:
DIGEST_SHA: ${{ env.DIGEST_SHA }}
RUNNER_TEMP: ${{ runner.temp }}
run: |
mkdir -p "${RUNNER_TEMP}"/digests
digest="${DIGEST_SHA}"
touch "${RUNNER_TEMP}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
- name: Rename binaries to match target platform
env:
NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }}
run: |
mv ./output/vaultwarden vaultwarden-"${NORMALIZED_ARCH}"
# Upload artifacts to Github Actions and Attest the binaries
- name: Attest binaries
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
subject-path: vaultwarden-${{ env.NORMALIZED_ARCH }}
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
- name: Upload binaries as artifacts
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }}
path: vaultwarden-${{ env.NORMALIZED_ARCH }}
merge-manifests:
name: Merge manifests
runs-on: ubuntu-latest
needs: docker-build
permissions:
packages: write # Needed to upload packages and artifacts
attestations: write # Needed to generate an artifact attestation for a build
id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
strategy:
matrix:
base_image: ["debian","alpine"]
steps:
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*-${{ matrix.base_image }}
merge-multiple: true
# Login to Docker Hub
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
- name: Add registry for DockerHub
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }}
env:
DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}"
# Login to GitHub Container Registry
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
- name: Add registry for ghcr.io
if: ${{ env.HAVE_GHCR_LOGIN == 'true' }}
env:
GHCR_REPO: ${{ vars.GHCR_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}"
# Login to Quay.io
- name: Login to Quay.io
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_TOKEN }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
- name: Add registry for Quay.io
if: ${{ env.HAVE_QUAY_LOGIN == 'true' }}
env:
QUAY_REPO: ${{ vars.QUAY_REPO }}
run: |
echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}"
# Extract the Alpine binaries from the containers
- name: Extract binaries
shell: bash
# Determine Base Tags
- name: Determine Base Tags
env:
BASE_IMAGE_TAG: "${{ matrix.base_image != 'debian' && format('-{0}', matrix.base_image) || '' }}"
REF_TYPE: ${{ github.ref_type }}
BASE_IMAGE: ${{ matrix.base_image }}
run: |
# Check which main tag we are going to build determined by ref_type
if [[ "${REF_TYPE}" == "tag" ]]; then
EXTRACT_TAG="latest"
echo "BASE_TAGS=latest${BASE_IMAGE_TAG},${GITHUB_REF#refs/*/}${BASE_IMAGE_TAG}${BASE_IMAGE_TAG//-/,}" | tee -a "${GITHUB_ENV}"
elif [[ "${REF_TYPE}" == "branch" ]]; then
EXTRACT_TAG="testing"
echo "BASE_TAGS=testing${BASE_IMAGE_TAG}" | tee -a "${GITHUB_ENV}"
fi
# Check which base_image was used and append -alpine if needed
if [[ "${BASE_IMAGE}" == "alpine" ]]; then
EXTRACT_TAG="${EXTRACT_TAG}-alpine"
- name: Create manifest list, push it and extract digest SHA
working-directory: ${{ runner.temp }}/digests
env:
BASE_TAGS: "${{ env.BASE_TAGS }}"
CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}"
run: |
IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}"
IFS=',' read -ra TAGS <<< "${BASE_TAGS}"
TAG_ARGS=()
for img in "${IMAGES[@]}"; do
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("-t" "${img}:${tag}")
done
done
echo "Creating manifest"
if ! OUTPUT=$(docker buildx imagetools create \
"${TAG_ARGS[@]}" \
$(printf "${IMAGES[0]}@sha256:%s " *) 2>&1); then
echo "Manifest creation failed"
echo "${OUTPUT}"
exit 1
fi
# After each extraction the image is removed.
# This is needed because using different platforms doesn't trigger a new pull/download
# Extract amd64 binary
docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE}
docker rm --force amd64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Extract arm64 binary
docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE}
docker rm --force arm64
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Extract armv7 binary
docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE}
docker rm --force armv7
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
# Extract armv6 binary
docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE}
docker rm --force armv6
docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}"
echo "Manifest created successfully"
echo "${OUTPUT}"
# Upload artifacts to Github Actions and Attest the binaries
- name: "Upload amd64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
path: vaultwarden-amd64-${{ matrix.base_image }}
- name: "Upload arm64 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
path: vaultwarden-arm64-${{ matrix.base_image }}
# Extract digest SHA for subsequent steps
GET_DIGEST_SHA="$(echo "${OUTPUT}" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)"
echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}"
- name: "Upload armv7 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
# Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
path: vaultwarden-armv7-${{ matrix.base_image }}
subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: "Upload armv6 artifact ${{ matrix.base_image }}"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
path: vaultwarden-armv6-${{ matrix.base_image }}
subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true
- name: "Attest artifacts ${{ matrix.base_image }}"
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
- name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}}
uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0
with:
subject-path: vaultwarden-*
# End Upload artifacts to Github Actions
subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }}
push-to-registry: true

4
.github/workflows/trivy.yml

@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
@ -46,6 +46,6 @@ jobs:
severity: CRITICAL,HIGH
- name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
sarif_file: 'trivy-results.sarif'

4
.github/workflows/typos.yml

@ -12,11 +12,11 @@ jobs:
steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
# End Checkout the repo
# When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
- name: Spell Check Repo
uses: crate-ci/typos@07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
uses: crate-ci/typos@1a319b54cc9e3b333fed6a5c88ba1a90324da514 # v1.40.1

4
.github/workflows/zizmor.yml

@ -16,12 +16,12 @@ jobs:
security-events: write # To write the security report
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
with:
persist-credentials: false
- name: Run zizmor
uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
with:
# intentionally not scanning the entire repository,
# since it contains integration tests.

2
.pre-commit-config.yaml

@ -53,6 +53,6 @@ repos:
- "cd docker && make"
# When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
- repo: https://github.com/crate-ci/typos
rev: 07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
rev: 1a319b54cc9e3b333fed6a5c88ba1a90324da514 # v1.40.1
hooks:
- id: typos

738
Cargo.lock

File diff suppressed because it is too large

52
Cargo.toml

@ -1,6 +1,6 @@
[workspace.package]
edition = "2021"
rust-version = "1.89.0"
rust-version = "1.90.0"
license = "AGPL-3.0-only"
repository = "https://github.com/dani-garcia/vaultwarden"
publish = false
@ -55,9 +55,9 @@ syslog = "7.0.0"
macros = { path = "./macros" }
# Logging
log = "0.4.28"
log = "0.4.29"
fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] }
tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
tracing = { version = "0.1.44", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work
# A `dotenv` implementation for Rust
dotenvy = { version = "0.15.7", default-features = false }
@ -65,7 +65,7 @@ dotenvy = { version = "0.15.7", default-features = false }
# Numerical libraries
num-traits = "0.2.19"
num-derive = "0.4.2"
bigdecimal = "0.4.9"
bigdecimal = "0.4.10"
# Web framework
rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false }
@ -80,17 +80,18 @@ dashmap = "6.1.0"
# Async futures
futures = "0.3.31"
tokio = { version = "1.48.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] }
tokio-util = { version = "0.7.16", features = ["compat"]}
tokio-util = { version = "0.7.17", features = ["compat"]}
# A generic serialization/deserialization framework
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
serde_json = "1.0.148"
# A safe, extensible ORM and Query builder
diesel = { version = "2.3.3", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.3.0"
# Currently pinned diesel to v2.3.3 as newer version break MySQL/MariaDB compatibility
diesel = { version = "2.3.5", features = ["chrono", "r2d2", "numeric"] }
diesel_migrations = "2.3.1"
derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] }
derive_more = { version = "2.1.1", features = ["from", "into", "as_ref", "deref", "display"] }
diesel-derive-newtype = "2.1.2"
# Bundled/Static SQLite
@ -102,7 +103,7 @@ ring = "0.17.14"
subtle = "2.6.1"
# UUID generation
uuid = { version = "1.18.1", features = ["v4"] }
uuid = { version = "1.19.0", features = ["v4"] }
# Date and time libraries
chrono = { version = "0.4.42", features = ["clock", "serde"], default-features = false }
@ -116,7 +117,7 @@ job_scheduler_ng = "2.4.0"
data-encoding = "2.9.0"
# JWT library
jsonwebtoken = "9.3.1"
jsonwebtoken = { version = "10.2.0", features = ["use_pem", "rust_crypto"], default-features = false }
# TOTP library
totp-lite = "2.0.1"
@ -127,9 +128,9 @@ yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio"
# WebAuthn libraries
# danger-allow-state-serialisation is needed to save the state in the db
# danger-credential-internals is needed to support U2F to Webauthn migration
webauthn-rs = { version = "0.5.3", features = ["danger-allow-state-serialisation", "danger-credential-internals"] }
webauthn-rs-proto = "0.5.3"
webauthn-rs-core = "0.5.3"
webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-credential-internals"] }
webauthn-rs-proto = "0.5.4"
webauthn-rs-core = "0.5.4"
# Handling of URL's for WebAuthn and favicons
url = "2.5.7"
@ -143,15 +144,14 @@ email_address = "0.2.9"
handlebars = { version = "6.3.2", features = ["dir_source"] }
# HTTP client (Used for favicons, version check, DUO and HIBP API)
# Swap rustls -> native-tls for experiment to avoid pulling webpki-roots (CDLA-Permissive-2.0)
reqwest = { version = "0.12.24", features = ["native-tls", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false }
reqwest = { version = "0.12.28", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false}
hickory-resolver = "0.25.2"
# Favicon extraction libraries
html5gum = "0.8.0"
html5gum = "0.8.3"
regex = { version = "1.12.2", features = ["std", "perf", "unicode-perl"], default-features = false }
data-url = "0.3.2"
bytes = "1.10.1"
bytes = "1.11.0"
svg-hush = "0.9.5"
# Cache function results (Used for version check and favicon fetching)
@ -162,14 +162,14 @@ cookie = "0.18.1"
cookie_store = "0.22.0"
# Used by U2F, JWT and PostgreSQL
openssl = "0.10.74"
openssl = "0.10.75"
# CLI argument parsing
pico-args = "0.5.0"
# Macro ident concatenation
pastey = "0.1.1"
governor = "0.10.1"
pastey = "0.2.1"
governor = "0.10.4"
# OIDC for SSO
openidconnect = { version = "^4.0", features = ["native-tls"] }
@ -194,14 +194,14 @@ rpassword = "7.4.0"
grass_compiler = { version = "0.13.4", default-features = false }
# File are accessed through Apache OpenDAL
opendal = { version = "0.54.1", features = ["services-fs"], default-features = false }
opendal = { version = "0.55.0", features = ["services-fs"], default-features = false }
# For retrieving AWS credentials, including temporary SSO credentials
anyhow = { version = "1.0.100", optional = true }
aws-config = { version = "1.8.8", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.8", optional = true }
aws-smithy-runtime-api = { version = "1.9.2", optional = true }
http = { version = "1.3.1", optional = true }
aws-config = { version = "1.8.12", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
aws-credential-types = { version = "1.2.11", optional = true }
aws-smithy-runtime-api = { version = "1.9.3", optional = true }
http = { version = "1.4.0", optional = true }
reqsign = { version = "0.16.5", optional = true }
# Strip debuginfo from the release builds

13
docker/DockerSettings.yaml

@ -1,13 +1,13 @@
---
vault_version: "v2025.10.1"
vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa"
# Cross Compile Docker Helper Scripts v1.8.0
vault_version: "v2025.12.1+build.3"
vault_image_digest: "sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42"
# Cross Compile Docker Helper Scripts v1.9.0
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6"
rust_version: 1.91.0 # Rust version to be used
xx_image_digest: "sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707"
rust_version: 1.92.0 # Rust version to be used
debian_version: trixie # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used
alpine_version: "3.23" # Alpine version to be used
# For which platforms/architectures will we try to build images
platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"]
# Determine the build images per OS/Arch
@ -17,7 +17,6 @@ build_stage_image:
platform: "$BUILDPLATFORM"
alpine:
image: "build_${TARGETARCH}${TARGETVARIANT}"
platform: "linux/amd64" # The Alpine build images only have linux/amd64 images
arch_image:
amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}"
arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}"

26
docker/Dockerfile.alpine

@ -19,27 +19,27 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.1_build.3
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.1_build.3
# [docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
# [docker.io/vaultwarden/web-vault:v2025.10.1]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42
# [docker.io/vaultwarden/web-vault:v2025.12.1_build.3]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 AS vault
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.92.0 AS build_amd64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.92.0 AS build_arm64
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.92.0 AS build_armv7
FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.92.0 AS build_armv6
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build
FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build
ARG TARGETARCH
ARG TARGETVARIANT
ARG TARGETPLATFORM
@ -127,7 +127,7 @@ RUN source /env-cargo && \
# To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*'
#
# We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22
FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.23
ENV ROCKET_PROFILE="release" \
ROCKET_ADDRESS=0.0.0.0 \

19
docker/Dockerfile.debian

@ -19,24 +19,24 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.1_build.3
# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.1_build.3
# [docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
# [docker.io/vaultwarden/web-vault:v2025.10.1]
# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42
# [docker.io/vaultwarden/web-vault:v2025.12.1_build.3]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 AS vault
########################## Cross Compile Docker Helper Scripts ##########################
## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
## And these bash scripts do not have any significant difference if at all
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6 AS xx
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707 AS xx
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.0-slim-trixie AS build
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.92.0-slim-trixie AS build
COPY --from=xx / /
ARG TARGETARCH
ARG TARGETVARIANT
@ -51,7 +51,6 @@ ENV DEBIAN_FRONTEND=noninteractive \
TERM=xterm-256color \
CARGO_HOME="/root/.cargo" \
USER="root"
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@ -175,7 +174,7 @@ RUN mkdir /data && \
--no-install-recommends \
ca-certificates \
curl \
libmariadb-dev \
libmariadb3 \
libpq5 \
openssl && \
apt-get clean && \

15
docker/Dockerfile.j2

@ -19,13 +19,13 @@
# - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
# click the tag name to view the digest of the image it currently points to.
# - From the command line:
# $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version }}
# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version }}
# $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}
# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}
# [docker.io/vaultwarden/web-vault@{{ vault_image_digest }}]
#
# - Conversely, to get the tag name from the digest:
# $ docker image inspect --format "{{ '{{' }}.RepoTags}}" docker.io/vaultwarden/web-vault@{{ vault_image_digest }}
# [docker.io/vaultwarden/web-vault:{{ vault_version }}]
# [docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}]
#
FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_digest }} AS vault
@ -36,16 +36,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_diges
FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx
{% elif base == "alpine" %}
########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64
## And for Alpine we define all build images here, they will only be loaded when actually used
{% for arch in build_stage_image[base].arch_image %}
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }}
{% endfor %}
{% endif %}
########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006
FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build
FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build
{% if base == "debian" %}
COPY --from=xx / /
{% endif %}
@ -69,7 +69,6 @@ ENV DEBIAN_FRONTEND=noninteractive \
{% endif %}
{% if base == "debian" %}
# Install clang to get `xx-cargo` working
# Install pkg-config to allow amd64 builds to find all libraries
# Install git so build.rs can determine the correct version
@ -212,7 +211,7 @@ RUN mkdir /data && \
--no-install-recommends \
ca-certificates \
curl \
libmariadb-dev \
libmariadb3 \
libpq5 \
openssl && \
apt-get clean && \

4
macros/Cargo.toml

@ -13,8 +13,8 @@ path = "src/lib.rs"
proc-macro = true
[dependencies]
quote = "1.0.41"
syn = "2.0.108"
quote = "1.0.42"
syn = "2.0.111"
[lints]
workspace = true

15
migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql

@ -1,2 +1,15 @@
ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`;
-- MySQL/MariaDB have no `DROP FOREIGN KEY IF EXISTS`, and some versions fail
-- when the key does not exist. Build the DROP statement dynamically: only emit
-- it when information_schema shows the constraint is present; otherwise fall
-- back to a harmless `SELECT 1`.
SET @drop_sso_fk = IF((SELECT true FROM information_schema.TABLE_CONSTRAINTS WHERE
    CONSTRAINT_SCHEMA = DATABASE() AND
    TABLE_NAME = 'sso_users' AND
    CONSTRAINT_NAME = 'sso_users_ibfk_1' AND
    CONSTRAINT_TYPE = 'FOREIGN KEY') = true,
    'ALTER TABLE sso_users DROP FOREIGN KEY sso_users_ibfk_1',
    'SELECT 1');

-- Run the dynamically built statement via PREPARE/EXECUTE (required for SQL
-- stored in a user variable), then release the prepared statement.
PREPARE stmt FROM @drop_sso_fk;
EXECUTE stmt;
DEALLOCATE PREPARE stmt;

-- Re-create the foreign key with cascading semantics so updating or deleting
-- a row in `users` propagates to the matching `sso_users` row.
ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE;

9
migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql

@ -0,0 +1,9 @@
-- Revert the sso_nonce -> sso_auth migration: drop the replacement table
-- and restore the original sso_nonce layout.
DROP TABLE IF EXISTS sso_auth;

CREATE TABLE sso_nonce (
    -- lookup key; sized VARCHAR so it can be a MySQL primary key
    state        VARCHAR(512) NOT NULL PRIMARY KEY,
    nonce        TEXT NOT NULL,
    -- nullable: not every row carries a verifier
    verifier     TEXT,
    redirect_uri TEXT NOT NULL,
    created_at   TIMESTAMP NOT NULL DEFAULT now()
);

12
migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql

@ -0,0 +1,12 @@
-- Replace sso_nonce with sso_auth. The old table is dropped outright, so any
-- in-flight SSO login state is discarded by this migration.
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_auth (
    -- lookup key; sized VARCHAR so it can be a MySQL primary key
    state            VARCHAR(512) NOT NULL PRIMARY KEY,
    client_challenge TEXT NOT NULL,
    nonce            TEXT NOT NULL,
    redirect_uri     TEXT NOT NULL,
    -- nullable: filled in as the authentication flow progresses
    code_response    TEXT,
    auth_response    TEXT,
    created_at       TIMESTAMP NOT NULL DEFAULT now(),
    updated_at       TIMESTAMP NOT NULL DEFAULT now()
);

9
migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql

@ -0,0 +1,9 @@
-- Revert the sso_nonce -> sso_auth migration: drop the replacement table
-- and restore the original sso_nonce layout.
DROP TABLE IF EXISTS sso_auth;

CREATE TABLE sso_nonce (
    state        TEXT NOT NULL PRIMARY KEY,
    nonce        TEXT NOT NULL,
    -- nullable: not every row carries a verifier
    verifier     TEXT,
    redirect_uri TEXT NOT NULL,
    created_at   TIMESTAMP NOT NULL DEFAULT now()
);

12
migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql

@ -0,0 +1,12 @@
-- Replace sso_nonce with sso_auth. The old table is dropped outright, so any
-- in-flight SSO login state is discarded by this migration.
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_auth (
    state            TEXT NOT NULL PRIMARY KEY,
    client_challenge TEXT NOT NULL,
    nonce            TEXT NOT NULL,
    redirect_uri     TEXT NOT NULL,
    -- nullable: filled in as the authentication flow progresses
    code_response    TEXT,
    auth_response    TEXT,
    created_at       TIMESTAMP NOT NULL DEFAULT now(),
    updated_at       TIMESTAMP NOT NULL DEFAULT now()
);

9
migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql

@ -0,0 +1,9 @@
-- Revert the sso_nonce -> sso_auth migration: drop the replacement table
-- and restore the original sso_nonce layout.
DROP TABLE IF EXISTS sso_auth;

CREATE TABLE sso_nonce (
    state        TEXT NOT NULL PRIMARY KEY,
    nonce        TEXT NOT NULL,
    -- nullable: not every row carries a verifier
    verifier     TEXT,
    redirect_uri TEXT NOT NULL,
    -- SQLite has no now(); CURRENT_TIMESTAMP is the portable equivalent here
    created_at   DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

12
migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql

@ -0,0 +1,12 @@
-- Replace sso_nonce with sso_auth. The old table is dropped outright, so any
-- in-flight SSO login state is discarded by this migration.
DROP TABLE IF EXISTS sso_nonce;

CREATE TABLE sso_auth (
    state            TEXT NOT NULL PRIMARY KEY,
    client_challenge TEXT NOT NULL,
    nonce            TEXT NOT NULL,
    redirect_uri     TEXT NOT NULL,
    -- nullable: filled in as the authentication flow progresses
    code_response    TEXT,
    auth_response    TEXT,
    -- SQLite has no now(); CURRENT_TIMESTAMP is the portable equivalent here
    created_at       DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at       DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);

6
playwright/README.md

@ -97,9 +97,9 @@ npx playwright codegen "http://127.0.0.1:8003"
## Override web-vault
It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit.
It is possible to change the `web-vault` used by referencing a different `vw_web_builds` commit.
Simplest is to set and uncomment `PW_WV_REPO_URL` and `PW_WV_COMMIT_HASH` in the `test.env`.
Simplest is to set and uncomment `PW_VW_REPO_URL` and `PW_VW_COMMIT_HASH` in the `test.env`.
Ensure that the image is built with:
```bash
@ -112,6 +112,8 @@ You can check the result running:
DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden
```
Then check `http://127.0.0.1:8003/admin/diagnostics` with `admin`.
# OpenID Connect test setup
Additionally this `docker-compose` template allows to run locally Vaultwarden,

15
playwright/compose/warden/build.sh

@ -6,18 +6,19 @@ echo $COMMIT_HASH
if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then
rm -rf /web-vault
mkdir bw_web_builds;
cd bw_web_builds;
mkdir -p vw_web_builds;
cd vw_web_builds;
git -c init.defaultBranch=main init
git remote add origin "$REPO_URL"
git fetch --depth 1 origin "$COMMIT_HASH"
git -c advice.detachedHead=false checkout FETCH_HEAD
export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2)
./scripts/checkout_web_vault.sh
./scripts/build_web_vault.sh
printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json
npm ci --ignore-scripts
mv ./web-vault/apps/web/build /web-vault
cd apps/web
npm run dist:oss:selfhost
printf '{"version":"%s"}' "$COMMIT_HASH" > build/vw-version.json
mv build /web-vault
fi

5
playwright/docker-compose.yml

@ -18,10 +18,11 @@ services:
context: compose/warden
dockerfile: Dockerfile
args:
REPO_URL: ${PW_WV_REPO_URL:-}
COMMIT_HASH: ${PW_WV_COMMIT_HASH:-}
REPO_URL: ${PW_VW_REPO_URL:-}
COMMIT_HASH: ${PW_VW_COMMIT_HASH:-}
env_file: ${DC_ENV_FILE:-.env}
environment:
- ADMIN_TOKEN
- DATABASE_URL
- I_REALLY_WANT_VOLATILE_STORAGE
- LOG_LEVEL

22
playwright/global-utils.ts

@ -221,9 +221,13 @@ export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB:
}
export async function checkNotification(page: Page, hasText: string) {
await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible();
await page.locator('bit-toast').filter({ hasText }).getByRole('button').click();
await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0);
await expect(page.locator('bit-toast', { hasText })).toBeVisible();
try {
await page.locator('bit-toast', { hasText }).getByRole('button', { name: 'Close' }).click({force: true, timeout: 10_000});
} catch (error) {
console.log(`Closing notification failed but it should now be invisible (${error})`);
}
await expect(page.locator('bit-toast', { hasText })).toHaveCount(0);
}
export async function cleanLanding(page: Page) {
@ -244,3 +248,15 @@ export async function logout(test: Test, page: Page, user: { name: string }) {
await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible();
});
}
/**
 * Dismiss the browser-extension setup screens if they appear.
 *
 * Waits for the DOM to load, then clicks "Add it later" followed by
 * "Skip to web app". If the "Add it later" button never shows up, the
 * click times out after 5 seconds and the error is swallowed so the
 * test can continue — the prompt is optional, not an error condition.
 *
 * @param page Playwright page to operate on.
 */
export async function ignoreExtension(page: Page) {
    await page.waitForLoadState('domcontentloaded');
    try {
        // 5s timeout: treat a missing prompt as "nothing to dismiss"
        await page.getByRole('button', { name: 'Add it later' }).click({timeout: 5_000});
        await page.getByRole('link', { name: 'Skip to web app' }).click();
    } catch (error) {
        console.log('Extension setup not visible. Continuing');
    }
}

328
playwright/package-lock.json

@ -9,15 +9,15 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"mysql2": "3.15.0",
"mysql2": "3.15.3",
"otpauth": "9.4.1",
"pg": "8.16.3"
},
"devDependencies": {
"@playwright/test": "1.55.1",
"dotenv": "17.2.2",
"@playwright/test": "1.56.1",
"dotenv": "17.2.3",
"dotenv-expand": "12.0.3",
"maildev": "npm:@timshel_npm/maildev@^3.2.3"
"maildev": "npm:@timshel_npm/maildev@3.2.5"
}
},
"node_modules/@asamuzakjp/css-color": {
@ -34,16 +34,16 @@
}
},
"node_modules/@asamuzakjp/dom-selector": {
"version": "6.5.6",
"resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.6.tgz",
"integrity": "sha512-Mj3Hu9ymlsERd7WOsUKNUZnJYL4IZ/I9wVVYgtvOsWYiEFbkQ4G7VRIh2USxTVW4BBDIsLG+gBUgqOqf2Kvqow==",
"version": "6.7.3",
"resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.3.tgz",
"integrity": "sha512-kiGFeY+Hxf5KbPpjRLf+ffWbkos1aGo8MBfd91oxS3O57RgU3XhZrt/6UzoVF9VMpWbC3v87SRc9jxGrc9qHtQ==",
"dev": true,
"dependencies": {
"@asamuzakjp/nwsapi": "^2.3.9",
"bidi-js": "^1.0.3",
"css-tree": "^3.1.0",
"is-potential-custom-element-name": "^1.0.1",
"lru-cache": "^11.2.1"
"lru-cache": "^11.2.2"
}
},
"node_modules/@asamuzakjp/nwsapi": {
@ -144,9 +144,9 @@
}
},
"node_modules/@csstools/css-syntax-patches-for-csstree": {
"version": "1.0.14",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz",
"integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==",
"version": "1.0.15",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.15.tgz",
"integrity": "sha512-q0p6zkVq2lJnmzZVPR33doA51G7YOja+FBvRdp5ISIthL0MtFCgYHHhR563z9WFGxcOn0WfjSkPDJ5Qig3H3Sw==",
"dev": true,
"funding": [
{
@ -160,9 +160,6 @@
],
"engines": {
"node": ">=18"
},
"peerDependencies": {
"postcss": "^8.4"
}
},
"node_modules/@csstools/css-tokenizer": {
@ -196,12 +193,12 @@
}
},
"node_modules/@playwright/test": {
"version": "1.55.1",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.55.1.tgz",
"integrity": "sha512-IVAh/nOJaw6W9g+RJVlIQJ6gSiER+ae6mKQ5CX1bERzQgbC1VSeBlwdvczT7pxb0GWiyrxH4TGKbMfDb4Sq/ig==",
"version": "1.56.1",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
"dev": true,
"dependencies": {
"playwright": "1.55.1"
"playwright": "1.56.1"
},
"bin": {
"playwright": "cli.js"
@ -249,12 +246,12 @@
}
},
"node_modules/@types/node": {
"version": "24.5.2",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz",
"integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==",
"version": "24.2.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.2.1.tgz",
"integrity": "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ==",
"dev": true,
"dependencies": {
"undici-types": "~7.12.0"
"undici-types": "~7.10.0"
}
},
"node_modules/@types/trusted-types": {
@ -363,9 +360,9 @@
}
},
"node_modules/body-parser/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"dependencies": {
"ms": "^2.1.3"
@ -637,9 +634,9 @@
}
},
"node_modules/dompurify": {
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz",
"integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==",
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.0.tgz",
"integrity": "sha512-r+f6MYR1gGN1eJv0TVQbhA7if/U7P87cdPl3HN5rikqaBSBxLiCb/b9O+2eG0cxz0ghyU+mU1QkbsOwERMYlWQ==",
"dev": true,
"optionalDependencies": {
"@types/trusted-types": "^2.0.7"
@ -660,9 +657,9 @@
}
},
"node_modules/dotenv": {
"version": "17.2.2",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.2.tgz",
"integrity": "sha512-Sf2LSQP+bOlhKWWyhFsn0UsfdK/kCWRv1iuA2gXAwt3dyNabr6QSj00I2V10pidqz69soatm9ZwZvpQMTIOd5Q==",
"version": "17.2.3",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz",
"integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==",
"dev": true,
"engines": {
"node": ">=12"
@ -952,9 +949,9 @@
}
},
"node_modules/express/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"dependencies": {
"ms": "^2.1.3"
@ -992,9 +989,9 @@
}
},
"node_modules/finalhandler/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"dependencies": {
"ms": "^2.1.3"
@ -1340,20 +1337,20 @@
"integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g=="
},
"node_modules/jsdom": {
"version": "27.0.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz",
"integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==",
"version": "27.0.1",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.1.tgz",
"integrity": "sha512-SNSQteBL1IlV2zqhwwolaG9CwhIhTvVHWg3kTss/cLE7H/X4644mtPQqYvCfsSrGQWt9hSZcgOXX8bOZaMN+kA==",
"dev": true,
"dependencies": {
"@asamuzakjp/dom-selector": "^6.5.4",
"cssstyle": "^5.3.0",
"@asamuzakjp/dom-selector": "^6.7.2",
"cssstyle": "^5.3.1",
"data-urls": "^6.0.0",
"decimal.js": "^10.5.0",
"decimal.js": "^10.6.0",
"html-encoding-sniffer": "^4.0.0",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"is-potential-custom-element-name": "^1.0.1",
"parse5": "^7.3.0",
"parse5": "^8.0.0",
"rrweb-cssom": "^0.8.0",
"saxes": "^6.0.0",
"symbol-tree": "^3.2.4",
@ -1362,8 +1359,8 @@
"webidl-conversions": "^8.0.0",
"whatwg-encoding": "^3.1.1",
"whatwg-mimetype": "^4.0.0",
"whatwg-url": "^15.0.0",
"ws": "^8.18.2",
"whatwg-url": "^15.1.0",
"ws": "^8.18.3",
"xml-name-validator": "^5.0.0"
},
"engines": {
@ -1426,9 +1423,9 @@
"integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="
},
"node_modules/lru-cache": {
"version": "11.2.1",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
"integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
"version": "11.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz",
"integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==",
"dev": true,
"engines": {
"node": "20 || >=22"
@ -1450,9 +1447,9 @@
},
"node_modules/maildev": {
"name": "@timshel_npm/maildev",
"version": "3.2.3",
"resolved": "https://registry.npmjs.org/@timshel_npm/maildev/-/maildev-3.2.3.tgz",
"integrity": "sha512-CNxMz4Obw7nL8MZbx4y1YUFeqqAQk+Qwm51tcBV5lRBotMlXKeYhuEcayb1v66nUwq832bUfKF4hyQpJixFZrw==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@timshel_npm/maildev/-/maildev-3.2.5.tgz",
"integrity": "sha512-suWQu2s2kmO+MXtNJYW9peklznhd+aorIUb4tSNrfaKoEJjDa3vLXTvWf+3cb67o4Yv4Z6nPeKdMTCDZVn/Nyw==",
"dev": true,
"dependencies": {
"@types/mailparser": "3.4.6",
@ -1461,13 +1458,13 @@
"commander": "14.0.1",
"compression": "1.8.1",
"cors": "2.8.5",
"dompurify": "3.2.7",
"dompurify": "3.3.0",
"express": "5.1.0",
"jsdom": "27.0.0",
"mailparser": "3.7.4",
"jsdom": "27.0.1",
"mailparser": "3.7.5",
"mime": "4.1.0",
"nodemailer": "7.0.6",
"smtp-server": "3.14.0",
"nodemailer": "7.0.9",
"smtp-server": "3.15.0",
"socket.io": "4.8.1",
"wildstring": "1.0.9"
},
@ -1479,36 +1476,44 @@
}
},
"node_modules/mailparser": {
"version": "3.7.4",
"resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.7.4.tgz",
"integrity": "sha512-Beh4yyR4jLq3CZZ32asajByrXnW8dLyKCAQD3WvtTiBnMtFWhxO+wa93F6sJNjDmfjxXs4NRNjw3XAGLqZR3Vg==",
"version": "3.7.5",
"resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.7.5.tgz",
"integrity": "sha512-o59RgZC+4SyCOn4xRH1mtRiZ1PbEmi6si6Ufnd3tbX/V9zmZN1qcqu8xbXY62H6CwIclOT3ppm5u/wV2nujn4g==",
"dev": true,
"dependencies": {
"encoding-japanese": "2.2.0",
"he": "1.2.0",
"html-to-text": "9.0.5",
"iconv-lite": "0.6.3",
"iconv-lite": "0.7.0",
"libmime": "5.3.7",
"linkify-it": "5.0.0",
"mailsplit": "5.4.5",
"nodemailer": "7.0.4",
"mailsplit": "5.4.6",
"nodemailer": "7.0.9",
"punycode.js": "2.3.1",
"tlds": "1.259.0"
"tlds": "1.260.0"
}
},
"node_modules/mailparser/node_modules/nodemailer": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.4.tgz",
"integrity": "sha512-9O00Vh89/Ld2EcVCqJ/etd7u20UhME0f/NToPfArwPEe1Don1zy4mAIz6ariRr7mJ2RDxtaDzN0WJVdVXPtZaw==",
"node_modules/mailparser/node_modules/iconv-lite": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz",
"integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==",
"dev": true,
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=6.0.0"
"node": ">=0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/mailsplit": {
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/mailsplit/-/mailsplit-5.4.5.tgz",
"integrity": "sha512-oMfhmvclR689IIaQmIcR5nODnZRRVwAKtqFT407TIvmhX2OLUBnshUTcxzQBt3+96sZVDud9NfSe1NxAkUNXEQ==",
"version": "5.4.6",
"resolved": "https://registry.npmjs.org/mailsplit/-/mailsplit-5.4.6.tgz",
"integrity": "sha512-M+cqmzaPG/mEiCDmqQUz8L177JZLZmXAUpq38owtpq2xlXlTSw+kntnxRt2xsxVFFV6+T8Mj/U0l5s7s6e0rNw==",
"deprecated": "This package has been renamed to @zone-eu/mailsplit. Please update your dependencies.",
"dev": true,
"dependencies": {
"libbase64": "1.3.0",
@ -1595,9 +1600,9 @@
"dev": true
},
"node_modules/mysql2": {
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.15.0.tgz",
"integrity": "sha512-tT6pomf5Z/I7Jzxu8sScgrYBMK9bUFWd7Kbo6Fs1L0M13OOIJ/ZobGKS3Z7tQ8Re4lj+LnLXIQVZZxa3fhYKzA==",
"version": "3.15.3",
"resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.15.3.tgz",
"integrity": "sha512-FBrGau0IXmuqg4haEZRBfHNWB5mUARw6hNwPDXXGg0XzVJ50mr/9hb267lvpVMnhZ1FON3qNd4Xfcez1rbFwSg==",
"dependencies": {
"aws-ssl-profiles": "^1.1.1",
"denque": "^2.1.0",
@ -1647,25 +1652,6 @@
"node": ">=12"
}
},
"node_modules/nanoid": {
"version": "3.3.11",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
"integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
"dev": true,
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"peer": true,
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/negotiator": {
"version": "0.6.4",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
@ -1676,9 +1662,9 @@
}
},
"node_modules/nodemailer": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.6.tgz",
"integrity": "sha512-F44uVzgwo49xboqbFgBGkRaiMgtoBrBEWCVincJPK9+S9Adkzt/wXCLKbf7dxucmxfTI5gHGB+bEmdyzN6QKjw==",
"version": "7.0.9",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.9.tgz",
"integrity": "sha512-9/Qm0qXIByEP8lEV2qOqcAW7bRpL8CR9jcTwk3NBnHJNmP9fIJ86g2fgmIXqHY+nj55ZEMwWqYAT2QTDpRUYiQ==",
"dev": true,
"engines": {
"node": ">=6.0.0"
@ -1747,9 +1733,9 @@
}
},
"node_modules/parse5": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
"integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz",
"integrity": "sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==",
"dev": true,
"dependencies": {
"entities": "^6.0.0"
@ -1793,13 +1779,12 @@
}
},
"node_modules/path-to-regexp": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz",
"integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==",
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz",
"integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==",
"dev": true,
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
"engines": {
"node": ">=16"
}
},
"node_modules/peberminta": {
@ -1892,20 +1877,13 @@
"split2": "^4.1.0"
}
},
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
"dev": true,
"peer": true
},
"node_modules/playwright": {
"version": "1.55.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz",
"integrity": "sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==",
"version": "1.56.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
"dev": true,
"dependencies": {
"playwright-core": "1.55.1"
"playwright-core": "1.56.1"
},
"bin": {
"playwright": "cli.js"
@ -1918,9 +1896,9 @@
}
},
"node_modules/playwright-core": {
"version": "1.55.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz",
"integrity": "sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==",
"version": "1.56.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
"dev": true,
"bin": {
"playwright-core": "cli.js"
@ -1929,35 +1907,6 @@
"node": ">=18"
}
},
"node_modules/postcss": {
"version": "8.5.6",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
"integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
"dev": true,
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
"source-map-js": "^1.2.1"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
@ -2049,34 +1998,18 @@
}
},
"node_modules/raw-body": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz",
"integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz",
"integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==",
"dev": true,
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.7.0",
"iconv-lite": "0.6.3",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.10"
}
},
"node_modules/raw-body/node_modules/iconv-lite": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz",
"integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==",
"dev": true,
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
"node": ">= 0.8"
}
},
"node_modules/require-from-string": {
@ -2105,9 +2038,9 @@
}
},
"node_modules/router/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"dependencies": {
"ms": "^2.1.3"
@ -2205,9 +2138,9 @@
}
},
"node_modules/send/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
"integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
"dev": true,
"dependencies": {
"ms": "^2.1.3"
@ -2326,29 +2259,20 @@
}
},
"node_modules/smtp-server": {
"version": "3.14.0",
"resolved": "https://registry.npmjs.org/smtp-server/-/smtp-server-3.14.0.tgz",
"integrity": "sha512-cEw/hdIY+xw1pkbQbQ23hvnm9kNABAsgYB+jJYGkzAynZxJ2VB9aqC6JhB1vpdDnqan7C7AL3qHYRGwz5eD6BQ==",
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/smtp-server/-/smtp-server-3.15.0.tgz",
"integrity": "sha512-yv945vk0/xcukSKAoIhGz6GOlcXoCyGQH2w9IlLrTKk3SJiOBH9bcO6tD0ILTZYJsMqRa6OTRZAyqeuLXkv59Q==",
"dev": true,
"dependencies": {
"base32.js": "0.1.0",
"ipv6-normalize": "1.0.1",
"nodemailer": "7.0.3",
"nodemailer": "7.0.9",
"punycode.js": "2.3.1"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/smtp-server/node_modules/nodemailer": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.3.tgz",
"integrity": "sha512-Ajq6Sz1x7cIK3pN6KesGTah+1gnwMnx5gKl3piQlQQE/PwyJ4Mbc8is2psWYxK3RJTVeqsDaCv8ZzXLCDHMTZw==",
"dev": true,
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/socket.io": {
"version": "4.8.1",
"resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz",
@ -2564,30 +2488,30 @@
"dev": true
},
"node_modules/tlds": {
"version": "1.259.0",
"resolved": "https://registry.npmjs.org/tlds/-/tlds-1.259.0.tgz",
"integrity": "sha512-AldGGlDP0PNgwppe2quAvuBl18UcjuNtOnDuUkqhd6ipPqrYYBt3aTxK1QTsBVknk97lS2JcafWMghjGWFtunw==",
"version": "1.260.0",
"resolved": "https://registry.npmjs.org/tlds/-/tlds-1.260.0.tgz",
"integrity": "sha512-78+28EWBhCEE7qlyaHA9OR3IPvbCLiDh3Ckla593TksfFc9vfTsgvH7eS+dr3o9qr31gwGbogcI16yN91PoRjQ==",
"dev": true,
"bin": {
"tlds": "bin.js"
}
},
"node_modules/tldts": {
"version": "7.0.16",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.16.tgz",
"integrity": "sha512-5bdPHSwbKTeHmXrgecID4Ljff8rQjv7g8zKQPkCozRo2HWWni+p310FSn5ImI+9kWw9kK4lzOB5q/a6iv0IJsw==",
"version": "7.0.17",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.17.tgz",
"integrity": "sha512-Y1KQBgDd/NUc+LfOtKS6mNsC9CCaH+m2P1RoIZy7RAPo3C3/t8X45+zgut31cRZtZ3xKPjfn3TkGTrctC2TQIQ==",
"dev": true,
"dependencies": {
"tldts-core": "^7.0.16"
"tldts-core": "^7.0.17"
},
"bin": {
"tldts": "bin/cli.js"
}
},
"node_modules/tldts-core": {
"version": "7.0.16",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.16.tgz",
"integrity": "sha512-XHhPmHxphLi+LGbH0G/O7dmUH9V65OY20R7vH8gETHsp5AZCjBk9l8sqmRKLaGOxnETU7XNSDUPtewAy/K6jbA==",
"version": "7.0.17",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.17.tgz",
"integrity": "sha512-DieYoGrP78PWKsrXr8MZwtQ7GLCUeLxihtjC1jZsW1DnvSMdKPitJSe8OSYDM2u5H6g3kWJZpePqkp43TfLh0g==",
"dev": true
},
"node_modules/toidentifier": {
@ -2644,9 +2568,9 @@
"dev": true
},
"node_modules/undici-types": {
"version": "7.12.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz",
"integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==",
"version": "7.10.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz",
"integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==",
"dev": true
},
"node_modules/unpipe": {

8
playwright/package.json

@ -8,13 +8,13 @@
"author": "",
"license": "ISC",
"devDependencies": {
"@playwright/test": "1.55.1",
"dotenv": "17.2.2",
"@playwright/test": "1.56.1",
"dotenv": "17.2.3",
"dotenv-expand": "12.0.3",
"maildev": "npm:@timshel_npm/maildev@^3.2.3"
"maildev": "npm:@timshel_npm/maildev@3.2.5"
},
"dependencies": {
"mysql2": "3.15.0",
"mysql2": "3.15.3",
"otpauth": "9.4.1",
"pg": "8.16.3"
}

5
playwright/test.env

@ -55,6 +55,7 @@ ROCKET_PORT=8003
DOMAIN=http://localhost:${ROCKET_PORT}
LOG_LEVEL=info,oidcwarden::sso=debug
LOGIN_RATELIMIT_MAX_BURST=100
ADMIN_TOKEN=admin
SMTP_SECURITY=off
SMTP_PORT=${MAILDEV_SMTP_PORT}
@ -67,8 +68,8 @@ SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM}
SSO_DEBUG_TOKENS=true
# Custom web-vault build
# PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git
# PW_WV_COMMIT_HASH=a5f5390895516bce2f48b7baadb6dc399e5fe75a
# PW_VW_REPO_URL=https://github.com/vaultwarden/vw_web_builds.git
# PW_VW_COMMIT_HASH=b5f5b2157b9b64b5813bc334a75a277d0377b5d3
###########################
# Docker MariaDb container#

3
playwright/tests/login.smtp.spec.ts

@ -91,6 +91,9 @@ test('2fa', async ({ page }) => {
await page.getByLabel(/Verification code/).fill(code);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Add it later' }).click();
await page.getByRole('link', { name: 'Skip to web app' }).click();
await expect(page).toHaveTitle(/Vaults/);
})

14
playwright/tests/organization.smtp.spec.ts

@ -57,15 +57,17 @@ test('invited with new account', async ({ page }) => {
await expect(page).toHaveTitle(/Create account | Vaultwarden Web/);
//await page.getByLabel('Name').fill(users.user2.name);
await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm new master password (').fill(users.user2.password);
await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm master password (').fill(users.user2.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Your new account has been created');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
// Redirected to the vault
await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
await utils.checkNotification(page, 'You have been logged in!');
await utils.checkNotification(page, 'Invitation accepted');
// await utils.checkNotification(page, 'You have been logged in!');
});
await test.step('Check mails', async () => {
@ -91,9 +93,11 @@ test('invited with existing account', async ({ page }) => {
await page.getByLabel('Master password').fill(users.user3.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle(/Vaultwarden Web/);
await utils.checkNotification(page, 'Invitation accepted');
await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox');
await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted'));

2
playwright/tests/setups/2fa.ts

@ -48,7 +48,7 @@ export async function activateEmail(test: Test, page: Page, user: { name: string
await page.getByRole('menuitem', { name: 'Account settings' }).click();
await page.getByRole('link', { name: 'Security' }).click();
await page.getByRole('link', { name: 'Two-step login' }).click();
await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click();
await page.locator('bit-item').filter({ hasText: 'Enter a code sent to your email' }).getByRole('button').click();
await page.getByLabel('Master password (required)').fill(user.password);
await page.getByRole('button', { name: 'Continue' }).click();
await page.getByRole('button', { name: 'Send email' }).click();

14
playwright/tests/setups/sso.ts

@ -33,19 +33,21 @@ export async function logNewUser(
await test.step('Create Vault account', async () => {
await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
await page.getByLabel('New master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm new master password (').fill(user.password);
await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm master password (').fill(user.password);
await page.getByRole('button', { name: 'Create account' }).click();
});
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();
});
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
if( options.mailBuffer ){
let mailBuffer = options.mailBuffer;
await test.step('Check emails', async () => {
@ -115,6 +117,8 @@ export async function logUser(
await page.getByRole('button', { name: 'Unlock' }).click();
});
await utils.ignoreExtension(page);
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible();

9
playwright/tests/setups/user.ts

@ -17,15 +17,16 @@ export async function createAccount(test, page: Page, user: { email: string, nam
await page.getByRole('button', { name: 'Continue' }).click();
// Vault finish Creation
await page.getByLabel('New master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm new master password (').fill(user.password);
await page.getByLabel('Master password (required)', { exact: true }).fill(user.password);
await page.getByLabel('Confirm master password (').fill(user.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Your new account has been created')
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle('Vaults | Vaultwarden Web');
await utils.checkNotification(page, 'You have been logged in!');
// await utils.checkNotification(page, 'You have been logged in!');
if( mailBuffer ){
await mailBuffer.expect((m) => m.subject === "Welcome");
@ -45,6 +46,8 @@ export async function logUser(test, page: Page, user: { email: string, password:
await page.getByLabel('Master password').fill(user.password);
await page.getByRole('button', { name: 'Log in with master password' }).click();
await utils.ignoreExtension(page);
// We are now in the default vault page
await expect(page).toHaveTitle(/Vaultwarden Web/);

15
playwright/tests/sso_organization.smtp.spec.ts

@ -67,16 +67,17 @@ test('invited with new account', async ({ page }) => {
await test.step('Create Vault account', async () => {
await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible();
await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm new master password (').fill(users.user2.password);
await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password);
await page.getByLabel('Confirm master password (').fill(users.user2.password);
await page.getByRole('button', { name: 'Create account' }).click();
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
});
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await utils.checkNotification(page, 'Account successfully created!');
await utils.checkNotification(page, 'Invitation accepted');
});
await test.step('Check mails', async () => {
@ -107,11 +108,13 @@ test('invited with existing account', async ({ page }) => {
await expect(page).toHaveTitle('Vaultwarden Web');
await page.getByLabel('Master password').fill(users.user3.password);
await page.getByRole('button', { name: 'Unlock' }).click();
await utils.checkNotification(page, 'Invitation accepted');
await utils.ignoreExtension(page);
});
await test.step('Default vault page', async () => {
await expect(page).toHaveTitle(/Vaultwarden Web/);
await utils.checkNotification(page, 'Invitation accepted');
});
await test.step('Check mails', async () => {

2
rust-toolchain.toml

@ -1,4 +1,4 @@
[toolchain]
channel = "1.91.0"
channel = "1.92.0"
components = [ "rustfmt", "clippy" ]
profile = "minimal"

99
src/api/admin.rs

@ -23,7 +23,7 @@ use crate::{
backup_sqlite, get_sql_server_version,
models::{
Attachment, Cipher, Collection, Device, Event, EventType, Group, Invitation, Membership, MembershipId,
MembershipType, OrgPolicy, OrgPolicyErr, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId,
MembershipType, OrgPolicy, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId,
},
DbConn, DbConnType, ACTIVE_DB_TYPE,
},
@ -31,7 +31,7 @@ use crate::{
http_client::make_http_request,
mail,
util::{
container_base_image, format_naive_datetime_local, get_display_size, get_web_vault_version,
container_base_image, format_naive_datetime_local, get_active_web_release, get_display_size,
is_running_in_container, NumberOrString,
},
CONFIG, VERSION,
@ -556,23 +556,9 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
}
}
member_to_edit.atype = new_type;
// This check is also done at api::organizations::{accept_invite, _confirm_invite, _activate_member, edit_member}, update_membership_type
// It returns different error messages per function.
if new_type < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &member_to_edit.org_uuid, true, &conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?;
} else {
err!("You cannot modify this user to this type because they have not setup 2FA");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot modify this user to this type because it is a member of an organization which forbids it");
}
}
}
OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?;
log_event(
EventType::OrganizationUserUpdated as i32,
@ -585,7 +571,6 @@ async fn update_membership_type(data: Json<MembershipTypeData>, token: AdminToke
)
.await;
member_to_edit.atype = new_type;
member_to_edit.save(&conn).await
}
@ -704,6 +689,26 @@ async fn get_ntp_time(has_http_access: bool) -> String {
String::from("Unable to fetch NTP time.")
}
fn web_vault_compare(active: &str, latest: &str) -> i8 {
use semver::Version;
use std::cmp::Ordering;
let active_semver = Version::parse(active).unwrap_or_else(|e| {
warn!("Unable to parse active web-vault version '{active}': {e}");
Version::parse("2025.1.1").unwrap()
});
let latest_semver = Version::parse(latest).unwrap_or_else(|e| {
warn!("Unable to parse latest web-vault version '{latest}': {e}");
Version::parse("2025.1.1").unwrap()
});
match active_semver.cmp(&latest_semver) {
Ordering::Less => -1,
Ordering::Equal => 0,
Ordering::Greater => 1,
}
}
#[get("/diagnostics")]
async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
use chrono::prelude::*;
@ -723,32 +728,21 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A
_ => "Unable to resolve domain name.".to_string(),
};
let (latest_release, latest_commit, latest_web_build) = get_release_info(has_http_access).await;
let (latest_vw_release, latest_vw_commit, latest_web_release) = get_release_info(has_http_access).await;
let active_web_release = get_active_web_release();
let web_vault_compare = web_vault_compare(&active_web_release, &latest_web_release);
let ip_header_name = &ip_header.0.unwrap_or_default();
// Get current running versions
let web_vault_version = get_web_vault_version();
// Check if the running version is newer than the latest stable released version
let web_vault_pre_release = if let Ok(web_ver_match) = semver::VersionReq::parse(&format!(">{latest_web_build}")) {
web_ver_match.matches(
&semver::Version::parse(&web_vault_version).unwrap_or_else(|_| semver::Version::parse("2025.1.1").unwrap()),
)
} else {
error!("Unable to parse latest_web_build: '{latest_web_build}'");
false
};
let diagnostics_json = json!({
"dns_resolved": dns_resolved,
"current_release": VERSION,
"latest_release": latest_release,
"latest_commit": latest_commit,
"latest_release": latest_vw_release,
"latest_commit": latest_vw_commit,
"web_vault_enabled": &CONFIG.web_vault_enabled(),
"web_vault_version": web_vault_version,
"latest_web_build": latest_web_build,
"web_vault_pre_release": web_vault_pre_release,
"active_web_release": active_web_release,
"latest_web_release": latest_web_release,
"web_vault_compare": web_vault_compare,
"running_within_container": running_within_container,
"container_base_image": if running_within_container { container_base_image() } else { "Not applicable" },
"has_http_access": has_http_access,
@ -859,3 +853,32 @@ impl<'r> FromRequest<'r> for AdminToken {
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn validate_web_vault_compare() {
// web_vault_compare(active, latest)
// Test normal versions
assert!(web_vault_compare("2025.12.0", "2025.12.1") == -1);
assert!(web_vault_compare("2025.12.1", "2025.12.1") == 0);
assert!(web_vault_compare("2025.12.2", "2025.12.1") == 1);
// Test patched/+build.n versions
// Newer latest version
assert!(web_vault_compare("2025.12.0+build.1", "2025.12.1") == -1);
assert!(web_vault_compare("2025.12.1", "2025.12.1+build.1") == -1);
assert!(web_vault_compare("2025.12.0+build.1", "2025.12.1+build.1") == -1);
assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1+build.2") == -1);
// Equal versions
assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1+build.1") == 0);
assert!(web_vault_compare("2025.12.2+build.2", "2025.12.2+build.2") == 0);
// Newer active version
assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1") == 1);
assert!(web_vault_compare("2025.12.2", "2025.12.1+build.1") == 1);
assert!(web_vault_compare("2025.12.2+build.1", "2025.12.1+build.1") == 1);
assert!(web_vault_compare("2025.12.1+build.3", "2025.12.1+build.2") == 1);
}
}

85
src/api/core/accounts.rs

@ -66,6 +66,7 @@ pub fn routes() -> Vec<rocket::Route> {
put_device_token,
put_clear_device_token,
post_clear_device_token,
get_tasks,
post_auth_request,
get_auth_request,
put_auth_request,
@ -75,12 +76,16 @@ pub fn routes() -> Vec<rocket::Route> {
]
}
#[derive(Debug, Deserialize)]
#[derive(Debug, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct KDFData {
#[serde(alias = "kdfType")]
kdf: i32,
#[serde(alias = "iterations")]
kdf_iterations: i32,
#[serde(alias = "memory")]
kdf_memory: Option<i32>,
#[serde(alias = "parallelism")]
kdf_parallelism: Option<i32>,
}
@ -374,7 +379,7 @@ async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn:
}
if let Some(identifier) = data.org_identifier {
if identifier != crate::sso::FAKE_IDENTIFIER {
if identifier != crate::sso::FAKE_IDENTIFIER && identifier != crate::api::admin::FAKE_ADMIN_UUID {
let org = match Organization::find_by_uuid(&identifier.into(), &conn).await {
None => err!("Failed to retrieve the associated organization"),
Some(org) => org,
@ -401,8 +406,8 @@ async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn:
user.save(&conn).await?;
Ok(Json(json!({
"Object": "set-password",
"CaptchaBypassToken": "",
"object": "set-password",
"captchaBypassToken": "",
})))
}
@ -545,17 +550,6 @@ async fn post_password(data: Json<ChangePassData>, headers: Headers, conn: DbCon
save_result
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
#[serde(flatten)]
kdf: KDFData,
master_password_hash: String,
new_master_password_hash: String,
key: String,
}
fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
err!("PBKDF2 KDF iterations must be at least 100000.")
@ -591,18 +585,61 @@ fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
Ok(())
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct AuthenticationData {
salt: String,
kdf: KDFData,
master_password_authentication_hash: String,
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct UnlockData {
salt: String,
kdf: KDFData,
master_key_wrapped_user_key: String,
}
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeKdfData {
new_master_password_hash: String,
key: String,
authentication_data: AuthenticationData,
unlock_data: UnlockData,
master_password_hash: String,
}
#[post("/accounts/kdf", data = "<data>")]
async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult {
let data: ChangeKdfData = data.into_inner();
let mut user = headers.user;
if !user.check_valid_password(&data.master_password_hash) {
if !headers.user.check_valid_password(&data.master_password_hash) {
err!("Invalid password")
}
set_kdf_data(&mut user, &data.kdf)?;
if data.authentication_data.kdf != data.unlock_data.kdf {
err!("KDF settings must be equal for authentication and unlock")
}
user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
if headers.user.email != data.authentication_data.salt || headers.user.email != data.unlock_data.salt {
err!("Invalid master password salt")
}
let mut user = headers.user;
set_kdf_data(&mut user, &data.unlock_data.kdf)?;
user.set_password(
&data.authentication_data.master_password_authentication_hash,
Some(data.unlock_data.master_key_wrapped_user_key),
true,
None,
);
let save_result = user.save(&conn).await;
nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await;
@ -1373,7 +1410,7 @@ async fn put_device_token(device_id: DeviceId, data: Json<PushToken>, headers: H
}
device.push_token = Some(token);
if let Err(e) = device.save(&conn).await {
if let Err(e) = device.save(true, &conn).await {
err!(format!("An error occurred while trying to save the device push token: {e}"));
}
@ -1409,6 +1446,14 @@ async fn post_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResu
put_clear_device_token(device_id, conn).await
}
#[get("/tasks")]
fn get_tasks(_client_headers: ClientHeaders) -> JsonResult {
Ok(Json(json!({
"data": [],
"object": "list"
})))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AuthRequestRequest {

38
src/api/core/ciphers.rs

@ -159,7 +159,28 @@ async fn sync(data: SyncData, headers: Headers, client_version: Option<ClientVer
let domains_json = if data.exclude_domains {
Value::Null
} else {
api::core::_get_eq_domains(headers, true).into_inner()
api::core::_get_eq_domains(&headers, true).into_inner()
};
// This is very similar to the the userDecryptionOptions sent in connect/token,
// but as of 2025-12-19 they're both using different casing conventions.
let has_master_password = !headers.user.password_hash.is_empty();
let master_password_unlock = if has_master_password {
json!({
"kdf": {
"kdfType": headers.user.client_kdf_type,
"iterations": headers.user.client_kdf_iter,
"memory": headers.user.client_kdf_memory,
"parallelism": headers.user.client_kdf_parallelism
},
// This field is named inconsistently and will be removed and replaced by the "wrapped" variant in the apps.
// https://github.com/bitwarden/android/blob/release/2025.12-rc41/network/src/main/kotlin/com/bitwarden/network/model/MasterPasswordUnlockDataJson.kt#L22-L26
"masterKeyEncryptedUserKey": headers.user.akey,
"masterKeyWrappedUserKey": headers.user.akey,
"salt": headers.user.email
})
} else {
Value::Null
};
Ok(Json(json!({
@ -170,6 +191,9 @@ async fn sync(data: SyncData, headers: Headers, client_version: Option<ClientVer
"ciphers": ciphers_json,
"domains": domains_json,
"sends": sends_json,
"userDecryption": {
"masterPasswordUnlock": master_password_unlock,
},
"object": "sync"
})))
}
@ -301,12 +325,6 @@ async fn post_ciphers_create(
) -> JsonResult {
let mut data: ShareCipherData = data.into_inner();
// Check if there are one more more collections selected when this cipher is part of an organization.
// err if this is not the case before creating an empty cipher.
if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() {
err!("You must select at least one collection.");
}
// This check is usually only needed in update_cipher_from_data(), but we
// need it here as well to avoid creating an empty cipher in the call to
// cipher.save() below.
@ -324,7 +342,11 @@ async fn post_ciphers_create(
// or otherwise), we can just ignore this field entirely.
data.cipher.last_known_revision_date = None;
share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt, None).await
let res = share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt, None).await;
if res.is_err() {
cipher.delete(&conn).await?;
}
res
}
/// Called when creating a new user-owned cipher.

23
src/api/core/emergency_access.rs

@ -47,24 +47,11 @@ pub fn routes() -> Vec<Route> {
#[get("/emergency-access/trusted")]
async fn get_contacts(headers: Headers, conn: DbConn) -> Json<Value> {
if !CONFIG.emergency_access_allowed() {
return Json(json!({
"data": [{
"id": "",
"status": 2,
"type": 0,
"waitTimeDays": 0,
"granteeId": "",
"email": "",
"name": "NOTE: Emergency Access is disabled!",
"object": "emergencyAccessGranteeDetails",
}],
"object": "list",
"continuationToken": null
}));
}
let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await;
let emergency_access_list = if CONFIG.emergency_access_allowed() {
EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await
} else {
Vec::new()
};
let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len());
for ea in emergency_access_list {
if let Some(grantee) = ea.to_json_grantee_details(&conn).await {

32
src/api/core/mod.rs

@ -53,7 +53,7 @@ use crate::{
api::{EmptyResult, JsonResult, Notify, UpdateType},
auth::Headers,
db::{
models::{Membership, MembershipStatus, MembershipType, OrgPolicy, OrgPolicyErr, Organization, User},
models::{Membership, MembershipStatus, OrgPolicy, Organization, User},
DbConn,
},
error::Error,
@ -74,11 +74,11 @@ const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json");
#[get("/settings/domains")]
fn get_eq_domains(headers: Headers) -> Json<Value> {
_get_eq_domains(headers, false)
_get_eq_domains(&headers, false)
}
fn _get_eq_domains(headers: Headers, no_excluded: bool) -> Json<Value> {
let user = headers.user;
fn _get_eq_domains(headers: &Headers, no_excluded: bool) -> Json<Value> {
let user = &headers.user;
use serde_json::from_str;
let equivalent_domains: Vec<Vec<String>> = from_str(&user.equivalent_domains).unwrap();
@ -217,7 +217,8 @@ fn config() -> Json<Value> {
// We should make sure that we keep this updated when we support the new server features
// Version history:
// - Individual cipher key encryption: 2024.2.0
"version": "2025.6.0",
// - Mobile app support for MasterPasswordUnlockData: 2025.8.0
"version": "2025.12.0",
"gitHash": option_env!("GIT_REV"),
"server": {
"name": "Vaultwarden",
@ -269,27 +270,12 @@ async fn accept_org_invite(
err!("User already accepted the invitation");
}
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// It returns different error messages per function.
if member.atype < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if crate::CONFIG.email_2fa_auto_fallback() {
two_factor::email::activate_email_2fa(user, conn).await?;
} else {
err!("You cannot join this organization until you enable two-step login on your user account");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot join this organization because you are a member of an organization which forbids it");
}
}
}
member.status = MembershipStatus::Accepted as i32;
member.reset_password_key = reset_password_key;
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
OrgPolicy::check_user_allowed(&member, "join", conn).await?;
member.save(conn).await?;
if crate::CONFIG.mail_enabled() {

86
src/api/core/organizations.rs

@ -15,7 +15,7 @@ use crate::{
models::{
Cipher, CipherId, Collection, CollectionCipher, CollectionGroup, CollectionId, CollectionUser, EventType,
Group, GroupId, GroupUser, Invitation, Membership, MembershipId, MembershipStatus, MembershipType,
OrgPolicy, OrgPolicyErr, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId,
OrgPolicy, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId,
},
DbConn,
},
@ -195,8 +195,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, conn: DbConn
}
let data: OrgData = data.into_inner();
let (private_key, public_key) = if data.keys.is_some() {
let keys: OrgKeyData = data.keys.unwrap();
let (private_key, public_key) = if let Some(keys) = data.keys {
(Some(keys.encrypted_private_key), Some(keys.public_key))
} else {
(None, None)
@ -370,9 +369,9 @@ async fn get_auto_enroll_status(identifier: &str, headers: Headers, conn: DbConn
};
Ok(Json(json!({
"Id": id,
"Identifier": identifier,
"ResetPasswordEnabled": rp_auto_enroll,
"id": id,
"identifier": identifier,
"resetPasswordEnabled": rp_auto_enroll,
})))
}
@ -1463,27 +1462,12 @@ async fn _confirm_invite(
err!("User in invalid state")
}
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// It returns different error messages per function.
if member_to_confirm.atype < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member_to_confirm.user_uuid, org_id, true, conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&member_to_confirm.user_uuid, conn).await?;
} else {
err!("You cannot confirm this user because they have not setup 2FA");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot confirm this user because they are a member of an organization which forbids it");
}
}
}
member_to_confirm.status = MembershipStatus::Confirmed as i32;
member_to_confirm.akey = key.to_string();
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
OrgPolicy::check_user_allowed(&member_to_confirm, "confirm", conn).await?;
log_event(
EventType::OrganizationUserConfirmed as i32,
&member_to_confirm.uuid,
@ -1631,27 +1615,13 @@ async fn edit_member(
}
}
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// It returns different error messages per function.
if new_type < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &org_id, true, &conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?;
} else {
err!("You cannot modify this user to this type because they have not setup 2FA");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot modify this user to this type because they are a member of an organization which forbids it");
}
}
}
member_to_edit.access_all = access_all;
member_to_edit.atype = new_type as i32;
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// We need to perform the check after changing the type since `admin` is exempt.
OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?;
// Delete all the odd collections
for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &member_to_edit.user_uuid, &conn).await {
c.delete(&conn).await?;
@ -2086,8 +2056,6 @@ async fn get_policy(org_id: OrganizationId, pol_type: i32, headers: AdminHeaders
#[derive(Deserialize)]
struct PolicyData {
enabled: bool,
#[serde(rename = "type")]
_type: i32,
data: Option<Value>,
}
@ -2154,14 +2122,14 @@ async fn put_policy(
// When enabling the SingleOrg policy, remove this org's members that are members of other orgs
if pol_type_enum == OrgPolicyType::SingleOrg && data.enabled {
for member in Membership::find_by_org(&org_id, &conn).await.into_iter() {
for mut member in Membership::find_by_org(&org_id, &conn).await.into_iter() {
// Policy only applies to non-Owner/non-Admin members who have accepted joining the org
// Exclude invited and revoked users when checking for this policy.
// Those users will not be allowed to accept or be activated because of the policy checks done there.
// We check if the count is larger then 1, because it includes this organization also.
if member.atype < MembershipType::Admin
&& member.status != MembershipStatus::Invited as i32
&& Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &conn).await > 1
&& Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &member.org_uuid, &conn).await
> 0
{
if CONFIG.mail_enabled() {
let org = Organization::find_by_uuid(&member.org_uuid, &conn).await.unwrap();
@ -2181,7 +2149,8 @@ async fn put_policy(
)
.await;
member.delete(&conn).await?;
member.revoke();
member.save(&conn).await?;
}
}
}
@ -2628,25 +2597,10 @@ async fn _restore_member(
err!("Only owners can restore other owners")
}
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// It returns different error messages per function.
if member.atype < MembershipType::Admin {
match OrgPolicy::is_user_allowed(&member.user_uuid, org_id, false, conn).await {
Ok(_) => {}
Err(OrgPolicyErr::TwoFactorMissing) => {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&member.user_uuid, conn).await?;
} else {
err!("You cannot restore this user because they have not setup 2FA");
}
}
Err(OrgPolicyErr::SingleOrgEnforced) => {
err!("You cannot restore this user because they are a member of an organization which forbids it");
}
}
}
member.restore();
// This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type
// This check need to be done after restoring to work with the correct status
OrgPolicy::check_user_allowed(&member, "restore", conn).await?;
member.save(conn).await?;
log_event(

2
src/api/core/sends.rs

@ -568,7 +568,7 @@ async fn post_access_file(
async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> {
let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
if operator.info().scheme() == opendal::Scheme::Fs {
if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) {
let token_claims = crate::auth::generate_send_claims(send_id, file_id);
let token = crate::auth::encode_jwt(&token_claims);

40
src/api/core/two_factor/email.rs

@ -26,12 +26,8 @@ pub fn routes() -> Vec<Route> {
struct SendEmailLoginData {
#[serde(alias = "DeviceIdentifier")]
device_identifier: DeviceId,
#[allow(unused)]
#[serde(alias = "Email")]
email: Option<String>,
#[allow(unused)]
#[serde(alias = "MasterPasswordHash")]
master_password_hash: Option<String>,
}
@ -42,20 +38,40 @@ struct SendEmailLoginData {
async fn send_email_login(data: Json<SendEmailLoginData>, conn: DbConn) -> EmptyResult {
let data: SendEmailLoginData = data.into_inner();
use crate::db::models::User;
if !CONFIG._enable_email_2fa() {
err!("Email 2FA is disabled")
}
// Get the user
let Some(user) = User::find_by_device_id(&data.device_identifier, &conn).await else {
err!("Cannot find user. Try again.")
let email = match &data.email {
Some(email) if !email.is_empty() => Some(email),
_ => None,
};
let user = if let Some(email) = email {
let Some(master_password_hash) = &data.master_password_hash else {
err!("No password hash has been submitted.")
};
if !CONFIG._enable_email_2fa() {
err!("Email 2FA is disabled")
}
let Some(user) = User::find_by_mail(email, &conn).await else {
err!("Username or password is incorrect. Try again.")
};
send_token(&user.uuid, &conn).await?;
// Check password
if !user.check_valid_password(master_password_hash) {
err!("Username or password is incorrect. Try again.")
}
Ok(())
user
} else {
// SSO login only sends device id, so we get the user by the most recently used device
let Some(user) = User::find_by_device_for_email2fa(&data.device_identifier, &conn).await else {
err!("Username or password is incorrect. Try again.")
};
user
};
send_token(&user.uuid, &conn).await
}
/// Generate the token, save the data for later verification and send email to user

20
src/api/icons.rs

@ -82,19 +82,19 @@ static ICON_SIZE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?x)(\d+
// It is used to prevent sending a specific header which breaks icon downloads.
// If this function needs to be renamed, also adjust the code in `util.rs`
#[get("/<domain>/icon.png")]
fn icon_external(domain: &str) -> Option<Redirect> {
fn icon_external(domain: &str) -> Cached<Option<Redirect>> {
if !is_valid_domain(domain) {
warn!("Invalid domain: {domain}");
return None;
return Cached::ttl(None, CONFIG.icon_cache_negttl(), true);
}
if should_block_address(domain) {
warn!("Blocked address: {domain}");
return None;
return Cached::ttl(None, CONFIG.icon_cache_negttl(), true);
}
let url = CONFIG._icon_service_url().replace("{}", domain);
match CONFIG.icon_redirect_code() {
let redir = match CONFIG.icon_redirect_code() {
301 => Some(Redirect::moved(url)), // legacy permanent redirect
302 => Some(Redirect::found(url)), // legacy temporary redirect
307 => Some(Redirect::temporary(url)),
@ -103,7 +103,8 @@ fn icon_external(domain: &str) -> Option<Redirect> {
error!("Unexpected redirect code {}", CONFIG.icon_redirect_code());
None
}
}
};
Cached::ttl(redir, CONFIG.icon_cache_ttl(), true)
}
#[get("/<domain>/icon.png")]
@ -141,7 +142,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec<u8>)> {
/// This does some manual checks and makes use of Url to do some basic checking.
/// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255.
fn is_valid_domain(domain: &str) -> bool {
const ALLOWED_CHARS: &str = "_-.";
const ALLOWED_CHARS: &str = "-.";
// If parsing the domain fails using Url, it will not work with reqwest.
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) {
@ -796,8 +797,11 @@ impl Emitter for FaviconEmitter {
fn emit_current_tag(&mut self) -> Option<html5gum::State> {
self.flush_current_attribute(true);
self.last_start_tag.clear();
if self.current_token.is_some() && !self.current_token.as_ref().unwrap().closing {
self.last_start_tag.extend(&*self.current_token.as_ref().unwrap().tag.name);
match &self.current_token {
Some(token) if !token.closing => {
self.last_start_tag.extend(&*token.tag.name);
}
_ => {}
}
html5gum::naive_next_state(&self.last_start_tag)
}

189
src/api/identity.rs

@ -1,4 +1,4 @@
use chrono::{NaiveDateTime, Utc};
use chrono::Utc;
use num_traits::FromPrimitive;
use rocket::{
form::{Form, FromForm},
@ -24,14 +24,14 @@ use crate::{
auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion},
db::{
models::{
AuthRequest, AuthRequestId, Device, DeviceId, EventType, Invitation, OrganizationApiKey, OrganizationId,
SsoNonce, SsoUser, TwoFactor, TwoFactorIncomplete, TwoFactorType, User, UserId,
AuthRequest, AuthRequestId, Device, DeviceId, EventType, Invitation, OIDCCodeWrapper, OrganizationApiKey,
OrganizationId, SsoAuth, SsoUser, TwoFactor, TwoFactorIncomplete, TwoFactorType, User, UserId,
},
DbConn,
},
error::MapResult,
mail, sso,
sso::{OIDCCode, OIDCState},
sso::{OIDCCode, OIDCCodeChallenge, OIDCCodeVerifier, OIDCState},
util, CONFIG,
};
@ -92,6 +92,7 @@ async fn login(
"authorization_code" if CONFIG.sso_enabled() => {
_check_is_some(&data.client_id, "client_id cannot be blank")?;
_check_is_some(&data.code, "code cannot be blank")?;
_check_is_some(&data.code_verifier, "code verifier cannot be blank")?;
_check_is_some(&data.device_identifier, "device_identifier cannot be blank")?;
_check_is_some(&data.device_name, "device_name cannot be blank")?;
@ -147,7 +148,7 @@ async fn _refresh_login(data: ConnectData, conn: &DbConn, ip: &ClientIp) -> Json
}
Ok((mut device, auth_tokens)) => {
// Save to update `device.updated_at` to track usage and toggle new status
device.save(conn).await?;
device.save(true, conn).await?;
let result = json!({
"refresh_token": auth_tokens.refresh_token(),
@ -175,17 +176,23 @@ async fn _sso_login(
// Ratelimit the login
crate::ratelimit::check_limit_login(&ip.ip)?;
let code = match data.code.as_ref() {
None => err!(
let (state, code_verifier) = match (data.code.as_ref(), data.code_verifier.as_ref()) {
(None, _) => err!(
"Got no code in OIDC data",
ErrorEvent {
event: EventType::UserFailedLogIn
}
),
Some(code) => code,
(_, None) => err!(
"Got no code verifier in OIDC data",
ErrorEvent {
event: EventType::UserFailedLogIn
}
),
(Some(code), Some(code_verifier)) => (code, code_verifier.clone()),
};
let user_infos = sso::exchange_code(code, conn).await?;
let (sso_auth, user_infos) = sso::exchange_code(state, code_verifier, conn).await?;
let user_with_sso = match SsoUser::find_by_identifier(&user_infos.identifier, conn).await {
None => match SsoUser::find_by_mail(&user_infos.email, conn).await {
None => None,
@ -248,7 +255,7 @@ async fn _sso_login(
_ => (),
}
let mut user = User::new(&user_infos.email, user_infos.user_name);
let mut user = User::new(&user_infos.email, user_infos.user_name.clone());
user.verified_at = Some(now);
user.save(conn).await?;
@ -267,13 +274,14 @@ async fn _sso_login(
}
Some((mut user, sso_user)) => {
let mut device = get_device(&data, conn, &user).await?;
let twofactor_token = twofactor_auth(&mut user, &data, &mut device, ip, client_version, conn).await?;
if user.private_key.is_none() {
// User was invited a stub was created
user.verified_at = Some(now);
if let Some(user_name) = user_infos.user_name {
user.name = user_name;
if let Some(ref user_name) = user_infos.user_name {
user.name = user_name.clone();
}
user.save(conn).await?;
@ -290,30 +298,13 @@ async fn _sso_login(
}
};
// We passed 2FA get full user information
let auth_user = sso::redeem(&user_infos.state, conn).await?;
if sso_user.is_none() {
let user_sso = SsoUser {
user_uuid: user.uuid.clone(),
identifier: user_infos.identifier,
};
user_sso.save(conn).await?;
}
// Set the user_uuid here to be passed back used for event logging.
*user_id = Some(user.uuid.clone());
let auth_tokens = sso::create_auth_tokens(
&device,
&user,
data.client_id,
auth_user.refresh_token,
auth_user.access_token,
auth_user.expires_in,
)?;
// We passed 2FA get auth tokens
let auth_tokens = sso::redeem(&device, &user, data.client_id, sso_user, sso_auth, user_infos, conn).await?;
authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await
authenticated_response(&user, &mut device, auth_tokens, twofactor_token, conn, ip).await
}
async fn _password_login(
@ -435,7 +426,7 @@ async fn _password_login(
let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password, data.client_id);
authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await
authenticated_response(&user, &mut device, auth_tokens, twofactor_token, conn, ip).await
}
async fn authenticated_response(
@ -443,12 +434,12 @@ async fn authenticated_response(
device: &mut Device,
auth_tokens: auth::AuthTokens,
twofactor_token: Option<String>,
now: &NaiveDateTime,
conn: &DbConn,
ip: &ClientIp,
) -> JsonResult {
if CONFIG.mail_enabled() && device.is_new() {
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, device).await {
let now = Utc::now().naive_utc();
if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, device).await {
error!("Error sending new device email: {e:#?}");
if CONFIG.require_device_email() {
@ -468,10 +459,38 @@ async fn authenticated_response(
}
// Save to update `device.updated_at` to track usage and toggle new status
device.save(conn).await?;
device.save(true, conn).await?;
let master_password_policy = master_password_policy(user, conn).await;
let has_master_password = !user.password_hash.is_empty();
let master_password_unlock = if has_master_password {
json!({
"Kdf": {
"KdfType": user.client_kdf_type,
"Iterations": user.client_kdf_iter,
"Memory": user.client_kdf_memory,
"Parallelism": user.client_kdf_parallelism
},
// This field is named inconsistently and will be removed and replaced by the "wrapped" variant in the apps.
// https://github.com/bitwarden/android/blob/release/2025.12-rc41/network/src/main/kotlin/com/bitwarden/network/model/MasterPasswordUnlockDataJson.kt#L22-L26
"MasterKeyEncryptedUserKey": user.akey,
"MasterKeyWrappedUserKey": user.akey,
"Salt": user.email
})
} else {
Value::Null
};
let account_keys = json!({
"publicKeyEncryptionKeyPair": {
"wrappedPrivateKey": user.private_key,
"publicKey": user.public_key,
"Object": "publicKeyEncryptionKeyPair"
},
"Object": "privateKeys"
});
let mut result = json!({
"access_token": auth_tokens.access_token(),
"expires_in": auth_tokens.expires_in(),
@ -486,8 +505,10 @@ async fn authenticated_response(
"ForcePasswordReset": false,
"MasterPasswordPolicy": master_password_policy,
"scope": auth_tokens.scope(),
"AccountKeys": account_keys,
"UserDecryptionOptions": {
"HasMasterPassword": !user.password_hash.is_empty(),
"HasMasterPassword": has_master_password,
"MasterPasswordUnlock": master_password_unlock,
"Object": "userDecryptionOptions"
},
});
@ -585,10 +606,29 @@ async fn _user_api_key_login(
let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey, data.client_id);
// Save to update `device.updated_at` to track usage and toggle new status
device.save(conn).await?;
device.save(true, conn).await?;
info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip);
let has_master_password = !user.password_hash.is_empty();
let master_password_unlock = if has_master_password {
json!({
"Kdf": {
"KdfType": user.client_kdf_type,
"Iterations": user.client_kdf_iter,
"Memory": user.client_kdf_memory,
"Parallelism": user.client_kdf_parallelism
},
// This field is named inconsistently and will be removed and replaced by the "wrapped" variant in the apps.
// https://github.com/bitwarden/android/blob/release/2025.12-rc41/network/src/main/kotlin/com/bitwarden/network/model/MasterPasswordUnlockDataJson.kt#L22-L26
"MasterKeyEncryptedUserKey": user.akey,
"MasterKeyWrappedUserKey": user.akey,
"Salt": user.email
})
} else {
Value::Null
};
// Note: No refresh_token is returned. The CLI just repeats the
// client_credentials login flow when the existing token expires.
let result = json!({
@ -604,6 +644,11 @@ async fn _user_api_key_login(
"KdfParallelism": user.client_kdf_parallelism,
"ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing
"scope": AuthMethod::UserApiKey.scope(),
"UserDecryptionOptions": {
"HasMasterPassword": has_master_password,
"MasterPasswordUnlock": master_password_unlock,
"Object": "userDecryptionOptions"
},
});
Ok(Json(result))
@ -648,7 +693,12 @@ async fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> ApiResult
// Find device or create new
match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await {
Some(device) => Ok(device),
None => Device::new(device_id, user.uuid.clone(), device_name, device_type, conn).await,
None => {
let mut device = Device::new(device_id, user.uuid.clone(), device_name, device_type);
// save device without updating `device.updated_at`
device.save(false, conn).await?;
Ok(device)
}
}
}
@ -893,6 +943,7 @@ struct RegisterVerificationData {
#[derive(rocket::Responder)]
enum RegisterVerificationResponse {
#[response(status = 204)]
NoContent(()),
Token(Json<String>),
}
@ -997,9 +1048,12 @@ struct ConnectData {
two_factor_remember: Option<i32>,
#[field(name = uncased("authrequest"))]
auth_request: Option<AuthRequestId>,
// Needed for authorization code
#[field(name = uncased("code"))]
code: Option<String>,
code: Option<OIDCState>,
#[field(name = uncased("code_verifier"))]
code_verifier: Option<OIDCCodeVerifier>,
}
fn _check_is_some<T>(value: &Option<T>, msg: &str) -> EmptyResult {
if value.is_none() {
@ -1021,14 +1075,13 @@ fn prevalidate() -> JsonResult {
}
#[get("/connect/oidc-signin?<code>&<state>", rank = 1)]
async fn oidcsignin(code: OIDCCode, state: String, conn: DbConn) -> ApiResult<Redirect> {
oidcsignin_redirect(
async fn oidcsignin(code: OIDCCode, state: String, mut conn: DbConn) -> ApiResult<Redirect> {
_oidcsignin_redirect(
state,
|decoded_state| sso::OIDCCodeWrapper::Ok {
state: decoded_state,
OIDCCodeWrapper::Ok {
code,
},
&conn,
&mut conn,
)
.await
}
@ -1040,42 +1093,44 @@ async fn oidcsignin_error(
state: String,
error: String,
error_description: Option<String>,
conn: DbConn,
mut conn: DbConn,
) -> ApiResult<Redirect> {
oidcsignin_redirect(
_oidcsignin_redirect(
state,
|decoded_state| sso::OIDCCodeWrapper::Error {
state: decoded_state,
OIDCCodeWrapper::Error {
error,
error_description,
},
&conn,
&mut conn,
)
.await
}
// The state was encoded using Base64 to ensure no issue with providers.
// iss and scope parameters are needed for redirection to work on IOS.
async fn oidcsignin_redirect(
// We pass the state as the code to get it back later on.
async fn _oidcsignin_redirect(
base64_state: String,
wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper,
conn: &DbConn,
code_response: OIDCCodeWrapper,
conn: &mut DbConn,
) -> ApiResult<Redirect> {
let state = sso::decode_state(&base64_state)?;
let code = sso::encode_code_claims(wrapper(state.clone()));
let nonce = match SsoNonce::find(&state, conn).await {
Some(n) => n,
None => err!(format!("Failed to retrieve redirect_uri with {state}")),
let mut sso_auth = match SsoAuth::find(&state, conn).await {
None => err!(format!("Cannot retrieve sso_auth for {state}")),
Some(sso_auth) => sso_auth,
};
sso_auth.code_response = Some(code_response);
sso_auth.updated_at = Utc::now().naive_utc();
sso_auth.save(conn).await?;
let mut url = match url::Url::parse(&nonce.redirect_uri) {
let mut url = match url::Url::parse(&sso_auth.redirect_uri) {
Ok(url) => url,
Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)),
Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", sso_auth.redirect_uri)),
};
url.query_pairs_mut()
.append_pair("code", &code)
.append_pair("code", &state)
.append_pair("state", &state)
.append_pair("scope", &AuthMethod::Sso.scope())
.append_pair("iss", &CONFIG.domain());
@ -1098,10 +1153,8 @@ struct AuthorizeData {
#[allow(unused)]
scope: Option<String>,
state: OIDCState,
#[allow(unused)]
code_challenge: Option<String>,
#[allow(unused)]
code_challenge_method: Option<String>,
code_challenge: OIDCCodeChallenge,
code_challenge_method: String,
#[allow(unused)]
response_mode: Option<String>,
#[allow(unused)]
@ -1118,10 +1171,16 @@ async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult<Redirect> {
client_id,
redirect_uri,
state,
code_challenge,
code_challenge_method,
..
} = data;
let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?;
if code_challenge_method != "S256" {
err!("Unsupported code challenge method");
}
let auth_url = sso::authorize_url(state, code_challenge, &client_id, &redirect_uri, conn).await?;
Ok(Redirect::temporary(String::from(auth_url)))
}

1
src/api/mod.rs

@ -47,6 +47,7 @@ pub type EmptyResult = ApiResult<()>;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct PasswordOrOtpData {
#[serde(alias = "MasterPasswordHash")]
master_password_hash: Option<String>,
otp: Option<String>,
}

2
src/api/push.rs

@ -128,7 +128,7 @@ pub async fn register_push_device(device: &mut Device, conn: &DbConn) -> EmptyRe
err!(format!("An error occurred while proceeding registration of a device: {e}"));
}
if let Err(e) = device.save(conn).await {
if let Err(e) = device.save(true, conn).await {
err!(format!("An error occurred while trying to save the (registered) device push uuid: {e}"));
}

18
src/auth.rs

@ -1210,8 +1210,20 @@ pub async fn refresh_tokens(
) -> ApiResult<(Device, AuthTokens)> {
let refresh_claims = match decode_refresh(refresh_token) {
Err(err) => {
debug!("Failed to decode {} refresh_token: {refresh_token}", ip.ip);
err_silent!(format!("Impossible to read refresh_token: {}", err.message()))
error!("Failed to decode {} refresh_token: {refresh_token}: {err:?}", ip.ip);
//err_silent!(format!("Impossible to read refresh_token: {}", err.message()))
// If the token failed to decode, it was probably one of the old style tokens that was just a Base64 string.
// We can generate a claim for them for backwards compatibility. Note that the password refresh claims don't
// check expiration or issuer, so they're not included here.
RefreshJwtClaims {
nbf: 0,
exp: 0,
iss: String::new(),
sub: AuthMethod::Password,
device_token: refresh_token.into(),
token: None,
}
}
Ok(claims) => claims,
};
@ -1223,7 +1235,7 @@ pub async fn refresh_tokens(
};
// Save to update `updated_at`.
device.save(conn).await?;
device.save(true, conn).await?;
let user = match User::find_by_uuid(&device.user_uuid, conn).await {
None => err!("Impossible to find user"),

21
src/config.rs

@ -14,7 +14,7 @@ use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
use crate::{
error::Error,
util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags},
util::{get_active_web_release, get_env, get_env_bool, is_valid_email, parse_experimental_client_feature_flags},
};
static CONFIG_FILE: LazyLock<String> = LazyLock::new(|| {
@ -564,9 +564,9 @@ make_config! {
/// Duo Auth context cleanup schedule |> Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt.
/// Defaults to once every minute. Set blank to disable this job.
duo_context_purge_schedule: String, false, def, "30 * * * * *".to_string();
/// Purge incomplete SSO nonce. |> Cron schedule of the job that cleans leftover nonce in db due to incomplete SSO login.
/// Purge incomplete SSO auth. |> Cron schedule of the job that cleans leftover auth in db due to incomplete SSO login.
/// Defaults to daily. Set blank to disable this job.
purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string();
purge_incomplete_sso_auth: String, false, def, "0 20 0 * * *".to_string();
},
/// General settings
@ -789,6 +789,10 @@ make_config! {
/// Bitwarden enforces this by default. In Vaultwarden we encouraged to use multiple organizations because groups were not available.
/// Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
enforce_single_org_with_reset_pw_policy: bool, false, def, false;
/// Prefer IPv6 (AAAA) resolving |> This settings configures the DNS resolver to resolve IPv6 first, and if not available try IPv4
/// This could be useful in IPv6 only environments.
dns_prefer_ipv6: bool, true, def, false;
},
/// OpenID Connect SSO settings
@ -1035,6 +1039,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> {
"ssh-agent",
// Key Management Team
"ssh-key-vault-item",
"pm-25373-windows-biometrics-v2",
// Tools
"export-attachments",
// Mobile Team
@ -1320,12 +1325,16 @@ fn generate_smtp_img_src(embed_images: bool, domain: &str) -> String {
if embed_images {
"cid:".to_string()
} else {
format!("{domain}/vw_static/")
// normalize base_url
let base_url = domain.trim_end_matches('/');
format!("{base_url}/vw_static/")
}
}
fn generate_sso_callback_path(domain: &str) -> String {
format!("{domain}/identity/connect/oidc-signin")
// normalize base_url
let base_url = domain.trim_end_matches('/');
format!("{base_url}/identity/connect/oidc-signin")
}
/// Generate the correct URL for the icon service.
@ -1840,7 +1849,7 @@ fn to_json<'reg, 'rc>(
// Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then.
// The default is based upon the version since this feature is added.
static WEB_VAULT_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
let vault_version = get_web_vault_version();
let vault_version = get_active_web_release();
// Use a single regex capture to extract version components
let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
re.captures(&vault_version)

40
src/db/mod.rs

@ -337,6 +337,46 @@ macro_rules! db_run {
};
}
// Write all ToSql<Text, DB> and FromSql<Text, DB> given a serializable/deserializable type.
#[macro_export]
macro_rules! impl_FromToSqlText {
($name:ty) => {
#[cfg(mysql)]
impl ToSql<Text, diesel::mysql::Mysql> for $name {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::mysql::Mysql>) -> diesel::serialize::Result {
serde_json::to_writer(out, self).map(|_| diesel::serialize::IsNull::No).map_err(Into::into)
}
}
#[cfg(postgresql)]
impl ToSql<Text, diesel::pg::Pg> for $name {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::pg::Pg>) -> diesel::serialize::Result {
serde_json::to_writer(out, self).map(|_| diesel::serialize::IsNull::No).map_err(Into::into)
}
}
#[cfg(sqlite)]
impl ToSql<Text, diesel::sqlite::Sqlite> for $name {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::sqlite::Sqlite>) -> diesel::serialize::Result {
serde_json::to_string(self).map_err(Into::into).map(|str| {
out.set_value(str);
diesel::serialize::IsNull::No
})
}
}
impl<DB: diesel::backend::Backend> FromSql<Text, DB> for $name
where
String: FromSql<Text, DB>,
{
fn from_sql(bytes: DB::RawValue<'_>) -> diesel::deserialize::Result<Self> {
<String as FromSql<Text, DB>>::from_sql(bytes)
.and_then(|str| serde_json::from_str(&str).map_err(Into::into))
}
}
};
}
pub mod schema;
// Reexport the models, needs to be after the macros are defined so it can access them

2
src/db/models/attachment.rs

@ -46,7 +46,7 @@ impl Attachment {
pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> {
let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
if operator.info().scheme() == opendal::Scheme::Fs {
if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) {
let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
Ok(format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id))
} else {

58
src/db/models/device.rs

@ -35,6 +35,25 @@ pub struct Device {
/// Local methods
impl Device {
pub fn new(uuid: DeviceId, user_uuid: UserId, name: String, atype: i32) -> Self {
let now = Utc::now().naive_utc();
Self {
uuid,
created_at: now,
updated_at: now,
user_uuid,
name,
atype,
push_uuid: Some(PushId(get_uuid())),
push_token: None,
refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL),
twofactor_remember: None,
}
}
pub fn to_json(&self) -> Value {
json!({
"id": self.uuid,
@ -110,38 +129,21 @@ impl DeviceWithAuthRequest {
}
use crate::db::DbConn;
use crate::api::{ApiResult, EmptyResult};
use crate::api::EmptyResult;
use crate::error::MapResult;
/// Database methods
impl Device {
pub async fn new(uuid: DeviceId, user_uuid: UserId, name: String, atype: i32, conn: &DbConn) -> ApiResult<Device> {
let now = Utc::now().naive_utc();
let device = Self {
uuid,
created_at: now,
updated_at: now,
user_uuid,
name,
atype,
push_uuid: Some(PushId(get_uuid())),
push_token: None,
refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL),
twofactor_remember: None,
};
device.inner_save(conn).await.map(|()| device)
}
pub async fn save(&mut self, update_time: bool, conn: &DbConn) -> EmptyResult {
if update_time {
self.updated_at = Utc::now().naive_utc();
}
async fn inner_save(&self, conn: &DbConn) -> EmptyResult {
db_run! { conn:
sqlite, mysql {
crate::util::retry(||
diesel::replace_into(devices::table)
.values(self)
.values(&*self)
.execute(conn),
10,
).map_res("Error saving device")
@ -149,10 +151,10 @@ impl Device {
postgresql {
crate::util::retry(||
diesel::insert_into(devices::table)
.values(self)
.values(&*self)
.on_conflict((devices::uuid, devices::user_uuid))
.do_update()
.set(self)
.set(&*self)
.execute(conn),
10,
).map_res("Error saving device")
@ -160,12 +162,6 @@ impl Device {
}
}
// Should only be called after user has passed authentication
pub async fn save(&mut self, conn: &DbConn) -> EmptyResult {
self.updated_at = Utc::now().naive_utc();
self.inner_save(conn).await
}
pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> EmptyResult {
db_run! { conn: {
diesel::delete(devices::table.filter(devices::user_uuid.eq(user_uuid)))

6
src/db/models/mod.rs

@ -11,7 +11,7 @@ mod group;
mod org_policy;
mod organization;
mod send;
mod sso_nonce;
mod sso_auth;
mod two_factor;
mod two_factor_duo_context;
mod two_factor_incomplete;
@ -27,7 +27,7 @@ pub use self::event::{Event, EventType};
pub use self::favorite::Favorite;
pub use self::folder::{Folder, FolderCipher, FolderId};
pub use self::group::{CollectionGroup, Group, GroupId, GroupUser};
pub use self::org_policy::{OrgPolicy, OrgPolicyErr, OrgPolicyId, OrgPolicyType};
pub use self::org_policy::{OrgPolicy, OrgPolicyId, OrgPolicyType};
pub use self::organization::{
Membership, MembershipId, MembershipStatus, MembershipType, OrgApiKeyId, Organization, OrganizationApiKey,
OrganizationId,
@ -36,7 +36,7 @@ pub use self::send::{
id::{SendFileId, SendId},
Send, SendType,
};
pub use self::sso_nonce::SsoNonce;
pub use self::sso_auth::{OIDCAuthenticatedUser, OIDCCodeWrapper, SsoAuth};
pub use self::two_factor::{TwoFactor, TwoFactorType};
pub use self::two_factor_duo_context::TwoFactorDuoContext;
pub use self::two_factor_incomplete::TwoFactorIncomplete;

64
src/db/models/org_policy.rs

@ -2,10 +2,12 @@ use derive_more::{AsRef, From};
use serde::Deserialize;
use serde_json::Value;
use crate::api::core::two_factor;
use crate::api::EmptyResult;
use crate::db::schema::{org_policies, users_organizations};
use crate::db::DbConn;
use crate::error::MapResult;
use crate::CONFIG;
use diesel::prelude::*;
use super::{Membership, MembershipId, MembershipStatus, MembershipType, OrganizationId, TwoFactor, UserId};
@ -40,6 +42,10 @@ pub enum OrgPolicyType {
// FreeFamiliesSponsorshipPolicy = 13,
RemoveUnlockWithPin = 14,
RestrictedItemTypes = 15,
UriMatchDefaults = 16,
// AutotypeDefaultSetting = 17, // Not supported yet
// AutoConfirm = 18, // Not supported (not implemented yet)
// BlockClaimedDomainAccountCreation = 19, // Not supported (Not AGPLv3 Licensed)
}
// https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs#L5
@ -58,14 +64,6 @@ pub struct ResetPasswordDataModel {
pub auto_enroll_enabled: bool,
}
pub type OrgPolicyResult = Result<(), OrgPolicyErr>;
#[derive(Debug)]
pub enum OrgPolicyErr {
TwoFactorMissing,
SingleOrgEnforced,
}
/// Local methods
impl OrgPolicy {
pub fn new(org_uuid: OrganizationId, atype: OrgPolicyType, enabled: bool, data: String) -> Self {
@ -280,31 +278,35 @@ impl OrgPolicy {
false
}
pub async fn is_user_allowed(
user_uuid: &UserId,
org_uuid: &OrganizationId,
exclude_current_org: bool,
conn: &DbConn,
) -> OrgPolicyResult {
// Enforce TwoFactor/TwoStep login
if TwoFactor::find_by_user(user_uuid, conn).await.is_empty() {
match Self::find_by_org_and_type(org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await {
Some(p) if p.enabled => {
return Err(OrgPolicyErr::TwoFactorMissing);
pub async fn check_user_allowed(m: &Membership, action: &str, conn: &DbConn) -> EmptyResult {
if m.atype < MembershipType::Admin && m.status > (MembershipStatus::Invited as i32) {
// Enforce TwoFactor/TwoStep login
if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await
{
if p.enabled && TwoFactor::find_by_user(&m.user_uuid, conn).await.is_empty() {
if CONFIG.email_2fa_auto_fallback() {
two_factor::email::find_and_activate_email_2fa(&m.user_uuid, conn).await?;
} else {
err!(format!("Cannot {} because 2FA is required (membership {})", action, m.uuid));
}
}
_ => {}
};
}
}
// Check if the user is part of another Organization with SingleOrg activated
if Self::is_applicable_to_user(&m.user_uuid, OrgPolicyType::SingleOrg, Some(&m.org_uuid), conn).await {
err!(format!(
"Cannot {} because another organization policy forbids it (membership {})",
action, m.uuid
));
}
// Enforce Single Organization Policy of other organizations user is a member of
// This check here needs to exclude this current org-id, else an accepted user can not be confirmed.
let exclude_org = if exclude_current_org {
Some(org_uuid)
} else {
None
};
if Self::is_applicable_to_user(user_uuid, OrgPolicyType::SingleOrg, exclude_org, conn).await {
return Err(OrgPolicyErr::SingleOrgEnforced);
if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::SingleOrg, conn).await {
if p.enabled
&& Membership::count_accepted_and_confirmed_by_user(&m.user_uuid, &m.org_uuid, conn).await > 0
{
err!(format!("Cannot {} because the organization policy forbids being part of other organization (membership {})", action, m.uuid));
}
}
}
Ok(())

7
src/db/models/organization.rs

@ -883,10 +883,15 @@ impl Membership {
}}
}
pub async fn count_accepted_and_confirmed_by_user(user_uuid: &UserId, conn: &DbConn) -> i64 {
pub async fn count_accepted_and_confirmed_by_user(
user_uuid: &UserId,
excluded_org: &OrganizationId,
conn: &DbConn,
) -> i64 {
db_run! { conn: {
users_organizations::table
.filter(users_organizations::user_uuid.eq(user_uuid))
.filter(users_organizations::org_uuid.ne(excluded_org))
.filter(users_organizations::status.eq(MembershipStatus::Accepted as i32).or(users_organizations::status.eq(MembershipStatus::Confirmed as i32)))
.count()
.first::<i64>(conn)

134
src/db/models/sso_auth.rs

@ -0,0 +1,134 @@
use chrono::{NaiveDateTime, Utc};
use std::time::Duration;
use crate::api::EmptyResult;
use crate::db::schema::sso_auth;
use crate::db::{DbConn, DbPool};
use crate::error::MapResult;
use crate::sso::{OIDCCode, OIDCCodeChallenge, OIDCIdentifier, OIDCState, SSO_AUTH_EXPIRATION};
use diesel::deserialize::FromSql;
use diesel::expression::AsExpression;
use diesel::prelude::*;
use diesel::serialize::{Output, ToSql};
use diesel::sql_types::Text;
// Outcome of the identity provider's redirect back to us: either the
// authorization `code`, or the provider-supplied error (with an optional
// human-readable description). Persisted as JSON text in the
// `sso_auth.code_response` column via the `impl_FromToSqlText!` macro below.
#[derive(AsExpression, Clone, Debug, Serialize, Deserialize, FromSqlRow)]
#[diesel(sql_type = Text)]
pub enum OIDCCodeWrapper {
    Ok {
        code: OIDCCode,
    },
    Error {
        error: String,
        error_description: Option<String>,
    },
}

// Serialize/deserialize this enum to a Text column as JSON.
impl_FromToSqlText!(OIDCCodeWrapper);
// Result of exchanging the authorization code at the provider's token
// endpoint. Persisted as JSON text in `sso_auth.auth_response` so a login
// that is interrupted by the 2FA step can resume without exchanging the
// (single-use) code a second time.
#[derive(AsExpression, Clone, Debug, Serialize, Deserialize, FromSqlRow)]
#[diesel(sql_type = Text)]
pub struct OIDCAuthenticatedUser {
    // Provider refresh token, if the provider returned one.
    pub refresh_token: Option<String>,
    pub access_token: String,
    // Access-token lifetime as reported by the token response, if any.
    pub expires_in: Option<Duration>,
    // Stable issuer+subject identifier linking this login to a local user.
    pub identifier: OIDCIdentifier,
    pub email: String,
    pub email_verified: Option<bool>,
    pub user_name: Option<String>,
}

// Serialize/deserialize this struct to a Text column as JSON.
impl_FromToSqlText!(OIDCAuthenticatedUser);
// One row per in-flight SSO login, keyed by the OIDC `state` value.
// `treat_none_as_null` makes saving a `None` response column clear it in
// the database rather than leaving the stored value untouched.
#[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)]
#[diesel(table_name = sso_auth)]
#[diesel(treat_none_as_null = true)]
#[diesel(primary_key(state))]
pub struct SsoAuth {
    pub state: OIDCState,
    // PKCE challenge supplied by the Bitwarden client, verified on exchange.
    pub client_challenge: OIDCCodeChallenge,
    // OIDC nonce used to validate the id_token (replay protection).
    pub nonce: String,
    pub redirect_uri: String,
    // Filled in when the provider redirects back with a code (or an error).
    pub code_response: Option<OIDCCodeWrapper>,
    // Filled in once the code has been exchanged for tokens.
    pub auth_response: Option<OIDCAuthenticatedUser>,
    pub created_at: NaiveDateTime,
    pub updated_at: NaiveDateTime,
}
/// Local methods
impl SsoAuth {
    /// Creates a fresh, not-yet-answered auth row: both response columns
    /// start empty and both timestamps are set to the current UTC time.
    pub fn new(state: OIDCState, client_challenge: OIDCCodeChallenge, nonce: String, redirect_uri: String) -> Self {
        let timestamp = Utc::now().naive_utc();
        Self {
            state,
            client_challenge,
            nonce,
            redirect_uri,
            code_response: None,
            auth_response: None,
            created_at: timestamp,
            updated_at: timestamp,
        }
    }
}
/// Database methods
impl SsoAuth {
    /// Upserts this row on its primary key (`state`).
    ///
    /// MySQL cannot target a specific conflict column, so it uses Diesel's
    /// `DuplicatedKeys` (ON DUPLICATE KEY UPDATE); PostgreSQL and SQLite
    /// name the `state` column explicitly.
    pub async fn save(&self, conn: &DbConn) -> EmptyResult {
        db_run! { conn:
            mysql {
                diesel::insert_into(sso_auth::table)
                    .values(self)
                    .on_conflict(diesel::dsl::DuplicatedKeys)
                    .do_update()
                    .set(self)
                    .execute(conn)
                    .map_res("Error saving SSO auth")
            }
            postgresql, sqlite {
                diesel::insert_into(sso_auth::table)
                    .values(self)
                    .on_conflict(sso_auth::state)
                    .do_update()
                    .set(self)
                    .execute(conn)
                    .map_res("Error saving SSO auth")
            }
        }
    }

    /// Looks up a pending auth flow by `state`, ignoring rows older than
    /// `SSO_AUTH_EXPIRATION` (stale rows are left for `delete_expired`).
    pub async fn find(state: &OIDCState, conn: &DbConn) -> Option<Self> {
        let oldest = Utc::now().naive_utc() - *SSO_AUTH_EXPIRATION;
        db_run! { conn: {
            sso_auth::table
                .filter(sso_auth::state.eq(state))
                .filter(sso_auth::created_at.ge(oldest))
                .first::<Self>(conn)
                .ok()
        }}
    }

    /// Removes this row. Consumes `self`: the `state` is single-use, so the
    /// in-memory value must not outlive the database record.
    pub async fn delete(self, conn: &DbConn) -> EmptyResult {
        db_run! {conn: {
            diesel::delete(sso_auth::table.filter(sso_auth::state.eq(self.state)))
                .execute(conn)
                .map_res("Error deleting sso_auth")
        }}
    }

    /// Scheduled job: purges all rows older than `SSO_AUTH_EXPIRATION`
    /// (abandoned/incomplete login flows).
    pub async fn delete_expired(pool: DbPool) -> EmptyResult {
        debug!("Purging expired sso_auth");
        if let Ok(conn) = pool.get().await {
            let oldest = Utc::now().naive_utc() - *SSO_AUTH_EXPIRATION;
            db_run! { conn: {
                diesel::delete(sso_auth::table.filter(sso_auth::created_at.lt(oldest)))
                    .execute(conn)
                    // Fixed: this message still said "SSO nonce", a leftover
                    // from the sso_nonce -> sso_auth rename.
                    .map_res("Error deleting expired SSO auth")
            }}
        } else {
            err!("Failed to get DB connection while purging expired sso_auth")
        }
    }
}

87
src/db/models/sso_nonce.rs

@ -1,87 +0,0 @@
use chrono::{NaiveDateTime, Utc};
use crate::api::EmptyResult;
use crate::db::schema::sso_nonce;
use crate::db::{DbConn, DbPool};
use crate::error::MapResult;
use crate::sso::{OIDCState, NONCE_EXPIRATION};
use diesel::prelude::*;
// Legacy model for the `sso_nonce` table (superseded by `SsoAuth`):
// one row per in-flight SSO login, keyed by the OIDC `state`.
#[derive(Identifiable, Queryable, Insertable)]
#[diesel(table_name = sso_nonce)]
#[diesel(primary_key(state))]
pub struct SsoNonce {
    pub state: OIDCState,
    // OIDC nonce used to validate the id_token (replay protection).
    pub nonce: String,
    // PKCE verifier, present only when PKCE is enabled.
    pub verifier: Option<String>,
    pub redirect_uri: String,
    pub created_at: NaiveDateTime,
}
/// Local methods
impl SsoNonce {
    /// Creates an in-memory nonce row with `created_at` set to the current
    /// UTC time; it is not persisted until `save` is called.
    pub fn new(state: OIDCState, nonce: String, verifier: Option<String>, redirect_uri: String) -> Self {
        let now = Utc::now().naive_utc();
        SsoNonce {
            state,
            nonce,
            verifier,
            redirect_uri,
            created_at: now,
        }
    }
}
/// Database methods
impl SsoNonce {
    /// Inserts (or, on SQLite/MySQL, replaces) this row. PostgreSQL has no
    /// `replace_into`, so it does a plain insert there.
    pub async fn save(&self, conn: &DbConn) -> EmptyResult {
        db_run! { conn:
            sqlite, mysql {
                diesel::replace_into(sso_nonce::table)
                    .values(self)
                    .execute(conn)
                    .map_res("Error saving SSO nonce")
            }
            postgresql {
                diesel::insert_into(sso_nonce::table)
                    .values(self)
                    .execute(conn)
                    .map_res("Error saving SSO nonce")
            }
        }
    }

    /// Deletes the row matching `state`, if any.
    pub async fn delete(state: &OIDCState, conn: &DbConn) -> EmptyResult {
        db_run! { conn: {
            diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state)))
                .execute(conn)
                .map_res("Error deleting SSO nonce")
        }}
    }

    /// Looks up a nonce by `state`, ignoring rows older than
    /// `NONCE_EXPIRATION` (stale rows are left for `delete_expired`).
    pub async fn find(state: &OIDCState, conn: &DbConn) -> Option<Self> {
        let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION;
        db_run! { conn: {
            sso_nonce::table
                .filter(sso_nonce::state.eq(state))
                .filter(sso_nonce::created_at.ge(oldest))
                .first::<Self>(conn)
                .ok()
        }}
    }

    /// Scheduled job: purges all rows older than `NONCE_EXPIRATION`
    /// (abandoned/incomplete login flows).
    pub async fn delete_expired(pool: DbPool) -> EmptyResult {
        debug!("Purging expired sso_nonce");
        if let Ok(conn) = pool.get().await {
            let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION;
            db_run! { conn: {
                diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest)))
                    .execute(conn)
                    .map_res("Error deleting expired SSO nonce")
            }}
        } else {
            err!("Failed to get DB connection while purging expired sso_nonce")
        }
    }
}

24
src/db/models/user.rs

@ -1,4 +1,4 @@
use crate::db::schema::{devices, invitations, sso_users, users};
use crate::db::schema::{invitations, sso_users, twofactor_incomplete, users};
use chrono::{NaiveDateTime, TimeDelta, Utc};
use derive_more::{AsRef, Deref, Display, From};
use diesel::prelude::*;
@ -10,8 +10,7 @@ use super::{
use crate::{
api::EmptyResult,
crypto,
db::models::DeviceId,
db::DbConn,
db::{models::DeviceId, DbConn},
error::MapResult,
sso::OIDCIdentifier,
util::{format_date, get_uuid, retry},
@ -387,15 +386,18 @@ impl User {
}}
}
pub async fn find_by_device_id(device_uuid: &DeviceId, conn: &DbConn) -> Option<Self> {
db_run! { conn: {
users::table
.inner_join(devices::table.on(devices::user_uuid.eq(users::uuid)))
.filter(devices::uuid.eq(device_uuid))
.select(users::all_columns)
.first::<Self>(conn)
pub async fn find_by_device_for_email2fa(device_uuid: &DeviceId, conn: &DbConn) -> Option<Self> {
if let Some(user_uuid) = db_run! ( conn: {
twofactor_incomplete::table
.filter(twofactor_incomplete::device_uuid.eq(device_uuid))
.order_by(twofactor_incomplete::login_time.desc())
.select(twofactor_incomplete::user_uuid)
.first::<UserId>(conn)
.ok()
}}
}) {
return Self::find_by_uuid(&user_uuid, conn).await;
}
None
}
pub async fn get_all(conn: &DbConn) -> Vec<(Self, Option<SsoUser>)> {

7
src/db/schema.rs

@ -256,12 +256,15 @@ table! {
}
table! {
sso_nonce (state) {
sso_auth (state) {
state -> Text,
client_challenge -> Text,
nonce -> Text,
verifier -> Nullable<Text>,
redirect_uri -> Text,
code_response -> Nullable<Text>,
auth_response -> Nullable<Text>,
created_at -> Timestamp,
updated_at -> Timestamp,
}
}

5
src/http_client.rs

@ -185,7 +185,10 @@ impl CustomDnsResolver {
fn new() -> Arc<Self> {
match TokioResolver::builder(TokioConnectionProvider::default()) {
Ok(builder) => {
Ok(mut builder) => {
if CONFIG.dns_prefer_ipv6() {
builder.options_mut().ip_strategy = hickory_resolver::config::LookupIpStrategy::Ipv6thenIpv4;
}
let resolver = builder.build();
Arc::new(Self::Hickory(Arc::new(resolver)))
}

6
src/mail.rs

@ -705,7 +705,7 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult {
}
async fn send_email(address: &str, subject: &str, body_html: String, body_text: String) -> EmptyResult {
let smtp_from = &CONFIG.smtp_from();
let smtp_from = Address::from_str(&CONFIG.smtp_from())?;
let body = if CONFIG.smtp_embed_images() {
let logo_gray_body = Body::new(crate::api::static_files("logo-gray.png").unwrap().1.to_vec());
@ -727,9 +727,9 @@ async fn send_email(address: &str, subject: &str, body_html: String, body_text:
};
let email = Message::builder()
.message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::<Vec<&str>>()[1])))
.message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.domain())))
.to(Mailbox::new(None, Address::from_str(address)?))
.from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?))
.from(Mailbox::new(Some(CONFIG.smtp_from_name()), smtp_from))
.subject(subject)
.multipart(body)?;

14
src/main.rs

@ -126,7 +126,7 @@ fn parse_args() {
exit(0);
} else if pargs.contains(["-v", "--version"]) {
config::SKIP_CONFIG_VALIDATION.store(true, Ordering::Relaxed);
let web_vault_version = util::get_web_vault_version();
let web_vault_version = util::get_active_web_release();
println!("Vaultwarden {version}");
println!("Web-Vault {web_vault_version}");
exit(0);
@ -246,8 +246,8 @@ fn init_logging() -> Result<log::LevelFilter, Error> {
.split(',')
.collect::<Vec<&str>>()
.into_iter()
.flat_map(|s| match s.split('=').collect::<Vec<&str>>()[..] {
[log, lvl_str] => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)),
.flat_map(|s| match s.split_once('=') {
Some((log, lvl_str)) => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)),
_ => None,
})
.collect()
@ -699,10 +699,10 @@ fn schedule_jobs(pool: db::DbPool) {
}));
}
// Purge sso nonce from incomplete flow (default to daily at 00h20).
if !CONFIG.purge_incomplete_sso_nonce().is_empty() {
sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || {
runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone()));
// Purge sso auth from incomplete flow (default to daily at 00h20).
if !CONFIG.purge_incomplete_sso_auth().is_empty() {
sched.add(Job::new(CONFIG.purge_incomplete_sso_auth().parse().unwrap(), || {
runtime.spawn(db::models::SsoAuth::delete_expired(pool.clone()));
}));
}

274
src/sso.rs

@ -1,8 +1,7 @@
use std::{sync::LazyLock, time::Duration};
use chrono::Utc;
use derive_more::{AsRef, Deref, Display, From};
use mini_moka::sync::Cache;
use derive_more::{AsRef, Deref, Display, From, Into};
use regex::Regex;
use url::Url;
@ -11,7 +10,7 @@ use crate::{
auth,
auth::{AuthMethod, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY},
db::{
models::{Device, SsoNonce, User},
models::{Device, OIDCAuthenticatedUser, OIDCCodeWrapper, SsoAuth, SsoUser, User},
DbConn,
},
sso_client::Client,
@ -20,12 +19,10 @@ use crate::{
pub static FAKE_IDENTIFIER: &str = "VW_DUMMY_IDENTIFIER_FOR_OIDC";
static AC_CACHE: LazyLock<Cache<OIDCState, AuthenticatedUser>> =
LazyLock::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
static SSO_JWT_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin()));
pub static NONCE_EXPIRATION: LazyLock<chrono::Duration> = LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
pub static SSO_AUTH_EXPIRATION: LazyLock<chrono::Duration> =
LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
#[derive(
Clone,
@ -47,6 +44,47 @@ pub static NONCE_EXPIRATION: LazyLock<chrono::Duration> = LazyLock::new(|| chron
#[from(forward)]
pub struct OIDCCode(String);
#[derive(
Clone,
Debug,
Default,
DieselNewType,
FromForm,
PartialEq,
Eq,
Hash,
Serialize,
Deserialize,
AsRef,
Deref,
Display,
From,
Into,
)]
#[deref(forward)]
#[into(owned)]
pub struct OIDCCodeChallenge(String);
#[derive(
Clone,
Debug,
Default,
DieselNewType,
FromForm,
PartialEq,
Eq,
Hash,
Serialize,
Deserialize,
AsRef,
Deref,
Display,
Into,
)]
#[deref(forward)]
#[into(owned)]
pub struct OIDCCodeVerifier(String);
#[derive(
Clone,
Debug,
@ -91,40 +129,6 @@ pub fn encode_ssotoken_claims() -> String {
auth::encode_jwt(&claims)
}
#[derive(Debug, Serialize, Deserialize)]
pub enum OIDCCodeWrapper {
Ok {
state: OIDCState,
code: OIDCCode,
},
Error {
state: OIDCState,
error: String,
error_description: Option<String>,
},
}
#[derive(Debug, Serialize, Deserialize)]
struct OIDCCodeClaims {
// Expiration time
pub exp: i64,
// Issuer
pub iss: String,
pub code: OIDCCodeWrapper,
}
pub fn encode_code_claims(code: OIDCCodeWrapper) -> String {
let time_now = Utc::now();
let claims = OIDCCodeClaims {
exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(),
iss: SSO_JWT_ISSUER.to_string(),
code,
};
auth::encode_jwt(&claims)
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct BasicTokenClaims {
iat: Option<i64>,
@ -132,6 +136,12 @@ struct BasicTokenClaims {
exp: i64,
}
#[derive(Deserialize)]
struct BasicTokenClaimsValidation {
exp: u64,
iss: String,
}
impl BasicTokenClaims {
fn nbf(&self) -> i64 {
self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp())
@ -139,13 +149,23 @@ impl BasicTokenClaims {
}
fn decode_token_claims(token_name: &str, token: &str) -> ApiResult<BasicTokenClaims> {
let mut validation = jsonwebtoken::Validation::default();
validation.set_issuer(&[CONFIG.sso_authority()]);
validation.insecure_disable_signature_validation();
validation.validate_aud = false;
// We need to manually validate this token, since `insecure_decode` does not do this
match jsonwebtoken::dangerous::insecure_decode::<BasicTokenClaimsValidation>(token) {
Ok(btcv) => {
let now = jsonwebtoken::get_current_timestamp();
let validate_claim = btcv.claims;
// Validate the exp in the claim with a leeway of 60 seconds, same as jsonwebtoken does
if validate_claim.exp < now - 60 {
err_silent!(format!("Expired Signature for base token claim from {token_name}"))
}
if validate_claim.iss.ne(&CONFIG.sso_authority()) {
err_silent!(format!("Invalid Issuer for base token claim from {token_name}"))
}
match jsonwebtoken::decode(token, &jsonwebtoken::DecodingKey::from_secret(&[]), &validation) {
Ok(btc) => Ok(btc.claims),
// All is validated and ok, lets decode again using the wanted struct
let btc = jsonwebtoken::dangerous::insecure_decode::<BasicTokenClaims>(token).unwrap();
Ok(btc.claims)
}
Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")),
}
}
@ -162,9 +182,14 @@ pub fn decode_state(base64_state: &str) -> ApiResult<OIDCState> {
Ok(state)
}
// The `nonce` allow to protect against replay attacks
// redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs
pub async fn authorize_url(state: OIDCState, client_id: &str, raw_redirect_uri: &str, conn: DbConn) -> ApiResult<Url> {
pub async fn authorize_url(
state: OIDCState,
client_challenge: OIDCCodeChallenge,
client_id: &str,
raw_redirect_uri: &str,
conn: DbConn,
) -> ApiResult<Url> {
let redirect_uri = match client_id {
"web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()),
"desktop" | "mobile" => "bitwarden://sso-callback".to_string(),
@ -178,8 +203,8 @@ pub async fn authorize_url(state: OIDCState, client_id: &str, raw_redirect_uri:
_ => err!(format!("Unsupported client {client_id}")),
};
let (auth_url, nonce) = Client::authorize_url(state, redirect_uri).await?;
nonce.save(&conn).await?;
let (auth_url, sso_auth) = Client::authorize_url(state, client_challenge, redirect_uri).await?;
sso_auth.save(&conn).await?;
Ok(auth_url)
}
@ -209,78 +234,45 @@ impl OIDCIdentifier {
}
}
#[derive(Clone, Debug)]
pub struct AuthenticatedUser {
pub refresh_token: Option<String>,
pub access_token: String,
pub expires_in: Option<Duration>,
pub identifier: OIDCIdentifier,
pub email: String,
pub email_verified: Option<bool>,
pub user_name: Option<String>,
}
#[derive(Clone, Debug)]
pub struct UserInformation {
pub state: OIDCState,
pub identifier: OIDCIdentifier,
pub email: String,
pub email_verified: Option<bool>,
pub user_name: Option<String>,
}
async fn decode_code_claims(code: &str, conn: &DbConn) -> ApiResult<(OIDCCode, OIDCState)> {
match auth::decode_jwt::<OIDCCodeClaims>(code, SSO_JWT_ISSUER.to_string()) {
Ok(code_claims) => match code_claims.code {
OIDCCodeWrapper::Ok {
state,
code,
} => Ok((code, state)),
OIDCCodeWrapper::Error {
state,
error,
error_description,
} => {
if let Err(err) = SsoNonce::delete(&state, conn).await {
error!("Failed to delete database sso_nonce using {state}: {err}")
}
err!(format!(
"SSO authorization failed: {error}, {}",
error_description.as_ref().unwrap_or(&String::new())
))
}
},
Err(err) => err!(format!("Failed to decode code wrapper: {err}")),
}
}
// During the 2FA flow we will
// - retrieve the user information and then only discover he needs 2FA.
// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged.
// The `nonce` will ensure that the user is authorized only once.
// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`.
pub async fn exchange_code(wrapped_code: &str, conn: &DbConn) -> ApiResult<UserInformation> {
// - second time we will rely on `SsoAuth.auth_response` since the `code` has already been exchanged.
// The `SsoAuth` will ensure that the user is authorized only once.
pub async fn exchange_code(
state: &OIDCState,
client_verifier: OIDCCodeVerifier,
conn: &DbConn,
) -> ApiResult<(SsoAuth, OIDCAuthenticatedUser)> {
use openidconnect::OAuth2TokenResponse;
let (code, state) = decode_code_claims(wrapped_code, conn).await?;
let mut sso_auth = match SsoAuth::find(state, conn).await {
None => err!(format!("Invalid state cannot retrieve sso auth")),
Some(sso_auth) => sso_auth,
};
if let Some(authenticated_user) = AC_CACHE.get(&state) {
return Ok(UserInformation {
state,
identifier: authenticated_user.identifier,
email: authenticated_user.email,
email_verified: authenticated_user.email_verified,
user_name: authenticated_user.user_name,
});
if let Some(authenticated_user) = sso_auth.auth_response.clone() {
return Ok((sso_auth, authenticated_user));
}
let nonce = match SsoNonce::find(&state, conn).await {
None => err!(format!("Invalid state cannot retrieve nonce")),
Some(nonce) => nonce,
let code = match sso_auth.code_response.clone() {
Some(OIDCCodeWrapper::Ok {
code,
}) => code.clone(),
Some(OIDCCodeWrapper::Error {
error,
error_description,
}) => {
sso_auth.delete(conn).await?;
err!(format!("SSO authorization failed: {error}, {}", error_description.as_ref().unwrap_or(&String::new())))
}
None => {
sso_auth.delete(conn).await?;
err!("Missing authorization provider return");
}
};
let client = Client::cached().await?;
let (token_response, id_claims) = client.exchange_code(code, nonce).await?;
let (token_response, id_claims) = client.exchange_code(code, client_verifier, &sso_auth).await?;
let user_info = client.user_info(token_response.access_token().to_owned()).await?;
@ -300,7 +292,7 @@ pub async fn exchange_code(wrapped_code: &str, conn: &DbConn) -> ApiResult<UserI
let identifier = OIDCIdentifier::new(id_claims.issuer(), id_claims.subject());
let authenticated_user = AuthenticatedUser {
let authenticated_user = OIDCAuthenticatedUser {
refresh_token: refresh_token.cloned(),
access_token: token_response.access_token().secret().clone(),
expires_in: token_response.expires_in(),
@ -311,29 +303,49 @@ pub async fn exchange_code(wrapped_code: &str, conn: &DbConn) -> ApiResult<UserI
};
debug!("Authenticated user {authenticated_user:?}");
sso_auth.auth_response = Some(authenticated_user.clone());
sso_auth.updated_at = Utc::now().naive_utc();
sso_auth.save(conn).await?;
AC_CACHE.insert(state.clone(), authenticated_user);
Ok(UserInformation {
state,
identifier,
email,
email_verified,
user_name,
})
Ok((sso_auth, authenticated_user))
}
// User has passed 2FA flow we can delete `nonce` and clear the cache.
pub async fn redeem(state: &OIDCState, conn: &DbConn) -> ApiResult<AuthenticatedUser> {
if let Err(err) = SsoNonce::delete(state, conn).await {
error!("Failed to delete database sso_nonce using {state}: {err}")
// User has passed 2FA flow we can delete auth info from database
pub async fn redeem(
device: &Device,
user: &User,
client_id: Option<String>,
sso_user: Option<SsoUser>,
sso_auth: SsoAuth,
auth_user: OIDCAuthenticatedUser,
conn: &DbConn,
) -> ApiResult<AuthTokens> {
sso_auth.delete(conn).await?;
if sso_user.is_none() {
let user_sso = SsoUser {
user_uuid: user.uuid.clone(),
identifier: auth_user.identifier.clone(),
};
user_sso.save(conn).await?;
}
if let Some(au) = AC_CACHE.get(state) {
AC_CACHE.invalidate(state);
Ok(au)
if !CONFIG.sso_auth_only_not_session() {
let now = Utc::now();
let (ap_nbf, ap_exp) =
match (decode_token_claims("access_token", &auth_user.access_token), auth_user.expires_in) {
(Ok(ap), _) => (ap.nbf(), ap.exp),
(Err(_), Some(exp)) => (now.timestamp(), (now + exp).timestamp()),
_ => err!("Non jwt access_token and empty expires_in"),
};
let access_claims =
auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, AuthMethod::Sso.scope_vec(), client_id, now);
_create_auth_tokens(device, auth_user.refresh_token, access_claims, auth_user.access_token)
} else {
err!("Failed to retrieve user info from sso cache")
Ok(AuthTokens::new(device, user, AuthMethod::Sso, client_id))
}
}

39
src/sso_client.rs

@ -7,8 +7,8 @@ use url::Url;
use crate::{
api::{ApiResult, EmptyResult},
db::models::SsoNonce,
sso::{OIDCCode, OIDCState},
db::models::SsoAuth,
sso::{OIDCCode, OIDCCodeChallenge, OIDCCodeVerifier, OIDCState},
CONFIG,
};
@ -107,7 +107,11 @@ impl Client {
}
// The `state` is encoded using base64 to ensure no issue with providers (It contains the Organization identifier).
pub async fn authorize_url(state: OIDCState, redirect_uri: String) -> ApiResult<(Url, SsoNonce)> {
pub async fn authorize_url(
state: OIDCState,
client_challenge: OIDCCodeChallenge,
redirect_uri: String,
) -> ApiResult<(Url, SsoAuth)> {
let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new);
let base64_state = data_encoding::BASE64.encode(state.to_string().as_bytes());
@ -122,22 +126,21 @@ impl Client {
.add_scopes(scopes)
.add_extra_params(CONFIG.sso_authorize_extra_params_vec());
let verifier = if CONFIG.sso_pkce() {
let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
auth_req = auth_req.set_pkce_challenge(pkce_challenge);
Some(pkce_verifier.into_secret())
} else {
None
};
if CONFIG.sso_pkce() {
auth_req = auth_req
.add_extra_param::<&str, String>("code_challenge", client_challenge.clone().into())
.add_extra_param("code_challenge_method", "S256");
}
let (auth_url, _, nonce) = auth_req.url();
Ok((auth_url, SsoNonce::new(state, nonce.secret().clone(), verifier, redirect_uri)))
Ok((auth_url, SsoAuth::new(state, client_challenge, nonce.secret().clone(), redirect_uri)))
}
pub async fn exchange_code(
&self,
code: OIDCCode,
nonce: SsoNonce,
client_verifier: OIDCCodeVerifier,
sso_auth: &SsoAuth,
) -> ApiResult<(
StandardTokenResponse<
IdTokenFields<
@ -155,17 +158,21 @@ impl Client {
let mut exchange = self.core_client.exchange_code(oidc_code);
let verifier = PkceCodeVerifier::new(client_verifier.into());
if CONFIG.sso_pkce() {
match nonce.verifier {
None => err!(format!("Missing verifier in the DB nonce table")),
Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)),
exchange = exchange.set_pkce_verifier(verifier);
} else {
let challenge = PkceCodeChallenge::from_code_verifier_sha256(&verifier);
if challenge.as_str() != String::from(sso_auth.client_challenge.clone()) {
err!(format!("PKCE client challenge failed"))
// Might need to notify admin ? how ?
}
}
match exchange.request_async(&self.http_client).await {
Err(err) => err!(format!("Failed to contact token endpoint: {:?}", err)),
Ok(token_response) => {
let oidc_nonce = Nonce::new(nonce.nonce);
let oidc_nonce = Nonce::new(sso_auth.nonce.clone());
let id_token = match token_response.extra_fields().id_token() {
None => err!("Token response did not contain an id_token"),

17
src/static/scripts/admin.css

@ -58,3 +58,20 @@ img {
.abbr-badge {
cursor: help;
}
/* Theme-switcher icons: reserve a fixed-width inline-flex slot so the
   icon and its label stay aligned across entries. */
.theme-icon,
.theme-icon-active {
  display: inline-flex;
  flex: 0 0 1.75em;
  justify-content: center;
}

/* Pin the embedded SVG to a consistent 1.25em square;
   `overflow: visible` prevents the glyph from being clipped. */
.theme-icon svg,
.theme-icon-active svg {
  width: 1.25em;
  height: 1.25em;
  min-width: 1.25em;
  min-height: 1.25em;
  display: block;
  overflow: visible;
}

15
src/static/scripts/admin.js

@ -1,6 +1,6 @@
"use strict";
/* eslint-env es2017, browser */
/* exported BASE_URL, _post */
/* exported BASE_URL, _post _delete */
function getBaseUrl() {
// If the base URL is `https://vaultwarden.example.com/base/path/admin/`,
@ -106,7 +106,11 @@ const showActiveTheme = (theme, focus = false) => {
const themeSwitcherText = document.querySelector("#bd-theme-text");
const activeThemeIcon = document.querySelector(".theme-icon-active use");
const btnToActive = document.querySelector(`[data-bs-theme-value="${theme}"]`);
const svgOfActiveBtn = btnToActive.querySelector("span use").textContent;
if (!btnToActive) {
return;
}
const btnIconUse = btnToActive ? btnToActive.querySelector("[data-theme-icon-use]") : null;
const iconHref = btnIconUse ? btnIconUse.getAttribute("href") || btnIconUse.getAttribute("xlink:href") : null;
document.querySelectorAll("[data-bs-theme-value]").forEach(element => {
element.classList.remove("active");
@ -115,7 +119,12 @@ const showActiveTheme = (theme, focus = false) => {
btnToActive.classList.add("active");
btnToActive.setAttribute("aria-pressed", "true");
activeThemeIcon.textContent = svgOfActiveBtn;
if (iconHref && activeThemeIcon) {
activeThemeIcon.setAttribute("href", iconHref);
activeThemeIcon.setAttribute("xlink:href", iconHref);
}
const themeSwitcherLabel = `${themeSwitcherText.textContent} (${btnToActive.dataset.bsThemeValue})`;
themeSwitcher.setAttribute("aria-label", themeSwitcherLabel);

12
src/static/scripts/admin_diagnostics.js

@ -29,7 +29,7 @@ function isValidIp(ip) {
return ipv4Regex.test(ip) || ipv6Regex.test(ip);
}
function checkVersions(platform, installed, latest, commit=null, pre_release=false) {
function checkVersions(platform, installed, latest, commit=null, compare_order=0) {
if (installed === "-" || latest === "-") {
document.getElementById(`${platform}-failed`).classList.remove("d-none");
return;
@ -37,7 +37,7 @@ function checkVersions(platform, installed, latest, commit=null, pre_release=fal
// Only check basic versions, no commit revisions
if (commit === null || installed.indexOf("-") === -1) {
if (platform === "web" && pre_release === true) {
if (platform === "web" && compare_order === 1) {
document.getElementById(`${platform}-prerelease`).classList.remove("d-none");
} else if (installed == latest) {
document.getElementById(`${platform}-success`).classList.remove("d-none");
@ -83,7 +83,7 @@ async function generateSupportString(event, dj) {
let supportString = "### Your environment (Generated via diagnostics page)\n\n";
supportString += `* Vaultwarden version: v${dj.current_release}\n`;
supportString += `* Web-vault version: v${dj.web_vault_version}\n`;
supportString += `* Web-vault version: v${dj.active_web_release}\n`;
supportString += `* OS/Arch: ${dj.host_os}/${dj.host_arch}\n`;
supportString += `* Running within a container: ${dj.running_within_container} (Base: ${dj.container_base_image})\n`;
supportString += `* Database type: ${dj.db_type}\n`;
@ -208,9 +208,9 @@ function initVersionCheck(dj) {
}
checkVersions("server", serverInstalled, serverLatest, serverLatestCommit);
const webInstalled = dj.web_vault_version;
const webLatest = dj.latest_web_build;
checkVersions("web", webInstalled, webLatest, null, dj.web_vault_pre_release);
const webInstalled = dj.active_web_release;
const webLatest = dj.latest_web_release;
checkVersions("web", webInstalled, webLatest, null, dj.web_vault_compare);
}
function checkDns(dns_resolved) {

2
src/static/scripts/admin_users.js

@ -1,6 +1,6 @@
"use strict";
/* eslint-env es2017, browser, jquery */
/* global _post:readable, BASE_URL:readable, reload:readable, jdenticon:readable */
/* global _post:readable, _delete:readable, BASE_URL:readable, reload:readable, jdenticon:readable */
function deleteUser(event) {
event.preventDefault();

7
src/static/scripts/bootstrap.bundle.js

@ -1,5 +1,5 @@
/*!
* Bootstrap v5.3.7 (https://getbootstrap.com/)
* Bootstrap v5.3.8 (https://getbootstrap.com/)
* Copyright 2011-2025 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors)
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
*/
@ -647,7 +647,7 @@
* Constants
*/
const VERSION = '5.3.7';
const VERSION = '5.3.8';
/**
* Class definition
@ -3690,9 +3690,6 @@
this._element.setAttribute('aria-expanded', 'false');
Manipulator.removeDataAttribute(this._menu, 'popper');
EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget);
// Explicitly return focus to the trigger element
this._element.focus();
}
_getConfig(config) {
config = super._getConfig(config);

7
src/static/scripts/bootstrap.css

@ -1,6 +1,6 @@
@charset "UTF-8";
/*!
* Bootstrap v5.3.7 (https://getbootstrap.com/)
* Bootstrap v5.3.8 (https://getbootstrap.com/)
* Copyright 2011-2025 The Bootstrap Authors
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE)
*/
@ -547,6 +547,10 @@ legend + * {
-webkit-appearance: textfield;
outline-offset: -2px;
}
[type=search]::-webkit-search-cancel-button {
cursor: pointer;
filter: grayscale(1);
}
/* rtl:raw:
[type="tel"],
@ -6208,6 +6212,7 @@ textarea.form-control-lg {
.spinner-grow,
.spinner-border {
display: inline-block;
flex-shrink: 0;
width: var(--bs-spinner-width);
height: var(--bs-spinner-height);
vertical-align: var(--bs-spinner-vertical-align);

19
src/static/scripts/datatables.css

@ -4,20 +4,21 @@
*
* To rebuild or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#bs5/dt-2.3.2
* https://datatables.net/download/#bs5/dt-2.3.5
*
* Included libraries:
* DataTables 2.3.2
* DataTables 2.3.5
*/
:root {
--dt-row-selected: 13, 110, 253;
--dt-row-selected-text: 255, 255, 255;
--dt-row-selected-link: 9, 10, 11;
--dt-row-selected-link: 228, 228, 228;
--dt-row-stripe: 0, 0, 0;
--dt-row-hover: 0, 0, 0;
--dt-column-ordering: 0, 0, 0;
--dt-header-align-items: center;
--dt-header-vertical-align: middle;
--dt-html-background: white;
}
:root.dark {
@ -112,7 +113,7 @@ table.dataTable thead > tr > td.dt-ordering-asc span.dt-column-order,
table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order {
position: relative;
width: 12px;
height: 20px;
height: 24px;
}
table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:after,
table.dataTable thead > tr > td.dt-orderable-asc span.dt-column-order:before,
@ -144,7 +145,8 @@ table.dataTable thead > tr > td.dt-ordering-asc span.dt-column-order:before,
table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order:after {
opacity: 0.6;
}
table.dataTable thead > tr > th.sorting_desc_disabled span.dt-column-order:after, table.dataTable thead > tr > th.sorting_asc_disabled span.dt-column-order:before,
table.dataTable thead > tr > th.dt-orderable-none:not(.dt-ordering-asc, .dt-ordering-desc) span.dt-column-order:empty, table.dataTable thead > tr > th.sorting_desc_disabled span.dt-column-order:after, table.dataTable thead > tr > th.sorting_asc_disabled span.dt-column-order:before,
table.dataTable thead > tr > td.dt-orderable-none:not(.dt-ordering-asc, .dt-ordering-desc) span.dt-column-order:empty,
table.dataTable thead > tr > td.sorting_desc_disabled span.dt-column-order:after,
table.dataTable thead > tr > td.sorting_asc_disabled span.dt-column-order:before {
display: none;
@ -340,6 +342,7 @@ table.dataTable thead td,
table.dataTable tfoot th,
table.dataTable tfoot td {
text-align: left;
vertical-align: var(--dt-header-vertical-align);
}
table.dataTable thead th.dt-head-left,
table.dataTable thead td.dt-head-left,
@ -422,10 +425,6 @@ table.dataTable tbody td.dt-body-nowrap {
white-space: nowrap;
}
:root {
--dt-header-align-items: flex-end;
}
/*! Bootstrap 5 integration for DataTables
*
* ©2020 SpryMedia Ltd, all rights reserved.
@ -453,7 +452,7 @@ table.table.dataTable > tbody > tr.selected > * {
color: rgb(var(--dt-row-selected-text));
}
table.table.dataTable > tbody > tr.selected a {
color: rgb(9, 10, 11);
color: rgb(228, 228, 228);
color: rgb(var(--dt-row-selected-link));
}
table.table.dataTable.table-striped > tbody > tr:nth-of-type(2n+1) > * {

614
src/static/scripts/datatables.js

File diff suppressed because it is too large

33
src/static/templates/admin/base.hbs

@ -11,6 +11,21 @@
<script src="{{urlpath}}/vw_static/admin.js"></script>
</head>
<body>
<svg xmlns="http://www.w3.org/2000/svg" class="d-none">
<symbol id="vw-icon-sun" viewBox="0 0 24 24">
<circle cx="12" cy="12" r="5" fill="currentColor"/>
<g stroke="currentColor" stroke-linecap="round" stroke-width="1.5">
<path d="M12 2v3M12 19v3M4.22 4.22l2.12 2.12M17.66 17.66l2.12 2.12M2 12h3M19 12h3M4.22 19.78l2.12-2.12M17.66 6.34l2.12-2.12"/>
</g>
</symbol>
<symbol id="vw-icon-moon" viewBox="0 0 24 24">
<path fill="currentColor" stroke-width=".8" d="M18.4 17.8A9 8.6 0 0 1 13 2a10.5 10 0 1 0 9 14.4 9.4 9 0 0 1-3.6 1.4"/>
</symbol>
<symbol id="vw-icon-auto" viewBox="0 0 24 24">
<circle cx="12" cy="12" r="9" fill="none" stroke="currentColor" stroke-width="1.5"/>
<path fill="currentColor" d="M12 3a9 9 0 1 1 0 18Z"/>
</symbol>
</svg>
<nav class="navbar navbar-expand-md navbar-dark bg-dark mb-4 shadow fixed-top">
<div class="container-xl">
<a class="navbar-brand" href="{{urlpath}}/admin"><img class="vaultwarden-icon" src="{{urlpath}}/vw_static/vaultwarden-icon.png" alt="V">aultwarden Admin</a>
@ -39,14 +54,16 @@
</li>
</ul>
<ul class="navbar-nav">
<ul class="navbar-nav mx-3">
<li class="nav-item dropdown">
<button
class="btn btn-link nav-link py-0 px-0 px-md-2 dropdown-toggle d-flex align-items-center"
id="bd-theme" type="button" aria-expanded="false" data-bs-toggle="dropdown"
data-bs-display="static" aria-label="Toggle theme (auto)">
<span class="my-1 fs-4 theme-icon-active">
<use>&#9775;</use>
<svg class="vw-theme-icon" focusable="false" aria-hidden="true">
<use data-theme-icon-use href="#vw-icon-auto"></use>
</svg>
</span>
<span class="d-md-none ms-2" id="bd-theme-text">Toggle theme</span>
</button>
@ -55,7 +72,9 @@
<button type="button" class="dropdown-item d-flex align-items-center"
data-bs-theme-value="light" aria-pressed="false">
<span class="me-2 fs-4 theme-icon">
<use>&#9728;</use>
<svg class="vw-theme-icon" focusable="false" aria-hidden="true">
<use data-theme-icon-use href="#vw-icon-sun"></use>
</svg>
</span>
Light
</button>
@ -64,7 +83,9 @@
<button type="button" class="dropdown-item d-flex align-items-center"
data-bs-theme-value="dark" aria-pressed="false">
<span class="me-2 fs-4 theme-icon">
<use>&starf;</use>
<svg class="vw-theme-icon" focusable="false" aria-hidden="true">
<use data-theme-icon-use href="#vw-icon-moon"></use>
</svg>
</span>
Dark
</button>
@ -73,7 +94,9 @@
<button type="button" class="dropdown-item d-flex align-items-center active"
data-bs-theme-value="auto" aria-pressed="true">
<span class="me-2 fs-4 theme-icon">
<use>&#9775;</use>
<svg class="vw-theme-icon" focusable="false" aria-hidden="true">
<use data-theme-icon-use href="#vw-icon-auto"></use>
</svg>
</span>
Auto
</button>

4
src/static/templates/admin/diagnostics.hbs

@ -27,13 +27,13 @@
<span class="badge bg-info text-dark d-none abbr-badge" id="web-prerelease" title="You seem to be using a pre-release version.">Pre-Release</span>
</dt>
<dd class="col-sm-7">
<span id="web-installed">{{page_data.web_vault_version}}</span>
<span id="web-installed">{{page_data.active_web_release}}</span>
</dd>
<dt class="col-sm-5">Web Latest
<span class="badge bg-secondary d-none abbr-badge" id="web-failed" title="Unable to determine latest version.">Unknown</span>
</dt>
<dd class="col-sm-7">
<span id="web-latest">{{page_data.latest_web_build}}</span>
<span id="web-latest">{{page_data.latest_web_release}}</span>
</dd>
{{/if}}
{{#unless page_data.web_vault_enabled}}

4
src/static/templates/email/send_single_org_removed_from_org.hbs

@ -1,4 +1,4 @@
You have been removed from {{{org_name}}}
Your access to {{{org_name}}} has been revoked
<!---------------->
Your user account has been removed from the *{{org_name}}* organization because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before you can re-join this organization you need to leave all other organizations or join with a different account.
Your access to the *{{org_name}}* organization has been revoked because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before your access can be restored you need to leave all other organizations or join with a different account.
{{> email/email_footer_text }}

4
src/static/templates/email/send_single_org_removed_from_org.html.hbs

@ -1,10 +1,10 @@
You have been removed from {{{org_name}}}
Your access to {{{org_name}}} has been revoked
<!---------------->
{{> email/email_header }}
<table width="100%" cellpadding="0" cellspacing="0" style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
<tr style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">
<td class="content-block" style="font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; margin: 0; -webkit-font-smoothing: antialiased; padding: 0 0 10px; -webkit-text-size-adjust: none; text-align: center;" valign="top" align="center">
Your user account has been removed from the <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{org_name}}</b> organization because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before you can re-join this organization you need to leave all other organizations or join with a different account.
Your access to the <b style="margin: 0; font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif; box-sizing: border-box; font-size: 16px; color: #333; line-height: 25px; -webkit-font-smoothing: antialiased; -webkit-text-size-adjust: none;">{{org_name}}</b> organization has been revoked because you are a part of another organization. The {{org_name}} organization has enabled a policy that prevents users from being a part of multiple organizations. Before your access can be restored you need to leave all other organizations or join with a different account.
</td>
</tr>
</table>

2
src/util.rs

@ -531,7 +531,7 @@ struct WebVaultVersion {
version: String,
}
pub fn get_web_vault_version() -> String {
pub fn get_active_web_release() -> String {
let version_files = [
format!("{}/vw-version.json", CONFIG.web_vault_folder()),
format!("{}/version.json", CONFIG.web_vault_folder()),

Loading…
Cancel
Save