diff --git a/.env.template b/.env.template
index 140a4ccc..67f531fc 100644
--- a/.env.template
+++ b/.env.template
@@ -183,9 +183,9 @@
 ## Defaults to every minute. Set blank to disable this job.
 # DUO_CONTEXT_PURGE_SCHEDULE="30 * * * * *"
 #
-## Cron schedule of the job that cleans sso nonce from incomplete flow
+## Cron schedule of the job that cleans up sso auth data from incomplete flows
 ## Defaults to daily (20 minutes after midnight). Set blank to disable this job.
-# PURGE_INCOMPLETE_SSO_NONCE="0 20 0 * * *"
+# PURGE_INCOMPLETE_SSO_AUTH="0 20 0 * * *"

 ########################
 ### General settings ###
@@ -348,7 +348,7 @@
 ## Default: 2592000 (30 days)
 # ICON_CACHE_TTL=2592000
 ## Cache time-to-live for icons which weren't available, in seconds (0 is "forever")
-## Default: 2592000 (3 days)
+## Default: 259200 (3 days)
 # ICON_CACHE_NEGTTL=259200

 ## Icon download timeout
@@ -376,6 +376,7 @@
 ## - "inline-menu-totp": Enable the use of inline menu TOTP codes in the browser extension.
 ## - "ssh-agent": Enable SSH agent support on Desktop. (Needs desktop >=2024.12.0)
 ## - "ssh-key-vault-item": Enable the creation and use of SSH key vault items. (Needs clients >=2024.12.0)
+## - "pm-25373-windows-biometrics-v2": Enable the new implementation of biometrics on Windows. (Needs desktop >=2025.11.0)
 ## - "export-attachments": Enable support for exporting attachments (Clients >=2025.4.0)
 ## - "anon-addy-self-host-alias": Enable configuring self-hosted Anon Addy alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
 ## - "simple-login-self-host-alias": Enable configuring self-hosted Simple Login alias generator. (Needs Android >=2025.3.0, iOS >=2025.4.0)
@@ -471,6 +472,11 @@
 ## Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy.
 # ENFORCE_SINGLE_ORG_WITH_RESET_PW_POLICY=false

+## Prefer IPv6 (AAAA) resolution
+## This setting configures the DNS resolver to try IPv6 (AAAA) first and fall back to IPv4 (A) if no IPv6 address is available.
+## This can be useful in IPv6-only environments.
+# DNS_PREFER_IPV6=false + ##################################### ### SSO settings (OpenID Connect) ### ##################################### diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b04f6e8a..8901ea41 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -54,7 +54,7 @@ jobs: # Checkout the repo - name: "Checkout" - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false fetch-depth: 0 @@ -80,7 +80,7 @@ jobs: # Only install the clippy and rustfmt components on the default rust-toolchain - name: "Install rust-toolchain version" - uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2 + uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1 if: ${{ matrix.channel == 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" @@ -90,7 +90,7 @@ jobs: # Install the any other channel to be used for which we do not execute clippy and rustfmt - name: "Install MSRV version" - uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2 + uses: dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 # master @ Dec 16, 2025, 6:11 PM GMT+1 if: ${{ matrix.channel != 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" @@ -115,7 +115,7 @@ jobs: # Enable Rust Caching - name: Rust Caching - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 with: # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. # Like changing the build host from Ubuntu 20.04 to 22.04 for example. 
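The DNS_PREFER_IPV6 option added in the .env.template hunk above asks the resolver to try AAAA records first and keep A records only as a fallback. As a rough, standalone sketch of that ordering logic (illustrative only, not Vaultwarden's actual resolver wiring; the helper name is made up):

    use std::net::{SocketAddr, ToSocketAddrs};

    // Hypothetical helper: resolve `host` and, when prefer_ipv6 is set
    // (cf. DNS_PREFER_IPV6=true), order IPv6 addresses before IPv4 ones
    // so connections try AAAA results first and fall back to A results.
    fn resolve_prefer_ipv6(host: &str, port: u16, prefer_ipv6: bool) -> std::io::Result<Vec<SocketAddr>> {
        let mut addrs: Vec<SocketAddr> = (host, port).to_socket_addrs()?.collect();
        if prefer_ipv6 {
            // Stable sort: IPv6 entries (key = false) sort before IPv4 entries.
            addrs.sort_by_key(|addr| !addr.is_ipv6());
        }
        Ok(addrs)
    }

    fn main() -> std::io::Result<()> {
        for addr in resolve_prefer_ipv6("example.com", 443, true)? {
            println!("{addr}");
        }
        Ok(())
    }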
diff --git a/.github/workflows/check-templates.yml b/.github/workflows/check-templates.yml index 7a6a764a..2e02f574 100644 --- a/.github/workflows/check-templates.yml +++ b/.github/workflows/check-templates.yml @@ -12,7 +12,7 @@ jobs: steps: # Checkout the repo - name: "Checkout" - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false # End Checkout the repo diff --git a/.github/workflows/hadolint.yml b/.github/workflows/hadolint.yml index 9dfd7a59..8a6d1218 100644 --- a/.github/workflows/hadolint.yml +++ b/.github/workflows/hadolint.yml @@ -13,7 +13,7 @@ jobs: steps: # Start Docker Buildx - name: Setup Docker Buildx - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 # https://github.com/moby/buildkit/issues/3969 # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills with: @@ -34,7 +34,7 @@ jobs: # End Download hadolint # Checkout the repo - name: Checkout - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false # End Checkout the repo diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0bd0a560..378682d3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,6 +16,23 @@ concurrency: # Don't cancel other runs when creating a tag cancel-in-progress: ${{ github.ref_type == 'branch' }} +defaults: + run: + shell: bash + +env: + # The *_REPO variables need to be configured as repository variables + # Append `/settings/variables/actions` to your repo url + # DOCKERHUB_REPO needs to be 'index.docker.io//' + # Check for Docker hub credentials in secrets + HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN != '' }} + # GHCR_REPO needs to be 'ghcr.io//' + # Check for Github credentials in secrets + HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }} + # QUAY_REPO needs to be 'quay.io//' + # Check for Quay.io credentials in secrets + HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }} + jobs: docker-build: name: Build Vaultwarden containers @@ -25,41 +42,25 @@ jobs: contents: read attestations: write # Needed to generate an artifact attestation for a build id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate - runs-on: ubuntu-24.04 + runs-on: ${{ contains(matrix.arch, 'arm') && 'ubuntu-24.04-arm' || 'ubuntu-24.04' }} timeout-minutes: 120 - # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them - services: - registry: - image: registry@sha256:1fc7de654f2ac1247f0b67e8a459e273b0993be7d2beda1f3f56fbf1001ed3e7 # v3.0.0 - ports: - - 5000:5000 env: SOURCE_COMMIT: ${{ github.sha }} SOURCE_REPOSITORY_URL: "https://github.com/${{ github.repository }}" - # The *_REPO variables need to be configured as repository variables - # Append `/settings/variables/actions` to your repo url - # DOCKERHUB_REPO needs to be 'index.docker.io//' - # Check for Docker hub credentials in secrets - HAVE_DOCKERHUB_LOGIN: ${{ vars.DOCKERHUB_REPO != '' && secrets.DOCKERHUB_USERNAME != '' && secrets.DOCKERHUB_TOKEN 
!= '' }} - # GHCR_REPO needs to be 'ghcr.io//' - # Check for Github credentials in secrets - HAVE_GHCR_LOGIN: ${{ vars.GHCR_REPO != '' && github.repository_owner != '' && secrets.GITHUB_TOKEN != '' }} - # QUAY_REPO needs to be 'quay.io//' - # Check for Quay.io credentials in secrets - HAVE_QUAY_LOGIN: ${{ vars.QUAY_REPO != '' && secrets.QUAY_USERNAME != '' && secrets.QUAY_TOKEN != '' }} strategy: matrix: + arch: ["amd64", "arm64", "arm/v7", "arm/v6"] base_image: ["debian","alpine"] steps: - name: Initialize QEMU binfmt support - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 with: platforms: "arm64,arm" # Start Docker Buildx - name: Setup Docker Buildx - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 # https://github.com/moby/buildkit/issues/3969 # Also set max parallelism to 2, the default of 4 breaks GitHub Actions and causes OOMKills with: @@ -72,25 +73,24 @@ jobs: # Checkout the repo - name: Checkout - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 # We need fetch-depth of 0 so we also get all the tag metadata with: persist-credentials: false fetch-depth: 0 - # Determine Base Tags and Source Version - - name: Determine Base Tags and Source Version - shell: bash + # Normalize the architecture string for use in paths and cache keys + - name: Normalize architecture string env: - REF_TYPE: ${{ github.ref_type }} + MATRIX_ARCH: ${{ matrix.arch }} run: | - # Check which main tag we are going to build determined by ref_type - if [[ "${REF_TYPE}" == "tag" ]]; then - echo "BASE_TAGS=latest,${GITHUB_REF#refs/*/}" | tee -a "${GITHUB_ENV}" - elif [[ "${REF_TYPE}" == "branch" ]]; then - echo "BASE_TAGS=testing" | tee -a "${GITHUB_ENV}" - fi + # Replace slashes with nothing to create a safe string for paths/cache keys + NORMALIZED_ARCH="${MATRIX_ARCH//\/}" + echo "NORMALIZED_ARCH=${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}" + # Determine Source Version + - name: Determine Source Version + run: | # Get the Source Version for this release GIT_EXACT_TAG="$(git describe --tags --abbrev=0 --exact-match 2>/dev/null || true)" if [[ -n "${GIT_EXACT_TAG}" ]]; then @@ -99,7 +99,6 @@ jobs: GIT_LAST_TAG="$(git describe --tags --abbrev=0)" echo "SOURCE_VERSION=${GIT_LAST_TAG}-${SOURCE_COMMIT:0:8}" | tee -a "${GITHUB_ENV}" fi - # End Determine Base Tags # Login to Docker Hub - name: Login to Docker Hub @@ -111,7 +110,6 @@ jobs: - name: Add registry for DockerHub if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} - shell: bash env: DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} run: | @@ -128,7 +126,6 @@ jobs: - name: Add registry for ghcr.io if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} - shell: bash env: GHCR_REPO: ${{ vars.GHCR_REPO }} run: | @@ -145,55 +142,65 @@ jobs: - name: Add registry for Quay.io if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} - shell: bash env: QUAY_REPO: ${{ vars.QUAY_REPO }} run: | echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" - name: Configure build cache from/to - shell: bash env: GHCR_REPO: ${{ vars.GHCR_REPO }} BASE_IMAGE: ${{ matrix.base_image }} + NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }} run: | # # Check if there is a GitHub Container Registry Login and use it for caching if [[ -n 
"${HAVE_GHCR_LOGIN}" ]]; then - echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}" | tee -a "${GITHUB_ENV}" - echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_FROM=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH}" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_TO=type=registry,ref=${GHCR_REPO}-buildcache:${BASE_IMAGE}-${NORMALIZED_ARCH},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" else echo "BAKE_CACHE_FROM=" echo "BAKE_CACHE_TO=" fi # - - name: Add localhost registry - shell: bash + - name: Generate tags + id: tags + env: + CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" run: | - echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}" + # Convert comma-separated list to newline-separated set commands + TAGS=$(echo "${CONTAINER_REGISTRIES}" | tr ',' '\n' | sed "s|.*|*.tags=&|") + + # Output for use in next step + { + echo "TAGS<> "$GITHUB_ENV" - name: Bake ${{ matrix.base_image }} containers id: bake_vw - uses: docker/bake-action@3acf805d94d93a86cce4ca44798a76464a75b88c # v6.9.0 + uses: docker/bake-action@5be5f02ff8819ecd3092ea6b2e6261c31774f2b4 # v6.10.0 env: - BASE_TAGS: "${{ env.BASE_TAGS }}" + BASE_TAGS: "${{ steps.determine-version.outputs.BASE_TAGS }}" SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" SOURCE_VERSION: "${{ env.SOURCE_VERSION }}" SOURCE_REPOSITORY_URL: "${{ env.SOURCE_REPOSITORY_URL }}" - CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" with: pull: true - push: true source: . files: docker/docker-bake.hcl targets: "${{ matrix.base_image }}-multi" set: | *.cache-from=${{ env.BAKE_CACHE_FROM }} *.cache-to=${{ env.BAKE_CACHE_TO }} + *.platform=linux/${{ matrix.arch }} + ${{ env.TAGS }} + *.output=type=local,dest=./output + *.output=type=image,push-by-digest=true,name-canonical=true,push=true - name: Extract digest SHA - shell: bash env: BAKE_METADATA: ${{ steps.bake_vw.outputs.metadata }} BASE_IMAGE: ${{ matrix.base_image }} @@ -201,105 +208,174 @@ jobs: GET_DIGEST_SHA="$(jq -r --arg base "$BASE_IMAGE" '.[$base + "-multi"]."containerimage.digest"' <<< "${BAKE_METADATA}")" echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" - # Attest container images - - name: Attest - docker.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + - name: Export digest + env: + DIGEST_SHA: ${{ env.DIGEST_SHA }} + RUNNER_TEMP: ${{ runner.temp }} + run: | + mkdir -p "${RUNNER_TEMP}"/digests + digest="${DIGEST_SHA}" + touch "${RUNNER_TEMP}/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: - subject-name: ${{ vars.DOCKERHUB_REPO }} - subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true + name: digests-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }} + path: ${{ runner.temp }}/digests/* + if-no-files-found: error + retention-days: 1 - - name: Attest - ghcr.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + - name: Rename binaries to match target platform + env: + NORMALIZED_ARCH: ${{ env.NORMALIZED_ARCH }} + run: | + mv ./output/vaultwarden 
vaultwarden-"${NORMALIZED_ARCH}" + + # Upload artifacts to Github Actions and Attest the binaries + - name: Attest binaries + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: - subject-name: ${{ vars.GHCR_REPO }} - subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true + subject-path: vaultwarden-${{ env.NORMALIZED_ARCH }} - - name: Attest - quay.io - ${{ matrix.base_image }} - if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + - name: Upload binaries as artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: - subject-name: ${{ vars.QUAY_REPO }} - subject-digest: ${{ env.DIGEST_SHA }} - push-to-registry: true + name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-${{ env.NORMALIZED_ARCH }}-${{ matrix.base_image }} + path: vaultwarden-${{ env.NORMALIZED_ARCH }} + + merge-manifests: + name: Merge manifests + runs-on: ubuntu-latest + needs: docker-build + permissions: + packages: write # Needed to upload packages and artifacts + attestations: write # Needed to generate an artifact attestation for a build + id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate + strategy: + matrix: + base_image: ["debian","alpine"] + + steps: + - name: Download digests + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: ${{ runner.temp }}/digests + pattern: digests-*-${{ matrix.base_image }} + merge-multiple: true + + # Login to Docker Hub + - name: Login to Docker Hub + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} + + - name: Add registry for DockerHub + if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' }} + env: + DOCKERHUB_REPO: ${{ vars.DOCKERHUB_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${DOCKERHUB_REPO}" | tee -a "${GITHUB_ENV}" + + # Login to GitHub Container Registry + - name: Login to GitHub Container Registry + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} + + - name: Add registry for ghcr.io + if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} + env: + GHCR_REPO: ${{ vars.GHCR_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${GHCR_REPO}" | tee -a "${GITHUB_ENV}" + + # Login to Quay.io + - name: Login to Quay.io + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: quay.io + username: ${{ secrets.QUAY_USERNAME }} + password: ${{ secrets.QUAY_TOKEN }} + if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} + - name: Add registry for Quay.io + if: ${{ env.HAVE_QUAY_LOGIN == 'true' }} + env: + QUAY_REPO: ${{ vars.QUAY_REPO }} + run: | + echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${QUAY_REPO}" | tee -a "${GITHUB_ENV}" - # Extract the Alpine binaries from the containers - - name: Extract binaries - shell: bash + # Determine Base Tags + - name: Determine Base Tags env: + BASE_IMAGE_TAG: "${{ matrix.base_image != 'debian' && format('-{0}', matrix.base_image) || '' }}" REF_TYPE: ${{ github.ref_type }} - BASE_IMAGE: ${{ matrix.base_image }} run: | # Check 
which main tag we are going to build determined by ref_type if [[ "${REF_TYPE}" == "tag" ]]; then - EXTRACT_TAG="latest" + echo "BASE_TAGS=latest${BASE_IMAGE_TAG},${GITHUB_REF#refs/*/}${BASE_IMAGE_TAG}${BASE_IMAGE_TAG//-/,}" | tee -a "${GITHUB_ENV}" elif [[ "${REF_TYPE}" == "branch" ]]; then - EXTRACT_TAG="testing" + echo "BASE_TAGS=testing${BASE_IMAGE_TAG}" | tee -a "${GITHUB_ENV}" fi - # Check which base_image was used and append -alpine if needed - if [[ "${BASE_IMAGE}" == "alpine" ]]; then - EXTRACT_TAG="${EXTRACT_TAG}-alpine" + - name: Create manifest list, push it and extract digest SHA + working-directory: ${{ runner.temp }}/digests + env: + BASE_TAGS: "${{ env.BASE_TAGS }}" + CONTAINER_REGISTRIES: "${{ env.CONTAINER_REGISTRIES }}" + run: | + IFS=',' read -ra IMAGES <<< "${CONTAINER_REGISTRIES}" + IFS=',' read -ra TAGS <<< "${BASE_TAGS}" + + TAG_ARGS=() + for img in "${IMAGES[@]}"; do + for tag in "${TAGS[@]}"; do + TAG_ARGS+=("-t" "${img}:${tag}") + done + done + + echo "Creating manifest" + if ! OUTPUT=$(docker buildx imagetools create \ + "${TAG_ARGS[@]}" \ + $(printf "${IMAGES[0]}@sha256:%s " *) 2>&1); then + echo "Manifest creation failed" + echo "${OUTPUT}" + exit 1 fi - # After each extraction the image is removed. - # This is needed because using different platforms doesn't trigger a new pull/download - - # Extract amd64 binary - docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp amd64:/vaultwarden vaultwarden-amd64-${BASE_IMAGE} - docker rm --force amd64 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract arm64 binary - docker create --name arm64 --platform=linux/arm64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp arm64:/vaultwarden vaultwarden-arm64-${BASE_IMAGE} - docker rm --force arm64 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract armv7 binary - docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp armv7:/vaultwarden vaultwarden-armv7-${BASE_IMAGE} - docker rm --force armv7 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - - # Extract armv6 binary - docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" - docker cp armv6:/vaultwarden vaultwarden-armv6-${BASE_IMAGE} - docker rm --force armv6 - docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}" + echo "Manifest created successfully" + echo "${OUTPUT}" - # Upload artifacts to Github Actions and Attest the binaries - - name: "Upload amd64 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }} - path: vaultwarden-amd64-${{ matrix.base_image }} - - - name: "Upload arm64 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }} - path: vaultwarden-arm64-${{ matrix.base_image }} + # Extract digest SHA for subsequent steps + GET_DIGEST_SHA="$(echo "${OUTPUT}" | grep -oE 'sha256:[a-f0-9]{64}' | tail -1)" + echo "DIGEST_SHA=${GET_DIGEST_SHA}" | tee -a "${GITHUB_ENV}" - - name: "Upload armv7 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + # Attest container images + - 
name: Attest - docker.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && env.DIGEST_SHA != ''}} + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }} - path: vaultwarden-armv7-${{ matrix.base_image }} + subject-name: ${{ vars.DOCKERHUB_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true - - name: "Upload armv6 artifact ${{ matrix.base_image }}" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + - name: Attest - ghcr.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_GHCR_LOGIN == 'true' && env.DIGEST_SHA != ''}} + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: - name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }} - path: vaultwarden-armv6-${{ matrix.base_image }} + subject-name: ${{ vars.GHCR_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true - - name: "Attest artifacts ${{ matrix.base_image }}" - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 + - name: Attest - quay.io - ${{ matrix.base_image }} + if: ${{ env.HAVE_QUAY_LOGIN == 'true' && env.DIGEST_SHA != ''}} + uses: actions/attest-build-provenance@00014ed6ed5efc5b1ab7f7f34a39eb55d41aa4f8 # v3.1.0 with: - subject-path: vaultwarden-* - # End Upload artifacts to Github Actions + subject-name: ${{ vars.QUAY_REPO }} + subject-digest: ${{ env.DIGEST_SHA }} + push-to-registry: true diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml index 43e4d6e9..bd1043a0 100644 --- a/.github/workflows/trivy.yml +++ b/.github/workflows/trivy.yml @@ -29,7 +29,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false @@ -46,6 +46,6 @@ jobs: severity: CRITICAL,HIGH - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2 + uses: github/codeql-action/upload-sarif@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 with: sarif_file: 'trivy-results.sarif' diff --git a/.github/workflows/typos.yml b/.github/workflows/typos.yml index d1410d24..b3dae9b7 100644 --- a/.github/workflows/typos.yml +++ b/.github/workflows/typos.yml @@ -12,11 +12,11 @@ jobs: steps: # Checkout the repo - name: Checkout - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false # End Checkout the repo # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too - name: Spell Check Repo - uses: crate-ci/typos@07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0 + uses: crate-ci/typos@1a319b54cc9e3b333fed6a5c88ba1a90324da514 # v1.40.1 diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index a3cd0df2..8ea25a4a 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -16,12 +16,12 @@ jobs: security-events: write # To write the security report steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0 with: persist-credentials: false - name: Run zizmor - uses: 
zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0 + uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0 with: # intentionally not scanning the entire repository, # since it contains integration tests. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6d0cb9fc..448ccbeb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -53,6 +53,6 @@ repos: - "cd docker && make" # When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too - repo: https://github.com/crate-ci/typos - rev: 07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0 + rev: 1a319b54cc9e3b333fed6a5c88ba1a90324da514 # v1.40.1 hooks: - id: typos diff --git a/Cargo.lock b/Cargo.lock index aff5c82c..1e045ab6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -161,9 +161,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.33" +version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2" +checksum = "98ec5f6c2f8bc326c994cb9e241cc257ddaba9afa8555a43cffbb5dd86efaa37" dependencies = [ "compression-codecs", "compression-core", @@ -221,9 +221,9 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.4.1" +version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" dependencies = [ "event-listener 5.4.1", "event-listener-strategy", @@ -361,9 +361,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "aws-config" -version = "1.8.10" +version = "1.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1856b1b48b65f71a4dd940b1c0931f9a7b646d4a924b9828ffefc1454714668a" +checksum = "96571e6996817bf3d58f6b569e4b9fd2e9d2fcf9f7424eed07b2ce9bb87535e5" dependencies = [ "aws-credential-types", "aws-runtime", @@ -380,7 +380,7 @@ dependencies = [ "bytes", "fastrand", "hex", - "http 1.3.1", + "http 1.4.0", "ring", "time", "tokio", @@ -391,9 +391,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.9" +version = "1.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86590e57ea40121d47d3f2e131bfd873dea15d78dc2f4604f4734537ad9e56c4" +checksum = "3cd362783681b15d136480ad555a099e82ecd8e2d10a841e14dfd0078d67fee3" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -403,9 +403,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.14" +version = "1.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe0fd441565b0b318c76e7206c8d1d0b0166b3e986cf30e890b61feb6192045" +checksum = "d81b5b2898f6798ad58f484856768bca817e3cd9de0974c24ae0f1113fe88f1b" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -427,9 +427,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.89.0" +version = "1.91.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c1b1af02288f729e95b72bd17988c009aa72e26dcb59b3200f86d7aea726c9" +checksum = "8ee6402a36f27b52fe67661c6732d684b2635152b676aa2babbfb5204f99115d" dependencies = [ "aws-credential-types", "aws-runtime", @@ -449,9 +449,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.91.0" +version = "1.93.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e8122301558dc7c6c68e878af918880b82ff41897a60c8c4e18e4dc4d93e9f1" +checksum = "a45a7f750bbd170ee3677671ad782d90b894548f4e4ae168302c57ec9de5cb3e" dependencies = [ "aws-credential-types", "aws-runtime", @@ -471,9 +471,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.92.0" +version = "1.95.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c7808adcff8333eaa76a849e6de926c6ac1a1268b9fd6afe32de9c29ef29d2" +checksum = "55542378e419558e6b1f398ca70adb0b2088077e79ad9f14eb09441f2f7b2164" dependencies = [ "aws-credential-types", "aws-runtime", @@ -494,9 +494,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.3.6" +version = "1.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" +checksum = "69e523e1c4e8e7e8ff219d732988e22bfeae8a1cafdbe6d9eca1546fa080be7c" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -507,7 +507,7 @@ dependencies = [ "hex", "hmac", "http 0.2.12", - "http 1.3.1", + "http 1.4.0", "percent-encoding", "sha2", "time", @@ -516,9 +516,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.2.6" +version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" +checksum = "9ee19095c7c4dda59f1697d028ce704c24b2d33c6718790c7f1d5a3015b4107c" dependencies = [ "futures-util", "pin-project-lite", @@ -527,9 +527,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.5" +version = "0.62.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" +checksum = "826141069295752372f8203c17f28e30c464d22899a43a0c9fd9c458d469c88b" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", @@ -538,7 +538,7 @@ dependencies = [ "futures-core", "futures-util", "http 0.2.12", - "http 1.3.1", + "http 1.4.0", "http-body 0.4.6", "percent-encoding", "pin-project-lite", @@ -548,27 +548,27 @@ dependencies = [ [[package]] name = "aws-smithy-json" -version = "0.61.7" +version = "0.61.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" +checksum = "49fa1213db31ac95288d981476f78d05d9cbb0353d22cdf3472cc05bb02f6551" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-observability" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" +checksum = "17f616c3f2260612fe44cede278bafa18e73e6479c4e393e2c4518cf2a9a228a" dependencies = [ "aws-smithy-runtime-api", ] [[package]] name = "aws-smithy-query" -version = "0.60.8" +version = "0.60.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" +checksum = "ae5d689cf437eae90460e944a58b5668530d433b4ff85789e69d2f2a556e057d" dependencies = [ "aws-smithy-types", "urlencoding", @@ -576,9 +576,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bbe9d018d646b96c7be063dd07987849862b0e6d07c778aad7d93d1be6c1ef0" +checksum = 
"a392db6c583ea4a912538afb86b7be7c5d8887d91604f50eb55c262ee1b4a5f5" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -588,7 +588,7 @@ dependencies = [ "bytes", "fastrand", "http 0.2.12", - "http 1.3.1", + "http 1.4.0", "http-body 0.4.6", "http-body 1.0.1", "pin-project-lite", @@ -599,15 +599,15 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" +checksum = "ab0d43d899f9e508300e587bf582ba54c27a452dd0a9ea294690669138ae14a2" dependencies = [ "aws-smithy-async", "aws-smithy-types", "bytes", "http 0.2.12", - "http 1.3.1", + "http 1.4.0", "pin-project-lite", "tokio", "tracing", @@ -616,15 +616,15 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.4" +version = "1.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" +checksum = "905cb13a9895626d49cf2ced759b062d913834c7482c38e49557eac4e6193f01" dependencies = [ "base64-simd", "bytes", "bytes-utils", "http 0.2.12", - "http 1.3.1", + "http 1.4.0", "http-body 0.4.6", "http-body 1.0.1", "http-body-util", @@ -639,18 +639,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.12" +version = "0.60.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" +checksum = "11b2f670422ff42bf7065031e72b45bc52a3508bd089f743ea90731ca2b6ea57" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.10" +version = "1.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" +checksum = "1d980627d2dd7bfc32a3c025685a033eeab8d365cc840c631ef59d1b8f428164" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -701,26 +701,26 @@ dependencies = [ [[package]] name = "base64ct" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" +checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a" [[package]] name = "base64urlsafedata" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "215ee31f8a88f588c349ce2d20108b2ed96089b96b9c2b03775dc35dd72938e8" +checksum = "42f7f6be94fa637132933fd0a68b9140bcb60e3d46164cb68e82a2bb8d102b3a" dependencies = [ "base64 0.21.7", - "pastey", + "pastey 0.1.1", "serde", ] [[package]] name = "bigdecimal" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "560f42649de9fa436b73517378a147ec21f6c997a546581df4b4b31677828934" +checksum = "4d6867f1565b3aad85681f1015055b087fcfd840d6aeee6eee7f2da317603695" dependencies = [ "autocfg", "libm", @@ -804,9 +804,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytecount" @@ -828,9 +828,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" 
+version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "bytes-utils" @@ -880,9 +880,9 @@ checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" [[package]] name = "camino" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" dependencies = [ "serde_core", ] @@ -920,9 +920,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.45" +version = "1.2.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35900b6c8d709fb1d854671ae27aeaa9eec2f8b01b364e1619a40da3e6fe2afe" +checksum = "7a0aeaff4ff1a90589618835a598e545176939b97874f7abc7851caa0618f203" dependencies = [ "find-msvc-tools", "jobserver", @@ -953,7 +953,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -994,9 +994,9 @@ checksum = "b9e769b5c8c8283982a987c6e948e540254f1058d5a74b8794914d4ef5fc2a24" [[package]] name = "compression-codecs" -version = "0.4.32" +version = "0.4.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b" +checksum = "b0f7ac3e5b97fdce45e8922fb05cae2c37f7bbd63d30dd94821dacfd8f3f2bf2" dependencies = [ "brotli", "compression-core", @@ -1008,9 +1008,9 @@ dependencies = [ [[package]] name = "compression-core" -version = "0.4.30" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9b614a5787ef0c8802a55766480563cb3a93b435898c422ed2a359cf811582" +checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" [[package]] name = "concurrent-queue" @@ -1048,32 +1048,23 @@ dependencies = [ ] [[package]] -name = "cookie" -version = "0.18.1" +name = "convert_case" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" dependencies = [ - "percent-encoding", - "time", - "version_check", + "unicode-segmentation", ] [[package]] -name = "cookie_store" -version = "0.21.1" +name = "cookie" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eac901828f88a5241ee0600950ab981148a18f2f756900ffba1b125ca6a3ef9" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ - "cookie", - "document-features", - "idna", - "log", - "publicsuffix", - "serde", - "serde_derive", - "serde_json", + "percent-encoding", "time", - "url", + "version_check", ] [[package]] @@ -1420,21 +1411,23 @@ dependencies = [ [[package]] name = "derive_more" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version", "syn", "unicode-xid", ] @@ -1474,9 +1467,9 @@ dependencies = [ [[package]] name = "diesel" -version = "2.3.3" +version = "2.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e7624a3bb9fffd82fff016be9a7f163d20e5a89eb8d28f9daaa6b30fff37500" +checksum = "e130c806dccc85428c564f2dc5a96e05b6615a27c9a28776bd7761a9af4bb552" dependencies = [ "bigdecimal", "bitflags", @@ -1511,9 +1504,9 @@ dependencies = [ [[package]] name = "diesel_derives" -version = "2.3.4" +version = "2.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9daac6489a36e42570da165a10c424f3edcefdff70c5fd55e1847c23f3dd7562" +checksum = "c30b2969f923fa1f73744b92bb7df60b858df8832742d9a3aceb79236c0be1d2" dependencies = [ "diesel_table_macro_syntax", "dsl_auto_type", @@ -1524,9 +1517,9 @@ dependencies = [ [[package]] name = "diesel_migrations" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee060f709c3e3b1cadd83fcd0f61711f7a8cf493348f758d3a1c1147d70b3c97" +checksum = "745fd255645f0f1135f9ec55c7b00e0882192af9683ab4731e4bba3da82b8f9c" dependencies = [ "diesel", "migrations_internals", @@ -1828,9 +1821,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" +checksum = "645cbb3a84e60b7531617d5ae4e57f7e27308f6445f5abf653209ea76dec8dff" [[package]] name = "flate2" @@ -1854,6 +1847,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -2057,9 +2056,9 @@ dependencies = [ [[package]] name = "governor" -version = "0.10.1" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444405bbb1a762387aa22dd569429533b54a1d8759d35d3b64cb39b0293eaa19" +checksum = "9efcab3c1958580ff1f25a2a41be1668f7603d849bb63af523b208a3cc1223b8" dependencies = [ "cfg-if", "dashmap 6.1.0", @@ -2067,7 +2066,7 @@ dependencies = [ "futures-timer", "futures-util", "getrandom 0.3.4", - "hashbrown 0.15.5", + "hashbrown 0.16.1", "nonzero_ext", "parking_lot", "portable-atomic", @@ -2085,7 +2084,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d9e3df7f0222ce5184154973d247c591d9aadc28ce7a73c6cd31100c9facff6" dependencies = [ "codemap", - "indexmap 2.12.0", + "indexmap 2.12.1", "lasso", "once_cell", "phf 0.11.3", @@ -2113,8 +2112,8 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http 1.3.1", - "indexmap 2.12.0", + "http 1.4.0", + "indexmap 2.12.1", "slab", "tokio", "tokio-util", @@ -2173,14 +2172,19 @@ checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.1.5", ] [[package]] name = "hashbrown" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", +] [[package]] name = "heck" @@ -2275,20 +2279,20 @@ dependencies = [ [[package]] name = "hostname" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56f203cd1c76362b69e3863fd987520ac36cf70a8c92627449b2f64a8cf7d65" +checksum = "617aaa3557aef3810a6369d0a99fac8a080891b68bd9f9812a1eeda0c0730cbd" dependencies = [ "cfg-if", "libc", - "windows-link 0.1.3", + "windows-link", ] [[package]] name = "html5gum" -version = "0.8.0" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6fbe46e93059ce8ee19fbefdb0c7699cc7197fcaac048f2c3593f3e5da845f" +checksum = "12d29324a6ba370667998f63c6dd2b2511e2297f07e827f69026684907adc3b5" dependencies = [ "jetscii", ] @@ -2306,12 +2310,11 @@ dependencies = [ [[package]] name = "http" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -2333,7 +2336,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.1", + "http 1.4.0", ] [[package]] @@ -2344,7 +2347,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "pin-project-lite", ] @@ -2386,16 +2389,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", "futures-channel", "futures-core", "h2", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "httparse", "itoa", @@ -2412,10 +2415,11 @@ version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "http 1.3.1", - "hyper 1.7.0", + "http 1.4.0", + "hyper 1.8.1", "hyper-util", "rustls 0.23.35", + "rustls-native-certs", "rustls-pki-types", "tokio", "tokio-rustls 0.26.4", @@ -2431,7 +2435,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-util", "native-tls", "tokio", @@ -2441,18 +2445,18 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.17" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" +checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", - "hyper 1.7.0", + "hyper 1.8.1", "ipnet", "libc", "percent-encoding", @@ -2537,9 +2541,9 @@ checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = 
"icu_properties" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ "icu_collections", "icu_locale_core", @@ -2551,9 +2555,9 @@ dependencies = [ [[package]] name = "icu_properties_data" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" @@ -2610,12 +2614,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown 0.16.1", "serde", "serde_core", ] @@ -2656,9 +2660,9 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "iri-string" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" dependencies = [ "memchr", "serde", @@ -2686,9 +2690,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jetscii" @@ -2696,6 +2700,47 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e" +[[package]] +name = "jiff" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a87d9b8105c23642f50cbbae03d1f75d8422c5cb98ce7ee9271f7ff7505be6b8" +dependencies = [ + "jiff-static", + "jiff-tzdb-platform", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", + "windows-sys 0.61.2", +] + +[[package]] +name = "jiff-static" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b787bebb543f8969132630c51fd0afab173a86c6abae56ff3b9e5e3e3f9f6e58" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "jiff-tzdb" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68971ebff725b9e2ca27a601c5eb38a4c5d64422c4cbab0c535f248087eda5c2" + +[[package]] +name = "jiff-tzdb-platform" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" +dependencies = [ + "jiff-tzdb", +] + [[package]] name = "job_scheduler_ng" version = "2.4.0" @@ -2719,9 +2764,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.82" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" 
dependencies = [ "once_cell", "wasm-bindgen", @@ -2742,6 +2787,29 @@ dependencies = [ "simple_asn1", ] +[[package]] +name = "jsonwebtoken" +version = "10.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c76e1c7d7df3e34443b3621b459b066a7b79644f059fc8b2db7070c825fd417e" +dependencies = [ + "base64 0.22.1", + "ed25519-dalek", + "getrandom 0.2.16", + "hmac", + "js-sys", + "p256", + "p384", + "pem", + "rand 0.8.5", + "rsa", + "serde", + "serde_json", + "sha2", + "signature", + "simple_asn1", +] + [[package]] name = "kv-log-macro" version = "1.0.7" @@ -2803,9 +2871,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.177" +version = "0.2.178" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" +checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" [[package]] name = "libm" @@ -2863,9 +2931,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.28" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" dependencies = [ "value-bag", ] @@ -2931,7 +2999,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36c791ecdf977c99f45f23280405d7723727470f6689a5e6dbf513ac547ae10d" dependencies = [ "serde", - "toml 0.9.8", + "toml 0.9.10+spec-1.1.0", ] [[package]] @@ -2993,9 +3061,9 @@ dependencies = [ [[package]] name = "mio" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", "wasi", @@ -3004,9 +3072,9 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.11" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" +checksum = "a3dec6bd31b08944e08b58fd99373893a6c17054d6f3ea5006cc894f4f4eee2a" dependencies = [ "crossbeam-channel", "crossbeam-epoch", @@ -3014,7 +3082,6 @@ dependencies = [ "equivalent", "parking_lot", "portable-atomic", - "rustc_version", "smallvec", "tagptr", "uuid", @@ -3029,7 +3096,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http 1.3.1", + "http 1.4.0", "httparse", "memchr", "mime", @@ -3059,7 +3126,7 @@ dependencies = [ "libc", "log", "openssl", - "openssl-probe", + "openssl-probe 0.1.6", "openssl-sys", "schannel", "security-framework 2.11.1", @@ -3113,9 +3180,9 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82c79c15c05d4bf82b6f5ef163104cc81a760d8e874d38ac50ab67c8877b647b" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ "lazy_static", "libm", @@ -3217,7 +3284,7 @@ dependencies = [ "base64 0.21.7", "chrono", "getrandom 0.2.16", - "http 1.3.1", + "http 1.4.0", "rand 0.8.5", "reqwest", "serde", @@ -3258,29 +3325,30 @@ dependencies = [ [[package]] name = "opendal" -version = "0.54.1" +version = "0.55.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42afda58fa2cf50914402d132cc1caacff116a85d10c72ab2082bb7c50021754" +checksum = 
"d075ab8a203a6ab4bc1bce0a4b9fe486a72bf8b939037f4b78d95386384bc80a" dependencies = [ "anyhow", "backon", "base64 0.22.1", "bytes", - "chrono", "crc32c", "futures", "getrandom 0.2.16", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", + "jiff", "log", "md-5", "percent-encoding", - "quick-xml 0.38.3", + "quick-xml 0.38.4", "reqsign", "reqwest", "serde", "serde_json", "tokio", + "url", "uuid", ] @@ -3295,7 +3363,7 @@ dependencies = [ "dyn-clone", "ed25519-dalek", "hmac", - "http 1.3.1", + "http 1.4.0", "itertools", "log", "oauth2", @@ -3347,6 +3415,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-probe" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391" + [[package]] name = "openssl-src" version = "300.5.4+3.5.4" @@ -3444,7 +3518,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -3459,16 +3533,16 @@ dependencies = [ ] [[package]] -name = "paste" -version = "1.0.15" +name = "pastey" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" [[package]] name = "pastey" -version = "0.1.1" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec" +checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec" [[package]] name = "pbkdf2" @@ -3530,9 +3604,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" +checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" dependencies = [ "memchr", "ucd-trie", @@ -3540,9 +3614,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" +checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" dependencies = [ "pest", "pest_generator", @@ -3550,9 +3624,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" +checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" dependencies = [ "pest", "pest_meta", @@ -3563,9 +3637,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" +checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" dependencies = [ "pest", "sha2", @@ -3720,9 +3794,18 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.11.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" +checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" + +[[package]] +name = "portable-atomic-util" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +dependencies = [ + "portable-atomic", +] [[package]] name = "potential_utf" @@ -3770,9 +3853,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.103" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" +checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0" dependencies = [ "unicode-ident", ] @@ -3860,9 +3943,9 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.38.3" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" +checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c" dependencies = [ "memchr", "serde", @@ -3881,7 +3964,7 @@ dependencies = [ "quinn-udp", "rustc-hash", "rustls 0.23.35", - "socket2 0.5.10", + "socket2 0.6.1", "thiserror 2.0.17", "tokio", "tracing", @@ -4113,8 +4196,8 @@ dependencies = [ "hex", "hmac", "home", - "http 1.3.1", - "jsonwebtoken", + "http 1.4.0", + "jsonwebtoken 9.3.1", "log", "once_cell", "percent-encoding", @@ -4133,24 +4216,23 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.24" +version = "0.12.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ - "async-compression", "base64 0.22.1", "bytes", "cookie", - "cookie_store 0.21.1", + "cookie_store", "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", "http-body-util", - "hyper 1.7.0", + "hyper 1.8.1", "hyper-rustls", "hyper-tls", "hyper-util", @@ -4162,6 +4244,7 @@ dependencies = [ "pin-project-lite", "quinn", "rustls 0.23.35", + "rustls-native-certs", "rustls-pki-types", "serde", "serde_json", @@ -4184,9 +4267,9 @@ dependencies = [ [[package]] name = "resolv-conf" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3789b30bd25ba102de4beabd95d21ac45b69b1be7d14522bab988c526d6799" +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" [[package]] name = "rfc6979" @@ -4214,22 +4297,19 @@ dependencies = [ [[package]] name = "rmp" -version = "0.8.14" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +checksum = "4ba8be72d372b2c9b35542551678538b562e7cf86c3315773cae48dfbfe7790c" dependencies = [ - "byteorder", "num-traits", - "paste", ] [[package]] name = "rmpv" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58450723cd9ee93273ce44a20b6ec4efe17f8ed2e3631474387bfdecf18bb2a9" +checksum = "7a4e1d4b9b938a26d2996af33229f0ca0956c652c1375067f0b45291c1df8417" dependencies = [ - "num-traits", "rmp", ] @@ -4247,7 +4327,7 @@ dependencies = [ "either", "figment", "futures", - "indexmap 2.12.0", + "indexmap 2.12.1", "log", "memchr", 
"multer", @@ -4279,7 +4359,7 @@ checksum = "575d32d7ec1a9770108c879fc7c47815a80073f96ca07ff9525a94fcede1dd46" dependencies = [ "devise", "glob", - "indexmap 2.12.0", + "indexmap 2.12.1", "proc-macro2", "quote", "rocket_http", @@ -4299,7 +4379,7 @@ dependencies = [ "futures", "http 0.2.12", "hyper 0.14.32", - "indexmap 2.12.0", + "indexmap 2.12.1", "log", "memchr", "pear", @@ -4341,9 +4421,9 @@ dependencies = [ [[package]] name = "rsa" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" dependencies = [ "const-oid", "digest", @@ -4406,9 +4486,9 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ "bitflags", "errno", @@ -4446,11 +4526,11 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ - "openssl-probe", + "openssl-probe 0.2.0", "rustls-pki-types", "schannel", "security-framework 3.5.1", @@ -4467,9 +4547,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.13.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" dependencies = [ "web-time", "zeroize", @@ -4504,9 +4584,9 @@ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" [[package]] name = "salsa20" @@ -4558,9 +4638,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" +checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" dependencies = [ "dyn-clone", "ref-cast", @@ -4713,15 +4793,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da" dependencies = [ "itoa", "memchr", - "ryu", "serde", "serde_core", + "zmij", ] [[package]] @@ -4755,9 +4835,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" dependencies = [ "serde_core", ] @@ -4776,17 
+4856,17 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.15.1" +version = "3.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.12.0", + "indexmap 2.12.1", "schemars 0.9.0", - "schemars 1.1.0", + "schemars 1.2.0", "serde_core", "serde_json", "serde_with_macros", @@ -4795,9 +4875,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.15.1" +version = "3.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" dependencies = [ "darling 0.21.3", "proc-macro2", @@ -4854,10 +4934,11 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.6" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] @@ -4873,9 +4954,9 @@ dependencies = [ [[package]] name = "simd-adler32" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" [[package]] name = "simple_asn1" @@ -4969,17 +5050,15 @@ dependencies = [ [[package]] name = "sqlite-wasm-rs" -version = "0.4.7" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c6d746902bca4ddf16592357eacf0473631ea26b36072f0dd0b31fa5ccd1f4" +checksum = "05e98301bf8b0540c7de45ecd760539b9c62f5772aed172f08efba597c11cd5d" dependencies = [ + "cc", + "hashbrown 0.16.1", "js-sys", - "once_cell", "thiserror 2.0.17", - "tokio", "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", ] [[package]] @@ -5047,9 +5126,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.110" +version = "2.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" +checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" dependencies = [ "proc-macro2", "quote", @@ -5117,9 +5196,9 @@ checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" [[package]] name = "tempfile" -version = "3.23.0" +version = "3.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand", "getrandom 0.3.4", @@ -5362,15 +5441,15 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.8" +version = "0.9.10+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" +checksum = "0825052159284a1a8b4d6c0c86cbc801f2da5afd2b225fa548c72f2e74002f48" dependencies = [ "serde_core", - "serde_spanned 1.0.3", - "toml_datetime 0.7.3", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", "toml_parser", - "winnow 0.7.13", + "winnow 0.7.14", ] [[package]] @@ 
-5384,9 +5463,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.3" +version = "0.7.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" dependencies = [ "serde_core", ] @@ -5397,21 +5476,21 @@ version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.12.0", + "indexmap 2.12.1", "serde", "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_write", - "winnow 0.7.13", + "winnow 0.7.14", ] [[package]] name = "toml_parser" -version = "1.0.4" +version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" dependencies = [ - "winnow 0.7.13", + "winnow 0.7.14", ] [[package]] @@ -5449,17 +5528,22 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ + "async-compression", "bitflags", "bytes", + "futures-core", "futures-util", - "http 1.3.1", + "http 1.4.0", "http-body 1.0.1", + "http-body-util", "iri-string", "pin-project-lite", + "tokio", + "tokio-util", "tower", "tower-layer", "tower-service", @@ -5479,9 +5563,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "log", "pin-project-lite", @@ -5491,9 +5575,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", @@ -5502,9 +5586,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", "valuable", @@ -5523,9 +5607,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.20" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ "matchers", "nu-ansi-term", @@ -5560,7 +5644,7 @@ dependencies = [ "byteorder", "bytes", "data-encoding", - "http 1.3.1", + "http 1.4.0", "httparse", "log", "rand 0.8.5", @@ -5613,6 +5697,12 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -5657,13 +5747,13 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.18.1" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" dependencies = [ "getrandom 0.3.4", "js-sys", - "serde", + "serde_core", "wasm-bindgen", ] @@ -5675,9 +5765,9 @@ checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "value-bag" -version = "1.11.1" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" +checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" [[package]] name = "vaultwarden" @@ -5694,7 +5784,7 @@ dependencies = [ "chrono", "chrono-tz", "cookie", - "cookie_store 0.22.0", + "cookie_store", "dashmap 6.1.0", "data-encoding", "data-url", @@ -5711,9 +5801,9 @@ dependencies = [ "handlebars", "hickory-resolver", "html5gum", - "http 1.3.1", + "http 1.4.0", "job_scheduler_ng", - "jsonwebtoken", + "jsonwebtoken 10.2.0", "lettre", "libsqlite3-sys", "log", @@ -5725,7 +5815,7 @@ dependencies = [ "opendal", "openidconnect", "openssl", - "pastey", + "pastey 0.2.1", "percent-encoding", "pico-args", "rand 0.9.2", @@ -5811,9 +5901,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", @@ -5824,9 +5914,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.55" +version = "0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", "js-sys", @@ -5837,9 +5927,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5847,9 +5937,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ "bumpalo", "proc-macro2", @@ -5860,9 +5950,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.105" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] @@ -5882,9 +5972,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.82" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", @@ -5902,9 +5992,9 @@ dependencies = [ [[package]] name = "webauthn-attestation-ca" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f77a2892ec44032e6c48dad9aad1b05fada09c346ada11d8d32db119b4b4f205" +checksum = "fafcf13f7dc1fb292ed4aea22cdd3757c285d7559e9748950ee390249da4da6b" dependencies = [ "base64urlsafedata", "openssl", @@ -5916,9 +6006,9 @@ dependencies = [ [[package]] name = "webauthn-rs" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7c3a2f9c8bddd524e47bbd427bcf3a28aa074de55d74470b42a91a41937b8e" +checksum = "1b24d082d3360258fefb6ffe56123beef7d6868c765c779f97b7a2fcf06727f8" dependencies = [ "base64urlsafedata", "serde", @@ -5930,9 +6020,9 @@ dependencies = [ [[package]] name = "webauthn-rs-core" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19f1d80f3146382529fe70a3ab5d0feb2413a015204ed7843f9377cd39357fc4" +checksum = "15784340a24c170ce60567282fb956a0938742dbfbf9eff5df793a686a009b8b" dependencies = [ "base64 0.21.7", "base64urlsafedata", @@ -5941,8 +6031,8 @@ dependencies = [ "nom 7.1.3", "openssl", "openssl-sys", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand 0.9.2", + "rand_chacha 0.9.0", "serde", "serde_cbor_2", "serde_json", @@ -5957,9 +6047,9 @@ dependencies = [ [[package]] name = "webauthn-rs-proto" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e786894f89facb9aaf1c5f6559670236723c98382e045521c76f3d5ca5047bd" +checksum = "16a1fb2580ce73baa42d3011a24de2ceab0d428de1879ece06e02e8c416e497c" dependencies = [ "base64 0.21.7", "base64urlsafedata", @@ -6042,9 +6132,9 @@ checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-link 0.2.1", - "windows-result 0.4.1", - "windows-strings 0.5.1", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] @@ -6069,12 +6159,6 @@ dependencies = [ "syn", ] -[[package]] -name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - [[package]] name = "windows-link" version = "0.2.1" @@ -6083,22 +6167,13 @@ checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" -dependencies = [ - "windows-link 0.1.3", - "windows-result 0.3.4", - "windows-strings 0.4.2", -] - -[[package]] -name = "windows-result" -version = "0.3.4" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" 
+checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.1.3", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] @@ -6107,16 +6182,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link 0.2.1", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.3", + "windows-link", ] [[package]] @@ -6125,7 +6191,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -6170,7 +6236,7 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -6210,7 +6276,7 @@ version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link 0.2.1", + "windows-link", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", @@ -6370,9 +6436,9 @@ dependencies = [ [[package]] name = "winnow" -version = "0.7.13" +version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" dependencies = [ "memchr", ] @@ -6484,18 +6550,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.27" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" +checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.27" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" +checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", @@ -6562,6 +6628,12 @@ dependencies = [ "syn", ] +[[package]] +name = "zmij" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f4a4e8e9dc5c62d159f04fcdbe07f4c3fb710415aab4754bf11505501e3251d" + [[package]] name = "zstd" version = "0.13.3" diff --git a/Cargo.toml b/Cargo.toml index 26bbd613..161a582f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace.package] edition = "2021" -rust-version = "1.89.0" +rust-version = "1.90.0" license = "AGPL-3.0-only" repository = "https://github.com/dani-garcia/vaultwarden" publish = false @@ -55,9 +55,9 @@ syslog = "7.0.0" macros = { path = "./macros" } # Logging -log = "0.4.28" +log = "0.4.29" fern = { version = "0.7.1", features = ["syslog-7", "reopen-1"] } -tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work +tracing = { version = "0.1.44", features = 
["log"] } # Needed to have lettre and webauthn-rs trace logging to work # A `dotenv` implementation for Rust dotenvy = { version = "0.15.7", default-features = false } @@ -65,7 +65,7 @@ dotenvy = { version = "0.15.7", default-features = false } # Numerical libraries num-traits = "0.2.19" num-derive = "0.4.2" -bigdecimal = "0.4.9" +bigdecimal = "0.4.10" # Web framework rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } @@ -80,17 +80,18 @@ dashmap = "6.1.0" # Async futures futures = "0.3.31" tokio = { version = "1.48.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } -tokio-util = { version = "0.7.16", features = ["compat"]} +tokio-util = { version = "0.7.17", features = ["compat"]} # A generic serialization/deserialization framework serde = { version = "1.0.228", features = ["derive"] } -serde_json = "1.0.145" +serde_json = "1.0.148" # A safe, extensible ORM and Query builder -diesel = { version = "2.3.3", features = ["chrono", "r2d2", "numeric"] } -diesel_migrations = "2.3.0" +# Currently pinned diesel to v2.3.3 as newer version break MySQL/MariaDB compatibility +diesel = { version = "2.3.5", features = ["chrono", "r2d2", "numeric"] } +diesel_migrations = "2.3.1" -derive_more = { version = "2.0.1", features = ["from", "into", "as_ref", "deref", "display"] } +derive_more = { version = "2.1.1", features = ["from", "into", "as_ref", "deref", "display"] } diesel-derive-newtype = "2.1.2" # Bundled/Static SQLite @@ -102,7 +103,7 @@ ring = "0.17.14" subtle = "2.6.1" # UUID generation -uuid = { version = "1.18.1", features = ["v4"] } +uuid = { version = "1.19.0", features = ["v4"] } # Date and time libraries chrono = { version = "0.4.42", features = ["clock", "serde"], default-features = false } @@ -116,7 +117,7 @@ job_scheduler_ng = "2.4.0" data-encoding = "2.9.0" # JWT library -jsonwebtoken = "9.3.1" +jsonwebtoken = { version = "10.2.0", features = ["use_pem", "rust_crypto"], default-features = false } # TOTP library totp-lite = "2.0.1" @@ -127,9 +128,9 @@ yubico = { package = "yubico_ng", version = "0.14.1", features = ["online-tokio" # WebAuthn libraries # danger-allow-state-serialisation is needed to save the state in the db # danger-credential-internals is needed to support U2F to Webauthn migration -webauthn-rs = { version = "0.5.3", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } -webauthn-rs-proto = "0.5.3" -webauthn-rs-core = "0.5.3" +webauthn-rs = { version = "0.5.4", features = ["danger-allow-state-serialisation", "danger-credential-internals"] } +webauthn-rs-proto = "0.5.4" +webauthn-rs-core = "0.5.4" # Handling of URL's for WebAuthn and favicons url = "2.5.7" @@ -143,15 +144,14 @@ email_address = "0.2.9" handlebars = { version = "6.3.2", features = ["dir_source"] } # HTTP client (Used for favicons, version check, DUO and HIBP API) -# Swap rustls -> native-tls for experiment to avoid pulling webpki-roots (CDLA-Permissive-2.0) -reqwest = { version = "0.12.24", features = ["native-tls", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false } +reqwest = { version = "0.12.28", features = ["rustls-tls", "rustls-tls-native-roots", "stream", "json", "deflate", "gzip", "brotli", "zstd", "socks", "cookies", "charset", "http2", "system-proxy"], default-features = false} hickory-resolver = "0.25.2" # Favicon extraction libraries -html5gum = "0.8.0" +html5gum = "0.8.3" regex = { version = "1.12.2", features 
= ["std", "perf", "unicode-perl"], default-features = false } data-url = "0.3.2" -bytes = "1.10.1" +bytes = "1.11.0" svg-hush = "0.9.5" # Cache function results (Used for version check and favicon fetching) @@ -162,14 +162,14 @@ cookie = "0.18.1" cookie_store = "0.22.0" # Used by U2F, JWT and PostgreSQL -openssl = "0.10.74" +openssl = "0.10.75" # CLI argument parsing pico-args = "0.5.0" # Macro ident concatenation -pastey = "0.1.1" -governor = "0.10.1" +pastey = "0.2.1" +governor = "0.10.4" # OIDC for SSO openidconnect = { version = "^4.0", features = ["native-tls"] } @@ -194,14 +194,14 @@ rpassword = "7.4.0" grass_compiler = { version = "0.13.4", default-features = false } # File are accessed through Apache OpenDAL -opendal = { version = "0.54.1", features = ["services-fs"], default-features = false } +opendal = { version = "0.55.0", features = ["services-fs"], default-features = false } # For retrieving AWS credentials, including temporary SSO credentials anyhow = { version = "1.0.100", optional = true } -aws-config = { version = "1.8.8", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } -aws-credential-types = { version = "1.2.8", optional = true } -aws-smithy-runtime-api = { version = "1.9.2", optional = true } -http = { version = "1.3.1", optional = true } +aws-config = { version = "1.8.12", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true } +aws-credential-types = { version = "1.2.11", optional = true } +aws-smithy-runtime-api = { version = "1.9.3", optional = true } +http = { version = "1.4.0", optional = true } reqsign = { version = "0.16.5", optional = true } # Strip debuginfo from the release builds diff --git a/docker/DockerSettings.yaml b/docker/DockerSettings.yaml index 1707affe..dd87a9e3 100644 --- a/docker/DockerSettings.yaml +++ b/docker/DockerSettings.yaml @@ -1,13 +1,13 @@ --- -vault_version: "v2025.10.1" -vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa" -# Cross Compile Docker Helper Scripts v1.8.0 +vault_version: "v2025.12.1+build.3" +vault_image_digest: "sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42" +# Cross Compile Docker Helper Scripts v1.9.0 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags -xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6" -rust_version: 1.91.0 # Rust version to be used +xx_image_digest: "sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707" +rust_version: 1.92.0 # Rust version to be used debian_version: trixie # Debian release name to be used -alpine_version: "3.22" # Alpine version to be used +alpine_version: "3.23" # Alpine version to be used # For which platforms/architectures will we try to build images platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] # Determine the build images per OS/Arch @@ -17,7 +17,6 @@ build_stage_image: platform: "$BUILDPLATFORM" alpine: image: "build_${TARGETARCH}${TARGETVARIANT}" - platform: "linux/amd64" # The Alpine build images only have linux/amd64 images arch_image: amd64: "ghcr.io/blackdex/rust-musl:x86_64-musl-stable-{{rust_version}}" arm64: "ghcr.io/blackdex/rust-musl:aarch64-musl-stable-{{rust_version}}" diff --git a/docker/Dockerfile.alpine 
b/docker/Dockerfile.alpine index 0b0a7c10..2a6cf9f2 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -19,27 +19,27 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1 -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1 -# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa] +# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.1_build.3 +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.1_build.3 +# [docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa -# [docker.io/vaultwarden/web-vault:v2025.10.1] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 +# [docker.io/vaultwarden/web-vault:v2025.12.1_build.3] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 AS vault ########################## ALPINE BUILD IMAGES ########################## -## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 +## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64 ## And for Alpine we define all build images here, they will only be loaded when actually used -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.92.0 AS build_amd64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.92.0 AS build_arm64 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.92.0 AS build_armv7 +FROM --platform=$BUILDPLATFORM ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.92.0 AS build_armv6 ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform=linux/amd64 build_${TARGETARCH}${TARGETVARIANT} AS build +FROM --platform=$BUILDPLATFORM build_${TARGETARCH}${TARGETVARIANT} AS build ARG TARGETARCH ARG TARGETVARIANT ARG TARGETPLATFORM @@ -127,7 +127,7 @@ RUN source /env-cargo && \ # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' # # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 -FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.22 +FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.23 ENV ROCKET_PROFILE="release" \ ROCKET_ADDRESS=0.0.0.0 \ diff --git a/docker/Dockerfile.debian b/docker/Dockerfile.debian index 
8e9cadb3..03c0faba 100644 --- a/docker/Dockerfile.debian +++ b/docker/Dockerfile.debian @@ -19,24 +19,24 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1 -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1 -# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa] +# $ docker pull docker.io/vaultwarden/web-vault:v2025.12.1_build.3 +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.12.1_build.3 +# [docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa -# [docker.io/vaultwarden/web-vault:v2025.10.1] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 +# [docker.io/vaultwarden/web-vault:v2025.12.1_build.3] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:bf5aa55dc7bcb99f85d2a88ff44d32cdc832e934a0603fe28e5c3f92904bad42 AS vault ########################## Cross Compile Docker Helper Scripts ########################## ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts ## And these bash scripts do not have any significant difference if at all -FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6 AS xx +FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:c64defb9ed5a91eacb37f96ccc3d4cd72521c4bd18d5442905b95e2226b0e707 AS xx ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.0-slim-trixie AS build +FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.92.0-slim-trixie AS build COPY --from=xx / / ARG TARGETARCH ARG TARGETVARIANT @@ -51,7 +51,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ TERM=xterm-256color \ CARGO_HOME="/root/.cargo" \ USER="root" - # Install clang to get `xx-cargo` working # Install pkg-config to allow amd64 builds to find all libraries # Install git so build.rs can determine the correct version @@ -175,7 +174,7 @@ RUN mkdir /data && \ --no-install-recommends \ ca-certificates \ curl \ - libmariadb-dev \ + libmariadb3 \ libpq5 \ openssl && \ apt-get clean && \ diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index c1f2a032..f745780e 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -19,13 +19,13 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version }} -# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version }} +# $ docker pull docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }} +# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }} # [docker.io/vaultwarden/web-vault@{{ vault_image_digest }}] # # - Conversely, to get the tag name from the digest: # $ docker image inspect --format "{{ '{{' }}.RepoTags}}" docker.io/vaultwarden/web-vault@{{ vault_image_digest }} -# [docker.io/vaultwarden/web-vault:{{ vault_version }}] +# [docker.io/vaultwarden/web-vault:{{ vault_version | replace('+', '_') }}] # FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_digest }} AS vault @@ -36,16 +36,16 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@{{ vault_image_diges FROM --platform=linux/amd64 docker.io/tonistiigi/xx@{{ xx_image_digest }} AS xx {% elif base == "alpine" %} ########################## ALPINE BUILD IMAGES ########################## -## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 +## NOTE: The Alpine Base Images do not support other platforms than linux/amd64 and linux/arm64 ## And for Alpine we define all build images here, they will only be loaded when actually used {% for arch in build_stage_image[base].arch_image %} -FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }} +FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].arch_image[arch] }} AS build_{{ arch }} {% endfor %} {% endif %} ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform={{ build_stage_image[base].platform }} {{ build_stage_image[base].image }} AS build +FROM --platform=$BUILDPLATFORM {{ build_stage_image[base].image }} AS build {% if base == "debian" %} COPY --from=xx / / {% endif %} @@ -69,7 +69,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ {% endif %} {% if base == "debian" %} - # Install clang to get `xx-cargo` working # Install pkg-config to allow amd64 builds to find all libraries # Install git so build.rs can determine the correct version @@ -212,7 +211,7 @@ RUN mkdir /data && \ --no-install-recommends \ ca-certificates \ curl \ - libmariadb-dev \ + libmariadb3 \ libpq5 \ openssl && \ apt-get clean && \ diff --git a/macros/Cargo.toml b/macros/Cargo.toml index ef5a6480..9855c56e 100644 --- a/macros/Cargo.toml +++ b/macros/Cargo.toml @@ -13,8 +13,8 @@ path = "src/lib.rs" proc-macro = true [dependencies] -quote = "1.0.41" -syn = "2.0.108" +quote = "1.0.42" +syn = "2.0.111" [lints] workspace = true diff --git a/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql index 4e06fe58..9e5e46df 100644 --- a/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql +++ b/migrations/mysql/2024-03-13-170000_sso_users_cascade/up.sql @@ -1,2 +1,15 @@ -ALTER TABLE sso_users DROP FOREIGN KEY `sso_users_ibfk_1`; +-- Dynamically create DROP FOREIGN KEY +-- Some versions of MySQL or MariaDB might fail if the key doesn't exist +-- This checks if the key exists, and if so, will drop it. 
+SET @drop_sso_fk = IF((SELECT true FROM information_schema.TABLE_CONSTRAINTS WHERE + CONSTRAINT_SCHEMA = DATABASE() AND + TABLE_NAME = 'sso_users' AND + CONSTRAINT_NAME = 'sso_users_ibfk_1' AND + CONSTRAINT_TYPE = 'FOREIGN KEY') = true, + 'ALTER TABLE sso_users DROP FOREIGN KEY sso_users_ibfk_1', + 'SELECT 1'); +PREPARE stmt FROM @drop_sso_fk; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + ALTER TABLE sso_users ADD FOREIGN KEY(user_uuid) REFERENCES users(uuid) ON UPDATE CASCADE ON DELETE CASCADE; diff --git a/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql b/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql new file mode 100644 index 00000000..3a965886 --- /dev/null +++ b/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/down.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_auth; + +CREATE TABLE sso_nonce ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql b/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql new file mode 100644 index 00000000..1a68b715 --- /dev/null +++ b/migrations/mysql/2025-08-20-120000_sso_nonce_to_auth/up.sql @@ -0,0 +1,12 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_auth ( + state VARCHAR(512) NOT NULL PRIMARY KEY, + client_challenge TEXT NOT NULL, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + code_response TEXT, + auth_response TEXT, + created_at TIMESTAMP NOT NULL DEFAULT now(), + updated_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql b/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql new file mode 100644 index 00000000..8cc36353 --- /dev/null +++ b/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/down.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_auth; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql b/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql new file mode 100644 index 00000000..0fee1b5a --- /dev/null +++ b/migrations/postgresql/2025-08-20-120000_sso_nonce_to_auth/up.sql @@ -0,0 +1,12 @@ +DROP TABLE IF EXISTS sso_nonce; + +CREATE TABLE sso_auth ( + state TEXT NOT NULL PRIMARY KEY, + client_challenge TEXT NOT NULL, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + code_response TEXT, + auth_response TEXT, + created_at TIMESTAMP NOT NULL DEFAULT now(), + updated_at TIMESTAMP NOT NULL DEFAULT now() +); diff --git a/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql b/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql new file mode 100644 index 00000000..453e267b --- /dev/null +++ b/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/down.sql @@ -0,0 +1,9 @@ +DROP TABLE IF EXISTS sso_auth; + +CREATE TABLE sso_nonce ( + state TEXT NOT NULL PRIMARY KEY, + nonce TEXT NOT NULL, + verifier TEXT, + redirect_uri TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql b/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql new file mode 100644 index 00000000..1cd868b4 --- /dev/null +++ b/migrations/sqlite/2025-08-20-120000_sso_nonce_to_auth/up.sql @@ -0,0 +1,12 @@ +DROP TABLE IF 
EXISTS sso_nonce; + +CREATE TABLE sso_auth ( + state TEXT NOT NULL PRIMARY KEY, + client_challenge TEXT NOT NULL, + nonce TEXT NOT NULL, + redirect_uri TEXT NOT NULL, + code_response TEXT, + auth_response TEXT, + created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP +); diff --git a/playwright/README.md b/playwright/README.md index 528bdf07..a27e6105 100644 --- a/playwright/README.md +++ b/playwright/README.md @@ -97,9 +97,9 @@ npx playwright codegen "http://127.0.0.1:8003" ## Override web-vault -It is possible to change the `web-vault` used by referencing a different `bw_web_builds` commit. +It is possible to change the `web-vault` used by referencing a different `vw_web_builds` commit. -Simplest is to set and uncomment `PW_WV_REPO_URL` and `PW_WV_COMMIT_HASH` in the `test.env`. +Simplest is to set and uncomment `PW_VW_REPO_URL` and `PW_VW_COMMIT_HASH` in the `test.env`. Ensure that the image is built with: ```bash @@ -112,6 +112,8 @@ You can check the result running: DOCKER_BUILDKIT=1 docker compose --profile playwright --env-file test.env up Vaultwarden ``` +Then check `http://127.0.0.1:8003/admin/diagnostics` with `admin`. + # OpenID Connect test setup Additionally this `docker-compose` template allows to run locally Vaultwarden, diff --git a/playwright/compose/warden/build.sh b/playwright/compose/warden/build.sh index a29067c8..37e9a25e 100755 --- a/playwright/compose/warden/build.sh +++ b/playwright/compose/warden/build.sh @@ -6,18 +6,19 @@ echo $COMMIT_HASH if [[ ! -z "$REPO_URL" ]] && [[ ! -z "$COMMIT_HASH" ]] ; then rm -rf /web-vault - mkdir bw_web_builds; - cd bw_web_builds; + mkdir -p vw_web_builds; + cd vw_web_builds; git -c init.defaultBranch=main init git remote add origin "$REPO_URL" git fetch --depth 1 origin "$COMMIT_HASH" git -c advice.detachedHead=false checkout FETCH_HEAD - export VAULT_VERSION=$(cat Dockerfile | grep "ARG VAULT_VERSION" | cut -d "=" -f2) - ./scripts/checkout_web_vault.sh - ./scripts/build_web_vault.sh - printf '{"version":"%s"}' "$COMMIT_HASH" > ./web-vault/apps/web/build/vw-version.json + npm ci --ignore-scripts - mv ./web-vault/apps/web/build /web-vault + cd apps/web + npm run dist:oss:selfhost + printf '{"version":"%s"}' "$COMMIT_HASH" > build/vw-version.json + + mv build /web-vault fi diff --git a/playwright/docker-compose.yml b/playwright/docker-compose.yml index c6e33e79..f4402326 100644 --- a/playwright/docker-compose.yml +++ b/playwright/docker-compose.yml @@ -18,10 +18,11 @@ services: context: compose/warden dockerfile: Dockerfile args: - REPO_URL: ${PW_WV_REPO_URL:-} - COMMIT_HASH: ${PW_WV_COMMIT_HASH:-} + REPO_URL: ${PW_VW_REPO_URL:-} + COMMIT_HASH: ${PW_VW_COMMIT_HASH:-} env_file: ${DC_ENV_FILE:-.env} environment: + - ADMIN_TOKEN - DATABASE_URL - I_REALLY_WANT_VOLATILE_STORAGE - LOG_LEVEL diff --git a/playwright/global-utils.ts b/playwright/global-utils.ts index e622451d..224bb4b8 100644 --- a/playwright/global-utils.ts +++ b/playwright/global-utils.ts @@ -221,9 +221,13 @@ export async function restartVault(page: Page, testInfo: TestInfo, env, resetDB: } export async function checkNotification(page: Page, hasText: string) { - await expect(page.locator('bit-toast').filter({ hasText })).toBeVisible(); - await page.locator('bit-toast').filter({ hasText }).getByRole('button').click(); - await expect(page.locator('bit-toast').filter({ hasText })).toHaveCount(0); + await expect(page.locator('bit-toast', { hasText })).toBeVisible(); + try { + await page.locator('bit-toast', { hasText 
}).getByRole('button', { name: 'Close' }).click({force: true, timeout: 10_000}); + } catch (error) { + console.log(`Closing notification failed but it should now be invisible (${error})`); + } + await expect(page.locator('bit-toast', { hasText })).toHaveCount(0); } export async function cleanLanding(page: Page) { @@ -244,3 +248,15 @@ export async function logout(test: Test, page: Page, user: { name: string }) { await expect(page.getByRole('heading', { name: 'Log in' })).toBeVisible(); }); } + +export async function ignoreExtension(page: Page) { + await page.waitForLoadState('domcontentloaded'); + + try { + await page.getByRole('button', { name: 'Add it later' }).click({timeout: 5_000}); + await page.getByRole('link', { name: 'Skip to web app' }).click(); + } catch (error) { + console.log('Extension setup not visible. Continuing'); + } + +} diff --git a/playwright/package-lock.json b/playwright/package-lock.json index 37d0b512..2f4cd0c1 100644 --- a/playwright/package-lock.json +++ b/playwright/package-lock.json @@ -9,15 +9,15 @@ "version": "1.0.0", "license": "ISC", "dependencies": { - "mysql2": "3.15.0", + "mysql2": "3.15.3", "otpauth": "9.4.1", "pg": "8.16.3" }, "devDependencies": { - "@playwright/test": "1.55.1", - "dotenv": "17.2.2", + "@playwright/test": "1.56.1", + "dotenv": "17.2.3", "dotenv-expand": "12.0.3", - "maildev": "npm:@timshel_npm/maildev@^3.2.3" + "maildev": "npm:@timshel_npm/maildev@3.2.5" } }, "node_modules/@asamuzakjp/css-color": { @@ -34,16 +34,16 @@ } }, "node_modules/@asamuzakjp/dom-selector": { - "version": "6.5.6", - "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.6.tgz", - "integrity": "sha512-Mj3Hu9ymlsERd7WOsUKNUZnJYL4IZ/I9wVVYgtvOsWYiEFbkQ4G7VRIh2USxTVW4BBDIsLG+gBUgqOqf2Kvqow==", + "version": "6.7.3", + "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.3.tgz", + "integrity": "sha512-kiGFeY+Hxf5KbPpjRLf+ffWbkos1aGo8MBfd91oxS3O57RgU3XhZrt/6UzoVF9VMpWbC3v87SRc9jxGrc9qHtQ==", "dev": true, "dependencies": { "@asamuzakjp/nwsapi": "^2.3.9", "bidi-js": "^1.0.3", "css-tree": "^3.1.0", "is-potential-custom-element-name": "^1.0.1", - "lru-cache": "^11.2.1" + "lru-cache": "^11.2.2" } }, "node_modules/@asamuzakjp/nwsapi": { @@ -144,9 +144,9 @@ } }, "node_modules/@csstools/css-syntax-patches-for-csstree": { - "version": "1.0.14", - "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz", - "integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==", + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.15.tgz", + "integrity": "sha512-q0p6zkVq2lJnmzZVPR33doA51G7YOja+FBvRdp5ISIthL0MtFCgYHHhR563z9WFGxcOn0WfjSkPDJ5Qig3H3Sw==", "dev": true, "funding": [ { @@ -160,9 +160,6 @@ ], "engines": { "node": ">=18" - }, - "peerDependencies": { - "postcss": "^8.4" } }, "node_modules/@csstools/css-tokenizer": { @@ -196,12 +193,12 @@ } }, "node_modules/@playwright/test": { - "version": "1.55.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.55.1.tgz", - "integrity": "sha512-IVAh/nOJaw6W9g+RJVlIQJ6gSiER+ae6mKQ5CX1bERzQgbC1VSeBlwdvczT7pxb0GWiyrxH4TGKbMfDb4Sq/ig==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz", + "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==", "dev": true, 
"dependencies": { - "playwright": "1.55.1" + "playwright": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -249,12 +246,12 @@ } }, "node_modules/@types/node": { - "version": "24.5.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz", - "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==", + "version": "24.2.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.2.1.tgz", + "integrity": "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ==", "dev": true, "dependencies": { - "undici-types": "~7.12.0" + "undici-types": "~7.10.0" } }, "node_modules/@types/trusted-types": { @@ -363,9 +360,9 @@ } }, "node_modules/body-parser/node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { "ms": "^2.1.3" @@ -637,9 +634,9 @@ } }, "node_modules/dompurify": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz", - "integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.0.tgz", + "integrity": "sha512-r+f6MYR1gGN1eJv0TVQbhA7if/U7P87cdPl3HN5rikqaBSBxLiCb/b9O+2eG0cxz0ghyU+mU1QkbsOwERMYlWQ==", "dev": true, "optionalDependencies": { "@types/trusted-types": "^2.0.7" @@ -660,9 +657,9 @@ } }, "node_modules/dotenv": { - "version": "17.2.2", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.2.tgz", - "integrity": "sha512-Sf2LSQP+bOlhKWWyhFsn0UsfdK/kCWRv1iuA2gXAwt3dyNabr6QSj00I2V10pidqz69soatm9ZwZvpQMTIOd5Q==", + "version": "17.2.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-17.2.3.tgz", + "integrity": "sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==", "dev": true, "engines": { "node": ">=12" @@ -952,9 +949,9 @@ } }, "node_modules/express/node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { "ms": "^2.1.3" @@ -992,9 +989,9 @@ } }, "node_modules/finalhandler/node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { "ms": "^2.1.3" @@ -1340,20 +1337,20 @@ "integrity": "sha512-Ks/IoX00TtClbGQr4TWXemAnktAQvYB7HzcCxDGqEZU6oCmb2INHuOoKxbtR+HFkmYWBKv/dOZtGRiAjDhj92g==" }, "node_modules/jsdom": { - "version": "27.0.0", - "resolved": 
"https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz", - "integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==", + "version": "27.0.1", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.1.tgz", + "integrity": "sha512-SNSQteBL1IlV2zqhwwolaG9CwhIhTvVHWg3kTss/cLE7H/X4644mtPQqYvCfsSrGQWt9hSZcgOXX8bOZaMN+kA==", "dev": true, "dependencies": { - "@asamuzakjp/dom-selector": "^6.5.4", - "cssstyle": "^5.3.0", + "@asamuzakjp/dom-selector": "^6.7.2", + "cssstyle": "^5.3.1", "data-urls": "^6.0.0", - "decimal.js": "^10.5.0", + "decimal.js": "^10.6.0", "html-encoding-sniffer": "^4.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", - "parse5": "^7.3.0", + "parse5": "^8.0.0", "rrweb-cssom": "^0.8.0", "saxes": "^6.0.0", "symbol-tree": "^3.2.4", @@ -1362,8 +1359,8 @@ "webidl-conversions": "^8.0.0", "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", - "whatwg-url": "^15.0.0", - "ws": "^8.18.2", + "whatwg-url": "^15.1.0", + "ws": "^8.18.3", "xml-name-validator": "^5.0.0" }, "engines": { @@ -1426,9 +1423,9 @@ "integrity": "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==" }, "node_modules/lru-cache": { - "version": "11.2.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz", - "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==", + "version": "11.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz", + "integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==", "dev": true, "engines": { "node": "20 || >=22" @@ -1450,9 +1447,9 @@ }, "node_modules/maildev": { "name": "@timshel_npm/maildev", - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/@timshel_npm/maildev/-/maildev-3.2.3.tgz", - "integrity": "sha512-CNxMz4Obw7nL8MZbx4y1YUFeqqAQk+Qwm51tcBV5lRBotMlXKeYhuEcayb1v66nUwq832bUfKF4hyQpJixFZrw==", + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@timshel_npm/maildev/-/maildev-3.2.5.tgz", + "integrity": "sha512-suWQu2s2kmO+MXtNJYW9peklznhd+aorIUb4tSNrfaKoEJjDa3vLXTvWf+3cb67o4Yv4Z6nPeKdMTCDZVn/Nyw==", "dev": true, "dependencies": { "@types/mailparser": "3.4.6", @@ -1461,13 +1458,13 @@ "commander": "14.0.1", "compression": "1.8.1", "cors": "2.8.5", - "dompurify": "3.2.7", + "dompurify": "3.3.0", "express": "5.1.0", - "jsdom": "27.0.0", - "mailparser": "3.7.4", + "jsdom": "27.0.1", + "mailparser": "3.7.5", "mime": "4.1.0", - "nodemailer": "7.0.6", - "smtp-server": "3.14.0", + "nodemailer": "7.0.9", + "smtp-server": "3.15.0", "socket.io": "4.8.1", "wildstring": "1.0.9" }, @@ -1479,36 +1476,44 @@ } }, "node_modules/mailparser": { - "version": "3.7.4", - "resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.7.4.tgz", - "integrity": "sha512-Beh4yyR4jLq3CZZ32asajByrXnW8dLyKCAQD3WvtTiBnMtFWhxO+wa93F6sJNjDmfjxXs4NRNjw3XAGLqZR3Vg==", + "version": "3.7.5", + "resolved": "https://registry.npmjs.org/mailparser/-/mailparser-3.7.5.tgz", + "integrity": "sha512-o59RgZC+4SyCOn4xRH1mtRiZ1PbEmi6si6Ufnd3tbX/V9zmZN1qcqu8xbXY62H6CwIclOT3ppm5u/wV2nujn4g==", "dev": true, "dependencies": { "encoding-japanese": "2.2.0", "he": "1.2.0", "html-to-text": "9.0.5", - "iconv-lite": "0.6.3", + "iconv-lite": "0.7.0", "libmime": "5.3.7", "linkify-it": "5.0.0", - "mailsplit": "5.4.5", - "nodemailer": "7.0.4", + "mailsplit": "5.4.6", + "nodemailer": "7.0.9", "punycode.js": 
"2.3.1", - "tlds": "1.259.0" + "tlds": "1.260.0" } }, - "node_modules/mailparser/node_modules/nodemailer": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.4.tgz", - "integrity": "sha512-9O00Vh89/Ld2EcVCqJ/etd7u20UhME0f/NToPfArwPEe1Don1zy4mAIz6ariRr7mJ2RDxtaDzN0WJVdVXPtZaw==", + "node_modules/mailparser/node_modules/iconv-lite": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", + "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, "engines": { - "node": ">=6.0.0" + "node": ">=0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/mailsplit": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/mailsplit/-/mailsplit-5.4.5.tgz", - "integrity": "sha512-oMfhmvclR689IIaQmIcR5nODnZRRVwAKtqFT407TIvmhX2OLUBnshUTcxzQBt3+96sZVDud9NfSe1NxAkUNXEQ==", + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/mailsplit/-/mailsplit-5.4.6.tgz", + "integrity": "sha512-M+cqmzaPG/mEiCDmqQUz8L177JZLZmXAUpq38owtpq2xlXlTSw+kntnxRt2xsxVFFV6+T8Mj/U0l5s7s6e0rNw==", + "deprecated": "This package has been renamed to @zone-eu/mailsplit. Please update your dependencies.", "dev": true, "dependencies": { "libbase64": "1.3.0", @@ -1595,9 +1600,9 @@ "dev": true }, "node_modules/mysql2": { - "version": "3.15.0", - "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.15.0.tgz", - "integrity": "sha512-tT6pomf5Z/I7Jzxu8sScgrYBMK9bUFWd7Kbo6Fs1L0M13OOIJ/ZobGKS3Z7tQ8Re4lj+LnLXIQVZZxa3fhYKzA==", + "version": "3.15.3", + "resolved": "https://registry.npmjs.org/mysql2/-/mysql2-3.15.3.tgz", + "integrity": "sha512-FBrGau0IXmuqg4haEZRBfHNWB5mUARw6hNwPDXXGg0XzVJ50mr/9hb267lvpVMnhZ1FON3qNd4Xfcez1rbFwSg==", "dependencies": { "aws-ssl-profiles": "^1.1.1", "denque": "^2.1.0", @@ -1647,25 +1652,6 @@ "node": ">=12" } }, - "node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "peer": true, - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, "node_modules/negotiator": { "version": "0.6.4", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", @@ -1676,9 +1662,9 @@ } }, "node_modules/nodemailer": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.6.tgz", - "integrity": "sha512-F44uVzgwo49xboqbFgBGkRaiMgtoBrBEWCVincJPK9+S9Adkzt/wXCLKbf7dxucmxfTI5gHGB+bEmdyzN6QKjw==", + "version": "7.0.9", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.9.tgz", + "integrity": "sha512-9/Qm0qXIByEP8lEV2qOqcAW7bRpL8CR9jcTwk3NBnHJNmP9fIJ86g2fgmIXqHY+nj55ZEMwWqYAT2QTDpRUYiQ==", "dev": true, "engines": { "node": ">=6.0.0" @@ -1747,9 +1733,9 @@ } }, "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz", + "integrity": 
"sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==", "dev": true, "dependencies": { "entities": "^6.0.0" @@ -1793,13 +1779,12 @@ } }, "node_modules/path-to-regexp": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.3.0.tgz", - "integrity": "sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" + "engines": { + "node": ">=16" } }, "node_modules/peberminta": { @@ -1892,20 +1877,13 @@ "split2": "^4.1.0" } }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "dev": true, - "peer": true - }, "node_modules/playwright": { - "version": "1.55.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.1.tgz", - "integrity": "sha512-cJW4Xd/G3v5ovXtJJ52MAOclqeac9S/aGGgRzLabuF8TnIb6xHvMzKIa6JmrRzUkeXJgfL1MhukP0NK6l39h3A==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz", + "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==", "dev": true, "dependencies": { - "playwright-core": "1.55.1" + "playwright-core": "1.56.1" }, "bin": { "playwright": "cli.js" @@ -1918,9 +1896,9 @@ } }, "node_modules/playwright-core": { - "version": "1.55.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.1.tgz", - "integrity": "sha512-Z6Mh9mkwX+zxSlHqdr5AOcJnfp+xUWLCt9uKV18fhzA8eyxUd8NUWzAjxUh55RZKSYwDGX0cfaySdhZJGMoJ+w==", + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz", + "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==", "dev": true, "bin": { "playwright-core": "cli.js" @@ -1929,35 +1907,6 @@ "node": ">=18" } }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "peer": true, - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, "node_modules/postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -2049,34 +1998,18 @@ } }, "node_modules/raw-body": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.1.tgz", - "integrity": "sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", + "integrity": 
"sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", "dev": true, "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", - "iconv-lite": "0.7.0", + "iconv-lite": "0.6.3", "unpipe": "1.0.0" }, "engines": { - "node": ">= 0.10" - } - }, - "node_modules/raw-body/node_modules/iconv-lite": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.0.tgz", - "integrity": "sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==", - "dev": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" + "node": ">= 0.8" } }, "node_modules/require-from-string": { @@ -2105,9 +2038,9 @@ } }, "node_modules/router/node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { "ms": "^2.1.3" @@ -2205,9 +2138,9 @@ } }, "node_modules/send/node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "dependencies": { "ms": "^2.1.3" @@ -2326,29 +2259,20 @@ } }, "node_modules/smtp-server": { - "version": "3.14.0", - "resolved": "https://registry.npmjs.org/smtp-server/-/smtp-server-3.14.0.tgz", - "integrity": "sha512-cEw/hdIY+xw1pkbQbQ23hvnm9kNABAsgYB+jJYGkzAynZxJ2VB9aqC6JhB1vpdDnqan7C7AL3qHYRGwz5eD6BQ==", + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/smtp-server/-/smtp-server-3.15.0.tgz", + "integrity": "sha512-yv945vk0/xcukSKAoIhGz6GOlcXoCyGQH2w9IlLrTKk3SJiOBH9bcO6tD0ILTZYJsMqRa6OTRZAyqeuLXkv59Q==", "dev": true, "dependencies": { "base32.js": "0.1.0", "ipv6-normalize": "1.0.1", - "nodemailer": "7.0.3", + "nodemailer": "7.0.9", "punycode.js": "2.3.1" }, "engines": { "node": ">=12.0.0" } }, - "node_modules/smtp-server/node_modules/nodemailer": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.3.tgz", - "integrity": "sha512-Ajq6Sz1x7cIK3pN6KesGTah+1gnwMnx5gKl3piQlQQE/PwyJ4Mbc8is2psWYxK3RJTVeqsDaCv8ZzXLCDHMTZw==", - "dev": true, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/socket.io": { "version": "4.8.1", "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.1.tgz", @@ -2564,30 +2488,30 @@ "dev": true }, "node_modules/tlds": { - "version": "1.259.0", - "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.259.0.tgz", - "integrity": "sha512-AldGGlDP0PNgwppe2quAvuBl18UcjuNtOnDuUkqhd6ipPqrYYBt3aTxK1QTsBVknk97lS2JcafWMghjGWFtunw==", + "version": "1.260.0", + "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.260.0.tgz", + "integrity": "sha512-78+28EWBhCEE7qlyaHA9OR3IPvbCLiDh3Ckla593TksfFc9vfTsgvH7eS+dr3o9qr31gwGbogcI16yN91PoRjQ==", "dev": true, "bin": { "tlds": "bin.js" } }, "node_modules/tldts": { - 
"version": "7.0.16", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.16.tgz", - "integrity": "sha512-5bdPHSwbKTeHmXrgecID4Ljff8rQjv7g8zKQPkCozRo2HWWni+p310FSn5ImI+9kWw9kK4lzOB5q/a6iv0IJsw==", + "version": "7.0.17", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.17.tgz", + "integrity": "sha512-Y1KQBgDd/NUc+LfOtKS6mNsC9CCaH+m2P1RoIZy7RAPo3C3/t8X45+zgut31cRZtZ3xKPjfn3TkGTrctC2TQIQ==", "dev": true, "dependencies": { - "tldts-core": "^7.0.16" + "tldts-core": "^7.0.17" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.16", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.16.tgz", - "integrity": "sha512-XHhPmHxphLi+LGbH0G/O7dmUH9V65OY20R7vH8gETHsp5AZCjBk9l8sqmRKLaGOxnETU7XNSDUPtewAy/K6jbA==", + "version": "7.0.17", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.17.tgz", + "integrity": "sha512-DieYoGrP78PWKsrXr8MZwtQ7GLCUeLxihtjC1jZsW1DnvSMdKPitJSe8OSYDM2u5H6g3kWJZpePqkp43TfLh0g==", "dev": true }, "node_modules/toidentifier": { @@ -2644,9 +2568,9 @@ "dev": true }, "node_modules/undici-types": { - "version": "7.12.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz", - "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==", + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", "dev": true }, "node_modules/unpipe": { diff --git a/playwright/package.json b/playwright/package.json index 5b33b1ed..f47ec5dc 100644 --- a/playwright/package.json +++ b/playwright/package.json @@ -8,13 +8,13 @@ "author": "", "license": "ISC", "devDependencies": { - "@playwright/test": "1.55.1", - "dotenv": "17.2.2", + "@playwright/test": "1.56.1", + "dotenv": "17.2.3", "dotenv-expand": "12.0.3", - "maildev": "npm:@timshel_npm/maildev@^3.2.3" + "maildev": "npm:@timshel_npm/maildev@3.2.5" }, "dependencies": { - "mysql2": "3.15.0", + "mysql2": "3.15.3", "otpauth": "9.4.1", "pg": "8.16.3" } diff --git a/playwright/test.env b/playwright/test.env index d97c08d1..df182ebe 100644 --- a/playwright/test.env +++ b/playwright/test.env @@ -55,6 +55,7 @@ ROCKET_PORT=8003 DOMAIN=http://localhost:${ROCKET_PORT} LOG_LEVEL=info,oidcwarden::sso=debug LOGIN_RATELIMIT_MAX_BURST=100 +ADMIN_TOKEN=admin SMTP_SECURITY=off SMTP_PORT=${MAILDEV_SMTP_PORT} @@ -67,8 +68,8 @@ SSO_AUTHORITY=http://${KC_HTTP_HOST}:${KC_HTTP_PORT}/realms/${TEST_REALM} SSO_DEBUG_TOKENS=true # Custom web-vault build -# PW_WV_REPO_URL=https://github.com/dani-garcia/bw_web_builds.git -# PW_WV_COMMIT_HASH=a5f5390895516bce2f48b7baadb6dc399e5fe75a +# PW_VW_REPO_URL=https://github.com/vaultwarden/vw_web_builds.git +# PW_VW_COMMIT_HASH=b5f5b2157b9b64b5813bc334a75a277d0377b5d3 ########################### # Docker MariaDb container# diff --git a/playwright/tests/login.smtp.spec.ts b/playwright/tests/login.smtp.spec.ts index 2f782c14..87474b79 100644 --- a/playwright/tests/login.smtp.spec.ts +++ b/playwright/tests/login.smtp.spec.ts @@ -91,6 +91,9 @@ test('2fa', async ({ page }) => { await page.getByLabel(/Verification code/).fill(code); await page.getByRole('button', { name: 'Continue' }).click(); + await page.getByRole('button', { name: 'Add it later' }).click(); + await page.getByRole('link', { name: 'Skip to web app' }).click(); + await expect(page).toHaveTitle(/Vaults/); }) diff --git 
a/playwright/tests/organization.smtp.spec.ts b/playwright/tests/organization.smtp.spec.ts index 764f9017..35dfcdb1 100644 --- a/playwright/tests/organization.smtp.spec.ts +++ b/playwright/tests/organization.smtp.spec.ts @@ -57,15 +57,17 @@ test('invited with new account', async ({ page }) => { await expect(page).toHaveTitle(/Create account | Vaultwarden Web/); //await page.getByLabel('Name').fill(users.user2.name); - await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); - await page.getByLabel('Confirm new master password (').fill(users.user2.password); + await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password); + await page.getByLabel('Confirm master password (').fill(users.user2.password); await page.getByRole('button', { name: 'Create account' }).click(); await utils.checkNotification(page, 'Your new account has been created'); + await utils.checkNotification(page, 'Invitation accepted'); + await utils.ignoreExtension(page); + // Redirected to the vault await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); - await utils.checkNotification(page, 'You have been logged in!'); - await utils.checkNotification(page, 'Invitation accepted'); + // await utils.checkNotification(page, 'You have been logged in!'); }); await test.step('Check mails', async () => { @@ -91,9 +93,11 @@ test('invited with existing account', async ({ page }) => { await page.getByLabel('Master password').fill(users.user3.password); await page.getByRole('button', { name: 'Log in with master password' }).click(); + await utils.checkNotification(page, 'Invitation accepted'); + await utils.ignoreExtension(page); + // We are now in the default vault page await expect(page).toHaveTitle(/Vaultwarden Web/); - await utils.checkNotification(page, 'Invitation accepted'); await mail3Buffer.expect((m) => m.subject === 'New Device Logged In From Firefox'); await mail1Buffer.expect((m) => m.subject.includes('Invitation to Test accepted')); diff --git a/playwright/tests/setups/2fa.ts b/playwright/tests/setups/2fa.ts index 1406083e..d7936420 100644 --- a/playwright/tests/setups/2fa.ts +++ b/playwright/tests/setups/2fa.ts @@ -48,7 +48,7 @@ export async function activateEmail(test: Test, page: Page, user: { name: string await page.getByRole('menuitem', { name: 'Account settings' }).click(); await page.getByRole('link', { name: 'Security' }).click(); await page.getByRole('link', { name: 'Two-step login' }).click(); - await page.locator('bit-item').filter({ hasText: 'Email Email Enter a code sent' }).getByRole('button').click(); + await page.locator('bit-item').filter({ hasText: 'Enter a code sent to your email' }).getByRole('button').click(); await page.getByLabel('Master password (required)').fill(user.password); await page.getByRole('button', { name: 'Continue' }).click(); await page.getByRole('button', { name: 'Send email' }).click(); diff --git a/playwright/tests/setups/sso.ts b/playwright/tests/setups/sso.ts index 8998b6c7..6317f8b0 100644 --- a/playwright/tests/setups/sso.ts +++ b/playwright/tests/setups/sso.ts @@ -33,19 +33,21 @@ export async function logNewUser( await test.step('Create Vault account', async () => { await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); - await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); - await page.getByLabel('Confirm new master password (').fill(user.password); + await page.getByLabel('Master password (required)', { exact: true 
}).fill(user.password); + await page.getByLabel('Confirm master password (').fill(user.password); await page.getByRole('button', { name: 'Create account' }).click(); }); + await utils.checkNotification(page, 'Account successfully created!'); + await utils.checkNotification(page, 'Invitation accepted'); + + await utils.ignoreExtension(page); + await test.step('Default vault page', async () => { await expect(page).toHaveTitle(/Vaultwarden Web/); await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); }); - await utils.checkNotification(page, 'Account successfully created!'); - await utils.checkNotification(page, 'Invitation accepted'); - if( options.mailBuffer ){ let mailBuffer = options.mailBuffer; await test.step('Check emails', async () => { @@ -115,6 +117,8 @@ export async function logUser( await page.getByRole('button', { name: 'Unlock' }).click(); }); + await utils.ignoreExtension(page); + await test.step('Default vault page', async () => { await expect(page).toHaveTitle(/Vaultwarden Web/); await expect(page.getByTitle('All vaults', { exact: true })).toBeVisible(); diff --git a/playwright/tests/setups/user.ts b/playwright/tests/setups/user.ts index 45fd86a0..395196ae 100644 --- a/playwright/tests/setups/user.ts +++ b/playwright/tests/setups/user.ts @@ -17,15 +17,16 @@ export async function createAccount(test, page: Page, user: { email: string, nam await page.getByRole('button', { name: 'Continue' }).click(); // Vault finish Creation - await page.getByLabel('New master password (required)', { exact: true }).fill(user.password); - await page.getByLabel('Confirm new master password (').fill(user.password); + await page.getByLabel('Master password (required)', { exact: true }).fill(user.password); + await page.getByLabel('Confirm master password (').fill(user.password); await page.getByRole('button', { name: 'Create account' }).click(); await utils.checkNotification(page, 'Your new account has been created') + await utils.ignoreExtension(page); // We are now in the default vault page await expect(page).toHaveTitle('Vaults | Vaultwarden Web'); - await utils.checkNotification(page, 'You have been logged in!'); + // await utils.checkNotification(page, 'You have been logged in!'); if( mailBuffer ){ await mailBuffer.expect((m) => m.subject === "Welcome"); @@ -45,6 +46,8 @@ export async function logUser(test, page: Page, user: { email: string, password: await page.getByLabel('Master password').fill(user.password); await page.getByRole('button', { name: 'Log in with master password' }).click(); + await utils.ignoreExtension(page); + // We are now in the default vault page await expect(page).toHaveTitle(/Vaultwarden Web/); diff --git a/playwright/tests/sso_organization.smtp.spec.ts b/playwright/tests/sso_organization.smtp.spec.ts index 45ef5ada..92813f72 100644 --- a/playwright/tests/sso_organization.smtp.spec.ts +++ b/playwright/tests/sso_organization.smtp.spec.ts @@ -67,16 +67,17 @@ test('invited with new account', async ({ page }) => { await test.step('Create Vault account', async () => { await expect(page.getByRole('heading', { name: 'Join organisation' })).toBeVisible(); - await page.getByLabel('New master password (required)', { exact: true }).fill(users.user2.password); - await page.getByLabel('Confirm new master password (').fill(users.user2.password); + await page.getByLabel('Master password (required)', { exact: true }).fill(users.user2.password); + await page.getByLabel('Confirm master password (').fill(users.user2.password); await page.getByRole('button', { name: 
'Create account' }).click(); + + await utils.checkNotification(page, 'Account successfully created!'); + await utils.checkNotification(page, 'Invitation accepted'); + await utils.ignoreExtension(page); }); await test.step('Default vault page', async () => { await expect(page).toHaveTitle(/Vaultwarden Web/); - - await utils.checkNotification(page, 'Account successfully created!'); - await utils.checkNotification(page, 'Invitation accepted'); }); await test.step('Check mails', async () => { @@ -107,11 +108,13 @@ test('invited with existing account', async ({ page }) => { await expect(page).toHaveTitle('Vaultwarden Web'); await page.getByLabel('Master password').fill(users.user3.password); await page.getByRole('button', { name: 'Unlock' }).click(); + + await utils.checkNotification(page, 'Invitation accepted'); + await utils.ignoreExtension(page); }); await test.step('Default vault page', async () => { await expect(page).toHaveTitle(/Vaultwarden Web/); - await utils.checkNotification(page, 'Invitation accepted'); }); await test.step('Check mails', async () => { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index a6c92998..568d0faa 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.91.0" +channel = "1.92.0" components = [ "rustfmt", "clippy" ] profile = "minimal" diff --git a/src/api/admin.rs b/src/api/admin.rs index 8b6101fb..badfaa3a 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -23,7 +23,7 @@ use crate::{ backup_sqlite, get_sql_server_version, models::{ Attachment, Cipher, Collection, Device, Event, EventType, Group, Invitation, Membership, MembershipId, - MembershipType, OrgPolicy, OrgPolicyErr, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId, + MembershipType, OrgPolicy, Organization, OrganizationId, SsoUser, TwoFactor, User, UserId, }, DbConn, DbConnType, ACTIVE_DB_TYPE, }, @@ -31,7 +31,7 @@ use crate::{ http_client::make_http_request, mail, util::{ - container_base_image, format_naive_datetime_local, get_display_size, get_web_vault_version, + container_base_image, format_naive_datetime_local, get_active_web_release, get_display_size, is_running_in_container, NumberOrString, }, CONFIG, VERSION, @@ -556,23 +556,9 @@ async fn update_membership_type(data: Json, token: AdminToke } } + member_to_edit.atype = new_type; // This check is also done at api::organizations::{accept_invite, _confirm_invite, _activate_member, edit_member}, update_membership_type - // It returns different error messages per function. 
- if new_type < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &member_to_edit.org_uuid, true, &conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?; - } else { - err!("You cannot modify this user to this type because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot modify this user to this type because it is a member of an organization which forbids it"); - } - } - } + OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?; log_event( EventType::OrganizationUserUpdated as i32, @@ -585,7 +571,6 @@ async fn update_membership_type(data: Json, token: AdminToke ) .await; - member_to_edit.atype = new_type; member_to_edit.save(&conn).await } @@ -704,6 +689,26 @@ async fn get_ntp_time(has_http_access: bool) -> String { String::from("Unable to fetch NTP time.") } +fn web_vault_compare(active: &str, latest: &str) -> i8 { + use semver::Version; + use std::cmp::Ordering; + + let active_semver = Version::parse(active).unwrap_or_else(|e| { + warn!("Unable to parse active web-vault version '{active}': {e}"); + Version::parse("2025.1.1").unwrap() + }); + let latest_semver = Version::parse(latest).unwrap_or_else(|e| { + warn!("Unable to parse latest web-vault version '{latest}': {e}"); + Version::parse("2025.1.1").unwrap() + }); + + match active_semver.cmp(&latest_semver) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + } +} + #[get("/diagnostics")] async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult> { use chrono::prelude::*; @@ -723,32 +728,21 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> A _ => "Unable to resolve domain name.".to_string(), }; - let (latest_release, latest_commit, latest_web_build) = get_release_info(has_http_access).await; + let (latest_vw_release, latest_vw_commit, latest_web_release) = get_release_info(has_http_access).await; + let active_web_release = get_active_web_release(); + let web_vault_compare = web_vault_compare(&active_web_release, &latest_web_release); let ip_header_name = &ip_header.0.unwrap_or_default(); - // Get current running versions - let web_vault_version = get_web_vault_version(); - - // Check if the running version is newer than the latest stable released version - let web_vault_pre_release = if let Ok(web_ver_match) = semver::VersionReq::parse(&format!(">{latest_web_build}")) { - web_ver_match.matches( - &semver::Version::parse(&web_vault_version).unwrap_or_else(|_| semver::Version::parse("2025.1.1").unwrap()), - ) - } else { - error!("Unable to parse latest_web_build: '{latest_web_build}'"); - false - }; - let diagnostics_json = json!({ "dns_resolved": dns_resolved, "current_release": VERSION, - "latest_release": latest_release, - "latest_commit": latest_commit, + "latest_release": latest_vw_release, + "latest_commit": latest_vw_commit, "web_vault_enabled": &CONFIG.web_vault_enabled(), - "web_vault_version": web_vault_version, - "latest_web_build": latest_web_build, - "web_vault_pre_release": web_vault_pre_release, + "active_web_release": active_web_release, + "latest_web_release": latest_web_release, + "web_vault_compare": web_vault_compare, "running_within_container": running_within_container, "container_base_image": if running_within_container { container_base_image() } else { "Not applicable" }, 
"has_http_access": has_http_access, @@ -859,3 +853,32 @@ impl<'r> FromRequest<'r> for AdminToken { }) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn validate_web_vault_compare() { + // web_vault_compare(active, latest) + // Test normal versions + assert!(web_vault_compare("2025.12.0", "2025.12.1") == -1); + assert!(web_vault_compare("2025.12.1", "2025.12.1") == 0); + assert!(web_vault_compare("2025.12.2", "2025.12.1") == 1); + + // Test patched/+build.n versions + // Newer latest version + assert!(web_vault_compare("2025.12.0+build.1", "2025.12.1") == -1); + assert!(web_vault_compare("2025.12.1", "2025.12.1+build.1") == -1); + assert!(web_vault_compare("2025.12.0+build.1", "2025.12.1+build.1") == -1); + assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1+build.2") == -1); + // Equal versions + assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1+build.1") == 0); + assert!(web_vault_compare("2025.12.2+build.2", "2025.12.2+build.2") == 0); + // Newer active version + assert!(web_vault_compare("2025.12.1+build.1", "2025.12.1") == 1); + assert!(web_vault_compare("2025.12.2", "2025.12.1+build.1") == 1); + assert!(web_vault_compare("2025.12.2+build.1", "2025.12.1+build.1") == 1); + assert!(web_vault_compare("2025.12.1+build.3", "2025.12.1+build.2") == 1); + } +} diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index 98a8cef4..f5c32acb 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -66,6 +66,7 @@ pub fn routes() -> Vec { put_device_token, put_clear_device_token, post_clear_device_token, + get_tasks, post_auth_request, get_auth_request, put_auth_request, @@ -75,12 +76,16 @@ pub fn routes() -> Vec { ] } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Eq, PartialEq)] #[serde(rename_all = "camelCase")] pub struct KDFData { + #[serde(alias = "kdfType")] kdf: i32, + #[serde(alias = "iterations")] kdf_iterations: i32, + #[serde(alias = "memory")] kdf_memory: Option, + #[serde(alias = "parallelism")] kdf_parallelism: Option, } @@ -374,7 +379,7 @@ async fn post_set_password(data: Json, headers: Headers, conn: } if let Some(identifier) = data.org_identifier { - if identifier != crate::sso::FAKE_IDENTIFIER { + if identifier != crate::sso::FAKE_IDENTIFIER && identifier != crate::api::admin::FAKE_ADMIN_UUID { let org = match Organization::find_by_uuid(&identifier.into(), &conn).await { None => err!("Failed to retrieve the associated organization"), Some(org) => org, @@ -401,8 +406,8 @@ async fn post_set_password(data: Json, headers: Headers, conn: user.save(&conn).await?; Ok(Json(json!({ - "Object": "set-password", - "CaptchaBypassToken": "", + "object": "set-password", + "captchaBypassToken": "", }))) } @@ -545,17 +550,6 @@ async fn post_password(data: Json, headers: Headers, conn: DbCon save_result } -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct ChangeKdfData { - #[serde(flatten)] - kdf: KDFData, - - master_password_hash: String, - new_master_password_hash: String, - key: String, -} - fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult { if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { err!("PBKDF2 KDF iterations must be at least 100000.") @@ -591,18 +585,61 @@ fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult { Ok(()) } +#[allow(dead_code)] +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct AuthenticationData { + salt: String, + kdf: KDFData, + master_password_authentication_hash: String, +} + +#[allow(dead_code)] 
+#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct UnlockData { + salt: String, + kdf: KDFData, + master_key_wrapped_user_key: String, +} + +#[allow(dead_code)] +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct ChangeKdfData { + new_master_password_hash: String, + key: String, + authentication_data: AuthenticationData, + unlock_data: UnlockData, + master_password_hash: String, +} + #[post("/accounts/kdf", data = "")] async fn post_kdf(data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { let data: ChangeKdfData = data.into_inner(); - let mut user = headers.user; - if !user.check_valid_password(&data.master_password_hash) { + if !headers.user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } - set_kdf_data(&mut user, &data.kdf)?; + if data.authentication_data.kdf != data.unlock_data.kdf { + err!("KDF settings must be equal for authentication and unlock") + } - user.set_password(&data.new_master_password_hash, Some(data.key), true, None); + if headers.user.email != data.authentication_data.salt || headers.user.email != data.unlock_data.salt { + err!("Invalid master password salt") + } + + let mut user = headers.user; + + set_kdf_data(&mut user, &data.unlock_data.kdf)?; + + user.set_password( + &data.authentication_data.master_password_authentication_hash, + Some(data.unlock_data.master_key_wrapped_user_key), + true, + None, + ); let save_result = user.save(&conn).await; nt.send_logout(&user, Some(headers.device.uuid.clone()), &conn).await; @@ -1373,7 +1410,7 @@ async fn put_device_token(device_id: DeviceId, data: Json, headers: H } device.push_token = Some(token); - if let Err(e) = device.save(&conn).await { + if let Err(e) = device.save(true, &conn).await { err!(format!("An error occurred while trying to save the device push token: {e}")); } @@ -1409,6 +1446,14 @@ async fn post_clear_device_token(device_id: DeviceId, conn: DbConn) -> EmptyResu put_clear_device_token(device_id, conn).await } +#[get("/tasks")] +fn get_tasks(_client_headers: ClientHeaders) -> JsonResult { + Ok(Json(json!({ + "data": [], + "object": "list" + }))) +} + #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] struct AuthRequestRequest { diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 74274a3a..d5f244f4 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -159,7 +159,28 @@ async fn sync(data: SyncData, headers: Headers, client_version: Option JsonResult { let mut data: ShareCipherData = data.into_inner(); - // Check if there are one more more collections selected when this cipher is part of an organization. - // err if this is not the case before creating an empty cipher. - if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() { - err!("You must select at least one collection."); - } - // This check is usually only needed in update_cipher_from_data(), but we // need it here as well to avoid creating an empty cipher in the call to // cipher.save() below. @@ -324,7 +342,11 @@ async fn post_ciphers_create( // or otherwise), we can just ignore this field entirely. data.cipher.last_known_revision_date = None; - share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt, None).await + let res = share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt, None).await; + if res.is_err() { + cipher.delete(&conn).await?; + } + res } /// Called when creating a new user-owned cipher. 
diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index 84b00148..1897f995 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -47,24 +47,11 @@ pub fn routes() -> Vec { #[get("/emergency-access/trusted")] async fn get_contacts(headers: Headers, conn: DbConn) -> Json { - if !CONFIG.emergency_access_allowed() { - return Json(json!({ - "data": [{ - "id": "", - "status": 2, - "type": 0, - "waitTimeDays": 0, - "granteeId": "", - "email": "", - "name": "NOTE: Emergency Access is disabled!", - "object": "emergencyAccessGranteeDetails", - - }], - "object": "list", - "continuationToken": null - })); - } - let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await; + let emergency_access_list = if CONFIG.emergency_access_allowed() { + EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await + } else { + Vec::new() + }; let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len()); for ea in emergency_access_list { if let Some(grantee) = ea.to_json_grantee_details(&conn).await { diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index d5ca0cc9..dc7f4628 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -53,7 +53,7 @@ use crate::{ api::{EmptyResult, JsonResult, Notify, UpdateType}, auth::Headers, db::{ - models::{Membership, MembershipStatus, MembershipType, OrgPolicy, OrgPolicyErr, Organization, User}, + models::{Membership, MembershipStatus, OrgPolicy, Organization, User}, DbConn, }, error::Error, @@ -74,11 +74,11 @@ const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json"); #[get("/settings/domains")] fn get_eq_domains(headers: Headers) -> Json { - _get_eq_domains(headers, false) + _get_eq_domains(&headers, false) } -fn _get_eq_domains(headers: Headers, no_excluded: bool) -> Json { - let user = headers.user; +fn _get_eq_domains(headers: &Headers, no_excluded: bool) -> Json { + let user = &headers.user; use serde_json::from_str; let equivalent_domains: Vec> = from_str(&user.equivalent_domains).unwrap(); @@ -217,7 +217,8 @@ fn config() -> Json { // We should make sure that we keep this updated when we support the new server features // Version history: // - Individual cipher key encryption: 2024.2.0 - "version": "2025.6.0", + // - Mobile app support for MasterPasswordUnlockData: 2025.8.0 + "version": "2025.12.0", "gitHash": option_env!("GIT_REV"), "server": { "name": "Vaultwarden", @@ -269,27 +270,12 @@ async fn accept_org_invite( err!("User already accepted the invitation"); } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. 
- if member.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member.user_uuid, &member.org_uuid, false, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if crate::CONFIG.email_2fa_auto_fallback() { - two_factor::email::activate_email_2fa(user, conn).await?; - } else { - err!("You cannot join this organization until you enable two-step login on your user account"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot join this organization because you are a member of an organization which forbids it"); - } - } - } - member.status = MembershipStatus::Accepted as i32; member.reset_password_key = reset_password_key; + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + OrgPolicy::check_user_allowed(&member, "join", conn).await?; + member.save(conn).await?; if crate::CONFIG.mail_enabled() { diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index b8715ab7..285945eb 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -15,7 +15,7 @@ use crate::{ models::{ Cipher, CipherId, Collection, CollectionCipher, CollectionGroup, CollectionId, CollectionUser, EventType, Group, GroupId, GroupUser, Invitation, Membership, MembershipId, MembershipStatus, MembershipType, - OrgPolicy, OrgPolicyErr, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId, + OrgPolicy, OrgPolicyType, Organization, OrganizationApiKey, OrganizationId, User, UserId, }, DbConn, }, @@ -195,8 +195,7 @@ async fn create_organization(headers: Headers, data: Json, conn: DbConn } let data: OrgData = data.into_inner(); - let (private_key, public_key) = if data.keys.is_some() { - let keys: OrgKeyData = data.keys.unwrap(); + let (private_key, public_key) = if let Some(keys) = data.keys { (Some(keys.encrypted_private_key), Some(keys.public_key)) } else { (None, None) @@ -370,9 +369,9 @@ async fn get_auto_enroll_status(identifier: &str, headers: Headers, conn: DbConn }; Ok(Json(json!({ - "Id": id, - "Identifier": identifier, - "ResetPasswordEnabled": rp_auto_enroll, + "id": id, + "identifier": identifier, + "resetPasswordEnabled": rp_auto_enroll, }))) } @@ -1463,27 +1462,12 @@ async fn _confirm_invite( err!("User in invalid state") } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. 
- if member_to_confirm.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_confirm.user_uuid, org_id, true, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_confirm.user_uuid, conn).await?; - } else { - err!("You cannot confirm this user because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot confirm this user because they are a member of an organization which forbids it"); - } - } - } - member_to_confirm.status = MembershipStatus::Confirmed as i32; member_to_confirm.akey = key.to_string(); + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + OrgPolicy::check_user_allowed(&member_to_confirm, "confirm", conn).await?; + log_event( EventType::OrganizationUserConfirmed as i32, &member_to_confirm.uuid, @@ -1631,27 +1615,13 @@ async fn edit_member( } } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. - if new_type < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member_to_edit.user_uuid, &org_id, true, &conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member_to_edit.user_uuid, &conn).await?; - } else { - err!("You cannot modify this user to this type because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot modify this user to this type because they are a member of an organization which forbids it"); - } - } - } - member_to_edit.access_all = access_all; member_to_edit.atype = new_type as i32; + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + // We need to perform the check after changing the type since `admin` is exempt. + OrgPolicy::check_user_allowed(&member_to_edit, "modify", &conn).await?; + // Delete all the odd collections for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &member_to_edit.user_uuid, &conn).await { c.delete(&conn).await?; @@ -2086,8 +2056,6 @@ async fn get_policy(org_id: OrganizationId, pol_type: i32, headers: AdminHeaders #[derive(Deserialize)] struct PolicyData { enabled: bool, - #[serde(rename = "type")] - _type: i32, data: Option, } @@ -2154,14 +2122,14 @@ async fn put_policy( // When enabling the SingleOrg policy, remove this org's members that are members of other orgs if pol_type_enum == OrgPolicyType::SingleOrg && data.enabled { - for member in Membership::find_by_org(&org_id, &conn).await.into_iter() { + for mut member in Membership::find_by_org(&org_id, &conn).await.into_iter() { // Policy only applies to non-Owner/non-Admin members who have accepted joining the org // Exclude invited and revoked users when checking for this policy. // Those users will not be allowed to accept or be activated because of the policy checks done there. - // We check if the count is larger then 1, because it includes this organization also. 
if member.atype < MembershipType::Admin && member.status != MembershipStatus::Invited as i32 - && Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &conn).await > 1 + && Membership::count_accepted_and_confirmed_by_user(&member.user_uuid, &member.org_uuid, &conn).await + > 0 { if CONFIG.mail_enabled() { let org = Organization::find_by_uuid(&member.org_uuid, &conn).await.unwrap(); @@ -2181,7 +2149,8 @@ ) .await; - member.delete(&conn).await?; + member.revoke(); + member.save(&conn).await?; } } } @@ -2628,25 +2597,10 @@ err!("Only owners can restore other owners") } - // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type - // It returns different error messages per function. - if member.atype < MembershipType::Admin { - match OrgPolicy::is_user_allowed(&member.user_uuid, org_id, false, conn).await { - Ok(_) => {} - Err(OrgPolicyErr::TwoFactorMissing) => { - if CONFIG.email_2fa_auto_fallback() { - two_factor::email::find_and_activate_email_2fa(&member.user_uuid, conn).await?; - } else { - err!("You cannot restore this user because they have not setup 2FA"); - } - } - Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot restore this user because they are a member of an organization which forbids it"); - } - } - } - member.restore(); + // This check is also done at accept_invite, _confirm_invite, _activate_member, edit_member, admin::update_membership_type + // This check needs to be done after restoring to work with the correct status + OrgPolicy::check_user_allowed(&member, "restore", conn).await?; member.save(conn).await?; log_event( diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index debd697b..10bf85be 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -568,7 +568,7 @@ async fn post_access_file( async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?; - if operator.info().scheme() == opendal::Scheme::Fs { + if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token = crate::auth::encode_jwt(&token_claims); diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 63e4508b..b8724cf1 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -26,12 +26,8 @@ pub fn routes() -> Vec { struct SendEmailLoginData { #[serde(alias = "DeviceIdentifier")] device_identifier: DeviceId, - - #[allow(unused)] #[serde(alias = "Email")] email: Option, - - #[allow(unused)] #[serde(alias = "MasterPasswordHash")] master_password_hash: Option, } @@ -42,20 +38,40 @@ struct SendEmailLoginData { async fn send_email_login(data: Json, conn: DbConn) -> EmptyResult { let data: SendEmailLoginData = data.into_inner(); - use crate::db::models::User; + if !CONFIG._enable_email_2fa() { + err!("Email 2FA is disabled") + } // Get the user - let Some(user) = User::find_by_device_id(&data.device_identifier, &conn).await else { - err!("Cannot find user. 
Try again.") + let email = match &data.email { + Some(email) if !email.is_empty() => Some(email), + _ => None, }; + let user = if let Some(email) = email { + let Some(master_password_hash) = &data.master_password_hash else { + err!("No password hash has been submitted.") + }; - if !CONFIG._enable_email_2fa() { - err!("Email 2FA is disabled") - } + let Some(user) = User::find_by_mail(email, &conn).await else { + err!("Username or password is incorrect. Try again.") + }; - send_token(&user.uuid, &conn).await?; + // Check password + if !user.check_valid_password(master_password_hash) { + err!("Username or password is incorrect. Try again.") + } - Ok(()) + user + } else { + // SSO login only sends device id, so we get the user by the most recently used device + let Some(user) = User::find_by_device_for_email2fa(&data.device_identifier, &conn).await else { + err!("Username or password is incorrect. Try again.") + }; + + user + }; + + send_token(&user.uuid, &conn).await } /// Generate the token, save the data for later verification and send email to user diff --git a/src/api/icons.rs b/src/api/icons.rs index 4e2aef1c..35a1de30 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -82,19 +82,19 @@ static ICON_SIZE_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"(?x)(\d+ // It is used to prevent sending a specific header which breaks icon downloads. // If this function needs to be renamed, also adjust the code in `util.rs` #[get("//icon.png")] -fn icon_external(domain: &str) -> Option { +fn icon_external(domain: &str) -> Cached> { if !is_valid_domain(domain) { warn!("Invalid domain: {domain}"); - return None; + return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); } if should_block_address(domain) { warn!("Blocked address: {domain}"); - return None; + return Cached::ttl(None, CONFIG.icon_cache_negttl(), true); } let url = CONFIG._icon_service_url().replace("{}", domain); - match CONFIG.icon_redirect_code() { + let redir = match CONFIG.icon_redirect_code() { 301 => Some(Redirect::moved(url)), // legacy permanent redirect 302 => Some(Redirect::found(url)), // legacy temporary redirect 307 => Some(Redirect::temporary(url)), @@ -103,7 +103,8 @@ fn icon_external(domain: &str) -> Option { error!("Unexpected redirect code {}", CONFIG.icon_redirect_code()); None } - } + }; + Cached::ttl(redir, CONFIG.icon_cache_ttl(), true) } #[get("//icon.png")] @@ -141,7 +142,7 @@ async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { /// This does some manual checks and makes use of Url to do some basic checking. /// domains can't be larger then 63 characters (not counting multiple subdomains) according to the RFC's, but we limit the total size to 255. fn is_valid_domain(domain: &str) -> bool { - const ALLOWED_CHARS: &str = "_-."; + const ALLOWED_CHARS: &str = "-."; // If parsing the domain fails using Url, it will not work with reqwest. 
if let Err(parse_error) = url::Url::parse(format!("https://{domain}").as_str()) { @@ -796,8 +797,11 @@ impl Emitter for FaviconEmitter { fn emit_current_tag(&mut self) -> Option { self.flush_current_attribute(true); self.last_start_tag.clear(); - if self.current_token.is_some() && !self.current_token.as_ref().unwrap().closing { - self.last_start_tag.extend(&*self.current_token.as_ref().unwrap().tag.name); + match &self.current_token { + Some(token) if !token.closing => { + self.last_start_tag.extend(&*token.tag.name); + } + _ => {} } html5gum::naive_next_state(&self.last_start_tag) } diff --git a/src/api/identity.rs b/src/api/identity.rs index 92b6c1e4..722b3eab 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -1,4 +1,4 @@ -use chrono::{NaiveDateTime, Utc}; +use chrono::Utc; use num_traits::FromPrimitive; use rocket::{ form::{Form, FromForm}, @@ -24,14 +24,14 @@ use crate::{ auth::{generate_organization_api_key_login_claims, AuthMethod, ClientHeaders, ClientIp, ClientVersion}, db::{ models::{ - AuthRequest, AuthRequestId, Device, DeviceId, EventType, Invitation, OrganizationApiKey, OrganizationId, - SsoNonce, SsoUser, TwoFactor, TwoFactorIncomplete, TwoFactorType, User, UserId, + AuthRequest, AuthRequestId, Device, DeviceId, EventType, Invitation, OIDCCodeWrapper, OrganizationApiKey, + OrganizationId, SsoAuth, SsoUser, TwoFactor, TwoFactorIncomplete, TwoFactorType, User, UserId, }, DbConn, }, error::MapResult, mail, sso, - sso::{OIDCCode, OIDCState}, + sso::{OIDCCode, OIDCCodeChallenge, OIDCCodeVerifier, OIDCState}, util, CONFIG, }; @@ -92,6 +92,7 @@ async fn login( "authorization_code" if CONFIG.sso_enabled() => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.code, "code cannot be blank")?; + _check_is_some(&data.code_verifier, "code verifier cannot be blank")?; _check_is_some(&data.device_identifier, "device_identifier cannot be blank")?; _check_is_some(&data.device_name, "device_name cannot be blank")?; @@ -147,7 +148,7 @@ async fn _refresh_login(data: ConnectData, conn: &DbConn, ip: &ClientIp) -> Json } Ok((mut device, auth_tokens)) => { // Save to update `device.updated_at` to track usage and toggle new status - device.save(conn).await?; + device.save(true, conn).await?; let result = json!({ "refresh_token": auth_tokens.refresh_token(), @@ -175,17 +176,23 @@ async fn _sso_login( // Ratelimit the login crate::ratelimit::check_limit_login(&ip.ip)?; - let code = match data.code.as_ref() { - None => err!( + let (state, code_verifier) = match (data.code.as_ref(), data.code_verifier.as_ref()) { + (None, _) => err!( "Got no code in OIDC data", ErrorEvent { event: EventType::UserFailedLogIn } ), - Some(code) => code, + (_, None) => err!( + "Got no code verifier in OIDC data", + ErrorEvent { + event: EventType::UserFailedLogIn + } + ), + (Some(code), Some(code_verifier)) => (code, code_verifier.clone()), }; - let user_infos = sso::exchange_code(code, conn).await?; + let (sso_auth, user_infos) = sso::exchange_code(state, code_verifier, conn).await?; let user_with_sso = match SsoUser::find_by_identifier(&user_infos.identifier, conn).await { None => match SsoUser::find_by_mail(&user_infos.email, conn).await { None => None, @@ -248,7 +255,7 @@ async fn _sso_login( _ => (), } - let mut user = User::new(&user_infos.email, user_infos.user_name); + let mut user = User::new(&user_infos.email, user_infos.user_name.clone()); user.verified_at = Some(now); user.save(conn).await?; @@ -267,13 +274,14 @@ async fn _sso_login( } Some((mut user, sso_user)) => { 
let mut device = get_device(&data, conn, &user).await?; + let twofactor_token = twofactor_auth(&mut user, &data, &mut device, ip, client_version, conn).await?; if user.private_key.is_none() { // User was invited a stub was created user.verified_at = Some(now); - if let Some(user_name) = user_infos.user_name { - user.name = user_name; + if let Some(ref user_name) = user_infos.user_name { + user.name = user_name.clone(); } user.save(conn).await?; @@ -290,30 +298,13 @@ async fn _sso_login( } }; - // We passed 2FA get full user information - let auth_user = sso::redeem(&user_infos.state, conn).await?; - - if sso_user.is_none() { - let user_sso = SsoUser { - user_uuid: user.uuid.clone(), - identifier: user_infos.identifier, - }; - user_sso.save(conn).await?; - } - // Set the user_uuid here to be passed back used for event logging. *user_id = Some(user.uuid.clone()); - let auth_tokens = sso::create_auth_tokens( - &device, - &user, - data.client_id, - auth_user.refresh_token, - auth_user.access_token, - auth_user.expires_in, - )?; + // We passed 2FA get auth tokens + let auth_tokens = sso::redeem(&device, &user, data.client_id, sso_user, sso_auth, user_infos, conn).await?; - authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await + authenticated_response(&user, &mut device, auth_tokens, twofactor_token, conn, ip).await } async fn _password_login( @@ -435,7 +426,7 @@ async fn _password_login( let auth_tokens = auth::AuthTokens::new(&device, &user, AuthMethod::Password, data.client_id); - authenticated_response(&user, &mut device, auth_tokens, twofactor_token, &now, conn, ip).await + authenticated_response(&user, &mut device, auth_tokens, twofactor_token, conn, ip).await } async fn authenticated_response( @@ -443,12 +434,12 @@ async fn authenticated_response( device: &mut Device, auth_tokens: auth::AuthTokens, twofactor_token: Option, - now: &NaiveDateTime, conn: &DbConn, ip: &ClientIp, ) -> JsonResult { if CONFIG.mail_enabled() && device.is_new() { - if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), now, device).await { + let now = Utc::now().naive_utc(); + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, device).await { error!("Error sending new device email: {e:#?}"); if CONFIG.require_device_email() { @@ -468,10 +459,38 @@ async fn authenticated_response( } // Save to update `device.updated_at` to track usage and toggle new status - device.save(conn).await?; + device.save(true, conn).await?; let master_password_policy = master_password_policy(user, conn).await; + let has_master_password = !user.password_hash.is_empty(); + let master_password_unlock = if has_master_password { + json!({ + "Kdf": { + "KdfType": user.client_kdf_type, + "Iterations": user.client_kdf_iter, + "Memory": user.client_kdf_memory, + "Parallelism": user.client_kdf_parallelism + }, + // This field is named inconsistently and will be removed and replaced by the "wrapped" variant in the apps. 
+ // https://github.com/bitwarden/android/blob/release/2025.12-rc41/network/src/main/kotlin/com/bitwarden/network/model/MasterPasswordUnlockDataJson.kt#L22-L26 + "MasterKeyEncryptedUserKey": user.akey, + "MasterKeyWrappedUserKey": user.akey, + "Salt": user.email + }) + } else { + Value::Null + }; + + let account_keys = json!({ + "publicKeyEncryptionKeyPair": { + "wrappedPrivateKey": user.private_key, + "publicKey": user.public_key, + "Object": "publicKeyEncryptionKeyPair" + }, + "Object": "privateKeys" + }); + let mut result = json!({ "access_token": auth_tokens.access_token(), "expires_in": auth_tokens.expires_in(), @@ -486,8 +505,10 @@ async fn authenticated_response( "ForcePasswordReset": false, "MasterPasswordPolicy": master_password_policy, "scope": auth_tokens.scope(), + "AccountKeys": account_keys, "UserDecryptionOptions": { - "HasMasterPassword": !user.password_hash.is_empty(), + "HasMasterPassword": has_master_password, + "MasterPasswordUnlock": master_password_unlock, "Object": "userDecryptionOptions" }, }); @@ -585,10 +606,29 @@ async fn _user_api_key_login( let access_claims = auth::LoginJwtClaims::default(&device, &user, &AuthMethod::UserApiKey, data.client_id); // Save to update `device.updated_at` to track usage and toggle new status - device.save(conn).await?; + device.save(true, conn).await?; info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); + let has_master_password = !user.password_hash.is_empty(); + let master_password_unlock = if has_master_password { + json!({ + "Kdf": { + "KdfType": user.client_kdf_type, + "Iterations": user.client_kdf_iter, + "Memory": user.client_kdf_memory, + "Parallelism": user.client_kdf_parallelism + }, + // This field is named inconsistently and will be removed and replaced by the "wrapped" variant in the apps. + // https://github.com/bitwarden/android/blob/release/2025.12-rc41/network/src/main/kotlin/com/bitwarden/network/model/MasterPasswordUnlockDataJson.kt#L22-L26 + "MasterKeyEncryptedUserKey": user.akey, + "MasterKeyWrappedUserKey": user.akey, + "Salt": user.email + }) + } else { + Value::Null + }; + // Note: No refresh_token is returned. The CLI just repeats the // client_credentials login flow when the existing token expires. 
let result = json!({ @@ -604,6 +644,11 @@ async fn _user_api_key_login( "KdfParallelism": user.client_kdf_parallelism, "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing "scope": AuthMethod::UserApiKey.scope(), + "UserDecryptionOptions": { + "HasMasterPassword": has_master_password, + "MasterPasswordUnlock": master_password_unlock, + "Object": "userDecryptionOptions" + }, }); Ok(Json(result)) @@ -648,7 +693,12 @@ async fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> ApiResult // Find device or create new match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { Some(device) => Ok(device), - None => Device::new(device_id, user.uuid.clone(), device_name, device_type, conn).await, + None => { + let mut device = Device::new(device_id, user.uuid.clone(), device_name, device_type); + // save device without updating `device.updated_at` + device.save(false, conn).await?; + Ok(device) + } } } @@ -893,6 +943,7 @@ struct RegisterVerificationData { #[derive(rocket::Responder)] enum RegisterVerificationResponse { + #[response(status = 204)] NoContent(()), Token(Json), } @@ -997,9 +1048,12 @@ struct ConnectData { two_factor_remember: Option, #[field(name = uncased("authrequest"))] auth_request: Option, + // Needed for authorization code #[field(name = uncased("code"))] - code: Option, + code: Option, + #[field(name = uncased("code_verifier"))] + code_verifier: Option, } fn _check_is_some(value: &Option, msg: &str) -> EmptyResult { if value.is_none() { @@ -1021,14 +1075,13 @@ fn prevalidate() -> JsonResult { } #[get("/connect/oidc-signin?&", rank = 1)] -async fn oidcsignin(code: OIDCCode, state: String, conn: DbConn) -> ApiResult { - oidcsignin_redirect( +async fn oidcsignin(code: OIDCCode, state: String, mut conn: DbConn) -> ApiResult { + _oidcsignin_redirect( state, - |decoded_state| sso::OIDCCodeWrapper::Ok { - state: decoded_state, + OIDCCodeWrapper::Ok { code, }, - &conn, + &mut conn, ) .await } @@ -1040,42 +1093,44 @@ async fn oidcsignin_error( state: String, error: String, error_description: Option, - conn: DbConn, + mut conn: DbConn, ) -> ApiResult { - oidcsignin_redirect( + _oidcsignin_redirect( state, - |decoded_state| sso::OIDCCodeWrapper::Error { - state: decoded_state, + OIDCCodeWrapper::Error { error, error_description, }, - &conn, + &mut conn, ) .await } // The state was encoded using Base64 to ensure no issue with providers. // iss and scope parameters are needed for redirection to work on IOS. -async fn oidcsignin_redirect( +// We pass the state as the code to get it back later on. 
+async fn _oidcsignin_redirect( base64_state: String, - wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper, - conn: &DbConn, + code_response: OIDCCodeWrapper, + conn: &mut DbConn, ) -> ApiResult { let state = sso::decode_state(&base64_state)?; - let code = sso::encode_code_claims(wrapper(state.clone())); - let nonce = match SsoNonce::find(&state, conn).await { - Some(n) => n, - None => err!(format!("Failed to retrieve redirect_uri with {state}")), + let mut sso_auth = match SsoAuth::find(&state, conn).await { + None => err!(format!("Cannot retrieve sso_auth for {state}")), + Some(sso_auth) => sso_auth, }; + sso_auth.code_response = Some(code_response); + sso_auth.updated_at = Utc::now().naive_utc(); + sso_auth.save(conn).await?; - let mut url = match url::Url::parse(&nonce.redirect_uri) { + let mut url = match url::Url::parse(&sso_auth.redirect_uri) { Ok(url) => url, - Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", nonce.redirect_uri)), + Err(err) => err!(format!("Failed to parse redirect uri ({}): {err}", sso_auth.redirect_uri)), }; url.query_pairs_mut() - .append_pair("code", &code) + .append_pair("code", &state) .append_pair("state", &state) .append_pair("scope", &AuthMethod::Sso.scope()) .append_pair("iss", &CONFIG.domain()); @@ -1098,10 +1153,8 @@ struct AuthorizeData { #[allow(unused)] scope: Option, state: OIDCState, - #[allow(unused)] - code_challenge: Option, - #[allow(unused)] - code_challenge_method: Option, + code_challenge: OIDCCodeChallenge, + code_challenge_method: String, #[allow(unused)] response_mode: Option, #[allow(unused)] @@ -1118,10 +1171,16 @@ async fn authorize(data: AuthorizeData, conn: DbConn) -> ApiResult { client_id, redirect_uri, state, + code_challenge, + code_challenge_method, .. } = data; - let auth_url = sso::authorize_url(state, &client_id, &redirect_uri, conn).await?; + if code_challenge_method != "S256" { + err!("Unsupported code challenge method"); + } + + let auth_url = sso::authorize_url(state, code_challenge, &client_id, &redirect_uri, conn).await?; Ok(Redirect::temporary(String::from(auth_url))) } diff --git a/src/api/mod.rs b/src/api/mod.rs index b988f053..ecdf9408 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -47,6 +47,7 @@ pub type EmptyResult = ApiResult<()>; #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct PasswordOrOtpData { + #[serde(alias = "MasterPasswordHash")] master_password_hash: Option, otp: Option, } diff --git a/src/api/push.rs b/src/api/push.rs index 4394e7d2..a7e88455 100644 --- a/src/api/push.rs +++ b/src/api/push.rs @@ -128,7 +128,7 @@ pub async fn register_push_device(device: &mut Device, conn: &DbConn) -> EmptyRe err!(format!("An error occurred while proceeding registration of a device: {e}")); } - if let Err(e) = device.save(conn).await { + if let Err(e) = device.save(true, conn).await { err!(format!("An error occurred while trying to save the (registered) device push uuid: {e}")); } diff --git a/src/auth.rs b/src/auth.rs index e10de615..ab41898f 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1210,8 +1210,20 @@ pub async fn refresh_tokens( ) -> ApiResult<(Device, AuthTokens)> { let refresh_claims = match decode_refresh(refresh_token) { Err(err) => { - debug!("Failed to decode {} refresh_token: {refresh_token}", ip.ip); - err_silent!(format!("Impossible to read refresh_token: {}", err.message())) + error!("Failed to decode {} refresh_token: {refresh_token}: {err:?}", ip.ip); + //err_silent!(format!("Impossible to read refresh_token: {}", err.message())) + + // If the token 
failed to decode, it was probably one of the old style tokens that was just a Base64 string. + // We can generate a claim for them for backwards compatibility. Note that the password refresh claims don't + // check expiration or issuer, so they're not included here. + RefreshJwtClaims { + nbf: 0, + exp: 0, + iss: String::new(), + sub: AuthMethod::Password, + device_token: refresh_token.into(), + token: None, + } } Ok(claims) => claims, }; @@ -1223,7 +1235,7 @@ pub async fn refresh_tokens( }; // Save to update `updated_at`. - device.save(conn).await?; + device.save(true, conn).await?; let user = match User::find_by_uuid(&device.user_uuid, conn).await { None => err!("Impossible to find user"), diff --git a/src/config.rs b/src/config.rs index e1a2f4c2..4fb103fa 100644 --- a/src/config.rs +++ b/src/config.rs @@ -14,7 +14,7 @@ use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor}; use crate::{ error::Error, - util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags}, + util::{get_active_web_release, get_env, get_env_bool, is_valid_email, parse_experimental_client_feature_flags}, }; static CONFIG_FILE: LazyLock = LazyLock::new(|| { @@ -564,9 +564,9 @@ make_config! { /// Duo Auth context cleanup schedule |> Cron schedule of the job that cleans expired Duo contexts from the database. Does nothing if Duo MFA is disabled or set to use the legacy iframe prompt. /// Defaults to once every minute. Set blank to disable this job. duo_context_purge_schedule: String, false, def, "30 * * * * *".to_string(); - /// Purge incomplete SSO nonce. |> Cron schedule of the job that cleans leftover nonce in db due to incomplete SSO login. + /// Purge incomplete SSO auth. |> Cron schedule of the job that cleans leftover auth in db due to incomplete SSO login. /// Defaults to daily. Set blank to disable this job. - purge_incomplete_sso_nonce: String, false, def, "0 20 0 * * *".to_string(); + purge_incomplete_sso_auth: String, false, def, "0 20 0 * * *".to_string(); }, /// General settings @@ -789,6 +789,10 @@ make_config! { /// Bitwarden enforces this by default. In Vaultwarden we encouraged to use multiple organizations because groups were not available. /// Setting this to true will enforce the Single Org Policy to be enabled before you can enable the Reset Password policy. enforce_single_org_with_reset_pw_policy: bool, false, def, false; + + /// Prefer IPv6 (AAAA) resolving |> This settings configures the DNS resolver to resolve IPv6 first, and if not available try IPv4 + /// This could be useful in IPv6 only environments. + dns_prefer_ipv6: bool, true, def, false; }, /// OpenID Connect SSO settings @@ -1035,6 +1039,7 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { "ssh-agent", // Key Management Team "ssh-key-vault-item", + "pm-25373-windows-biometrics-v2", // Tools "export-attachments", // Mobile Team @@ -1320,12 +1325,16 @@ fn generate_smtp_img_src(embed_images: bool, domain: &str) -> String { if embed_images { "cid:".to_string() } else { - format!("{domain}/vw_static/") + // normalize base_url + let base_url = domain.trim_end_matches('/'); + format!("{base_url}/vw_static/") } } fn generate_sso_callback_path(domain: &str) -> String { - format!("{domain}/identity/connect/oidc-signin") + // normalize base_url + let base_url = domain.trim_end_matches('/'); + format!("{base_url}/identity/connect/oidc-signin") } /// Generate the correct URL for the icon service. 
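A note on the two small helpers changed just above (generate_smtp_img_src and generate_sso_callback_path): trimming trailing slashes from the configured domain before appending a fixed path avoids doubled slashes when DOMAIN is set with a trailing '/'. Below is a minimal, standalone sketch of that normalization; the helper name join_base and the example URLs are illustrative only and not part of the patch.

// Sketch: normalize a configured base URL before appending a fixed path,
// mirroring the trim_end_matches('/') call used in config.rs above.
fn join_base(domain: &str, path: &str) -> String {
    let base_url = domain.trim_end_matches('/');
    format!("{base_url}{path}")
}

fn main() {
    // With a trailing slash, naive concatenation would produce "...com//identity/...".
    assert_eq!(
        join_base("https://vw.example.com/", "/identity/connect/oidc-signin"),
        "https://vw.example.com/identity/connect/oidc-signin"
    );
    // Without a trailing slash the result is unchanged.
    assert_eq!(
        join_base("https://vw.example.com", "/vw_static/"),
        "https://vw.example.com/vw_static/"
    );
}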
@@ -1840,7 +1849,7 @@ fn to_json<'reg, 'rc>( // Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then. // The default is based upon the version since this feature is added. static WEB_VAULT_VERSION: LazyLock = LazyLock::new(|| { - let vault_version = get_web_vault_version(); + let vault_version = get_active_web_release(); // Use a single regex capture to extract version components let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap(); re.captures(&vault_version) diff --git a/src/db/mod.rs b/src/db/mod.rs index 4fb2da75..ae2b1221 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -337,6 +337,46 @@ macro_rules! db_run { }; } +// Write all ToSql and FromSql given a serializable/deserializable type. +#[macro_export] +macro_rules! impl_FromToSqlText { + ($name:ty) => { + #[cfg(mysql)] + impl ToSql for $name { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::mysql::Mysql>) -> diesel::serialize::Result { + serde_json::to_writer(out, self).map(|_| diesel::serialize::IsNull::No).map_err(Into::into) + } + } + + #[cfg(postgresql)] + impl ToSql for $name { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::pg::Pg>) -> diesel::serialize::Result { + serde_json::to_writer(out, self).map(|_| diesel::serialize::IsNull::No).map_err(Into::into) + } + } + + #[cfg(sqlite)] + impl ToSql for $name { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, diesel::sqlite::Sqlite>) -> diesel::serialize::Result { + serde_json::to_string(self).map_err(Into::into).map(|str| { + out.set_value(str); + diesel::serialize::IsNull::No + }) + } + } + + impl FromSql for $name + where + String: FromSql, + { + fn from_sql(bytes: DB::RawValue<'_>) -> diesel::deserialize::Result { + >::from_sql(bytes) + .and_then(|str| serde_json::from_str(&str).map_err(Into::into)) + } + } + }; +} + pub mod schema; // Reexport the models, needs to be after the macros are defined so it can access them diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 60b10d23..4273c22a 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -46,7 +46,7 @@ impl Attachment { pub async fn get_url(&self, host: &str) -> Result { let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?; - if operator.info().scheme() == opendal::Scheme::Fs { + if operator.info().scheme() == <&'static str>::from(opendal::Scheme::Fs) { let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone())); Ok(format!("{host}/attachments/{}/{}?token={token}", self.cipher_uuid, self.id)) } else { diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 0d86870f..4e3d0197 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -35,6 +35,25 @@ pub struct Device { /// Local methods impl Device { + pub fn new(uuid: DeviceId, user_uuid: UserId, name: String, atype: i32) -> Self { + let now = Utc::now().naive_utc(); + + Self { + uuid, + created_at: now, + updated_at: now, + + user_uuid, + name, + atype, + + push_uuid: Some(PushId(get_uuid())), + push_token: None, + refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL), + twofactor_remember: None, + } + } + pub fn to_json(&self) -> Value { json!({ "id": self.uuid, @@ -110,38 +129,21 @@ impl DeviceWithAuthRequest { } use crate::db::DbConn; -use crate::api::{ApiResult, EmptyResult}; +use crate::api::EmptyResult; use crate::error::MapResult; /// Database methods impl Device { - pub async fn new(uuid: DeviceId, user_uuid: UserId, name: String, 
atype: i32, conn: &DbConn) -> ApiResult { - let now = Utc::now().naive_utc(); - - let device = Self { - uuid, - created_at: now, - updated_at: now, - - user_uuid, - name, - atype, - - push_uuid: Some(PushId(get_uuid())), - push_token: None, - refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL), - twofactor_remember: None, - }; - - device.inner_save(conn).await.map(|()| device) - } + pub async fn save(&mut self, update_time: bool, conn: &DbConn) -> EmptyResult { + if update_time { + self.updated_at = Utc::now().naive_utc(); + } - async fn inner_save(&self, conn: &DbConn) -> EmptyResult { db_run! { conn: sqlite, mysql { crate::util::retry(|| diesel::replace_into(devices::table) - .values(self) + .values(&*self) .execute(conn), 10, ).map_res("Error saving device") @@ -149,10 +151,10 @@ impl Device { postgresql { crate::util::retry(|| diesel::insert_into(devices::table) - .values(self) + .values(&*self) .on_conflict((devices::uuid, devices::user_uuid)) .do_update() - .set(self) + .set(&*self) .execute(conn), 10, ).map_res("Error saving device") @@ -160,12 +162,6 @@ impl Device { } } - // Should only be called after user has passed authentication - pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { - self.updated_at = Utc::now().naive_utc(); - self.inner_save(conn).await - } - pub async fn delete_all_by_user(user_uuid: &UserId, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(devices::table.filter(devices::user_uuid.eq(user_uuid))) diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index a9406ed0..b4fcf658 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -11,7 +11,7 @@ mod group; mod org_policy; mod organization; mod send; -mod sso_nonce; +mod sso_auth; mod two_factor; mod two_factor_duo_context; mod two_factor_incomplete; @@ -27,7 +27,7 @@ pub use self::event::{Event, EventType}; pub use self::favorite::Favorite; pub use self::folder::{Folder, FolderCipher, FolderId}; pub use self::group::{CollectionGroup, Group, GroupId, GroupUser}; -pub use self::org_policy::{OrgPolicy, OrgPolicyErr, OrgPolicyId, OrgPolicyType}; +pub use self::org_policy::{OrgPolicy, OrgPolicyId, OrgPolicyType}; pub use self::organization::{ Membership, MembershipId, MembershipStatus, MembershipType, OrgApiKeyId, Organization, OrganizationApiKey, OrganizationId, @@ -36,7 +36,7 @@ pub use self::send::{ id::{SendFileId, SendId}, Send, SendType, }; -pub use self::sso_nonce::SsoNonce; +pub use self::sso_auth::{OIDCAuthenticatedUser, OIDCCodeWrapper, SsoAuth}; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::two_factor_duo_context::TwoFactorDuoContext; pub use self::two_factor_incomplete::TwoFactorIncomplete; diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 92665574..0607f146 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -2,10 +2,12 @@ use derive_more::{AsRef, From}; use serde::Deserialize; use serde_json::Value; +use crate::api::core::two_factor; use crate::api::EmptyResult; use crate::db::schema::{org_policies, users_organizations}; use crate::db::DbConn; use crate::error::MapResult; +use crate::CONFIG; use diesel::prelude::*; use super::{Membership, MembershipId, MembershipStatus, MembershipType, OrganizationId, TwoFactor, UserId}; @@ -40,6 +42,10 @@ pub enum OrgPolicyType { // FreeFamiliesSponsorshipPolicy = 13, RemoveUnlockWithPin = 14, RestrictedItemTypes = 15, + UriMatchDefaults = 16, + // AutotypeDefaultSetting = 17, // Not supported yet + // AutoConfirm = 18, // Not supported (not 
implemented yet) + // BlockClaimedDomainAccountCreation = 19, // Not supported (Not AGPLv3 Licensed) } // https://github.com/bitwarden/server/blob/9ebe16587175b1c0e9208f84397bb75d0d595510/src/Core/AdminConsole/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs#L5 @@ -58,14 +64,6 @@ pub struct ResetPasswordDataModel { pub auto_enroll_enabled: bool, } -pub type OrgPolicyResult = Result<(), OrgPolicyErr>; - -#[derive(Debug)] -pub enum OrgPolicyErr { - TwoFactorMissing, - SingleOrgEnforced, -} - /// Local methods impl OrgPolicy { pub fn new(org_uuid: OrganizationId, atype: OrgPolicyType, enabled: bool, data: String) -> Self { @@ -280,31 +278,35 @@ impl OrgPolicy { false } - pub async fn is_user_allowed( - user_uuid: &UserId, - org_uuid: &OrganizationId, - exclude_current_org: bool, - conn: &DbConn, - ) -> OrgPolicyResult { - // Enforce TwoFactor/TwoStep login - if TwoFactor::find_by_user(user_uuid, conn).await.is_empty() { - match Self::find_by_org_and_type(org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await { - Some(p) if p.enabled => { - return Err(OrgPolicyErr::TwoFactorMissing); + pub async fn check_user_allowed(m: &Membership, action: &str, conn: &DbConn) -> EmptyResult { + if m.atype < MembershipType::Admin && m.status > (MembershipStatus::Invited as i32) { + // Enforce TwoFactor/TwoStep login + if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::TwoFactorAuthentication, conn).await + { + if p.enabled && TwoFactor::find_by_user(&m.user_uuid, conn).await.is_empty() { + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&m.user_uuid, conn).await?; + } else { + err!(format!("Cannot {} because 2FA is required (membership {})", action, m.uuid)); + } } - _ => {} - }; - } + } + + // Check if the user is part of another Organization with SingleOrg activated + if Self::is_applicable_to_user(&m.user_uuid, OrgPolicyType::SingleOrg, Some(&m.org_uuid), conn).await { + err!(format!( + "Cannot {} because another organization policy forbids it (membership {})", + action, m.uuid + )); + } - // Enforce Single Organization Policy of other organizations user is a member of - // This check here needs to exclude this current org-id, else an accepted user can not be confirmed. - let exclude_org = if exclude_current_org { - Some(org_uuid) - } else { - None - }; - if Self::is_applicable_to_user(user_uuid, OrgPolicyType::SingleOrg, exclude_org, conn).await { - return Err(OrgPolicyErr::SingleOrgEnforced); + if let Some(p) = Self::find_by_org_and_type(&m.org_uuid, OrgPolicyType::SingleOrg, conn).await { + if p.enabled + && Membership::count_accepted_and_confirmed_by_user(&m.user_uuid, &m.org_uuid, conn).await > 0 + { + err!(format!("Cannot {} because the organization policy forbids being part of other organization (membership {})", action, m.uuid)); + } + } } Ok(()) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 640e47e7..0b722ef6 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -883,10 +883,15 @@ impl Membership { }} } - pub async fn count_accepted_and_confirmed_by_user(user_uuid: &UserId, conn: &DbConn) -> i64 { + pub async fn count_accepted_and_confirmed_by_user( + user_uuid: &UserId, + excluded_org: &OrganizationId, + conn: &DbConn, + ) -> i64 { db_run! 
{ conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) + .filter(users_organizations::org_uuid.ne(excluded_org)) .filter(users_organizations::status.eq(MembershipStatus::Accepted as i32).or(users_organizations::status.eq(MembershipStatus::Confirmed as i32))) .count() .first::(conn) diff --git a/src/db/models/sso_auth.rs b/src/db/models/sso_auth.rs new file mode 100644 index 00000000..fec0433a --- /dev/null +++ b/src/db/models/sso_auth.rs @@ -0,0 +1,134 @@ +use chrono::{NaiveDateTime, Utc}; +use std::time::Duration; + +use crate::api::EmptyResult; +use crate::db::schema::sso_auth; +use crate::db::{DbConn, DbPool}; +use crate::error::MapResult; +use crate::sso::{OIDCCode, OIDCCodeChallenge, OIDCIdentifier, OIDCState, SSO_AUTH_EXPIRATION}; + +use diesel::deserialize::FromSql; +use diesel::expression::AsExpression; +use diesel::prelude::*; +use diesel::serialize::{Output, ToSql}; +use diesel::sql_types::Text; + +#[derive(AsExpression, Clone, Debug, Serialize, Deserialize, FromSqlRow)] +#[diesel(sql_type = Text)] +pub enum OIDCCodeWrapper { + Ok { + code: OIDCCode, + }, + Error { + error: String, + error_description: Option, + }, +} + +impl_FromToSqlText!(OIDCCodeWrapper); + +#[derive(AsExpression, Clone, Debug, Serialize, Deserialize, FromSqlRow)] +#[diesel(sql_type = Text)] +pub struct OIDCAuthenticatedUser { + pub refresh_token: Option, + pub access_token: String, + pub expires_in: Option, + pub identifier: OIDCIdentifier, + pub email: String, + pub email_verified: Option, + pub user_name: Option, +} + +impl_FromToSqlText!(OIDCAuthenticatedUser); + +#[derive(Identifiable, Queryable, Insertable, AsChangeset, Selectable)] +#[diesel(table_name = sso_auth)] +#[diesel(treat_none_as_null = true)] +#[diesel(primary_key(state))] +pub struct SsoAuth { + pub state: OIDCState, + pub client_challenge: OIDCCodeChallenge, + pub nonce: String, + pub redirect_uri: String, + pub code_response: Option, + pub auth_response: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, +} + +/// Local methods +impl SsoAuth { + pub fn new(state: OIDCState, client_challenge: OIDCCodeChallenge, nonce: String, redirect_uri: String) -> Self { + let now = Utc::now().naive_utc(); + + SsoAuth { + state, + client_challenge, + nonce, + redirect_uri, + created_at: now, + updated_at: now, + code_response: None, + auth_response: None, + } + } +} + +/// Database methods +impl SsoAuth { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { + db_run! { conn: + mysql { + diesel::insert_into(sso_auth::table) + .values(self) + .on_conflict(diesel::dsl::DuplicatedKeys) + .do_update() + .set(self) + .execute(conn) + .map_res("Error saving SSO auth") + } + postgresql, sqlite { + diesel::insert_into(sso_auth::table) + .values(self) + .on_conflict(sso_auth::state) + .do_update() + .set(self) + .execute(conn) + .map_res("Error saving SSO auth") + } + } + } + + pub async fn find(state: &OIDCState, conn: &DbConn) -> Option { + let oldest = Utc::now().naive_utc() - *SSO_AUTH_EXPIRATION; + db_run! { conn: { + sso_auth::table + .filter(sso_auth::state.eq(state)) + .filter(sso_auth::created_at.ge(oldest)) + .first::(conn) + .ok() + }} + } + + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + db_run! 
{conn: { + diesel::delete(sso_auth::table.filter(sso_auth::state.eq(self.state))) + .execute(conn) + .map_res("Error deleting sso_auth") + }} + } + + pub async fn delete_expired(pool: DbPool) -> EmptyResult { + debug!("Purging expired sso_auth"); + if let Ok(conn) = pool.get().await { + let oldest = Utc::now().naive_utc() - *SSO_AUTH_EXPIRATION; + db_run! { conn: { + diesel::delete(sso_auth::table.filter(sso_auth::created_at.lt(oldest))) + .execute(conn) + .map_res("Error deleting expired SSO nonce") + }} + } else { + err!("Failed to get DB connection while purging expired sso_auth") + } + } +} diff --git a/src/db/models/sso_nonce.rs b/src/db/models/sso_nonce.rs deleted file mode 100644 index c0e16076..00000000 --- a/src/db/models/sso_nonce.rs +++ /dev/null @@ -1,87 +0,0 @@ -use chrono::{NaiveDateTime, Utc}; - -use crate::api::EmptyResult; -use crate::db::schema::sso_nonce; -use crate::db::{DbConn, DbPool}; -use crate::error::MapResult; -use crate::sso::{OIDCState, NONCE_EXPIRATION}; -use diesel::prelude::*; - -#[derive(Identifiable, Queryable, Insertable)] -#[diesel(table_name = sso_nonce)] -#[diesel(primary_key(state))] -pub struct SsoNonce { - pub state: OIDCState, - pub nonce: String, - pub verifier: Option, - pub redirect_uri: String, - pub created_at: NaiveDateTime, -} - -/// Local methods -impl SsoNonce { - pub fn new(state: OIDCState, nonce: String, verifier: Option, redirect_uri: String) -> Self { - let now = Utc::now().naive_utc(); - - SsoNonce { - state, - nonce, - verifier, - redirect_uri, - created_at: now, - } - } -} - -/// Database methods -impl SsoNonce { - pub async fn save(&self, conn: &DbConn) -> EmptyResult { - db_run! { conn: - sqlite, mysql { - diesel::replace_into(sso_nonce::table) - .values(self) - .execute(conn) - .map_res("Error saving SSO nonce") - } - postgresql { - diesel::insert_into(sso_nonce::table) - .values(self) - .execute(conn) - .map_res("Error saving SSO nonce") - } - } - } - - pub async fn delete(state: &OIDCState, conn: &DbConn) -> EmptyResult { - db_run! { conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::state.eq(state))) - .execute(conn) - .map_res("Error deleting SSO nonce") - }} - } - - pub async fn find(state: &OIDCState, conn: &DbConn) -> Option { - let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; - db_run! { conn: { - sso_nonce::table - .filter(sso_nonce::state.eq(state)) - .filter(sso_nonce::created_at.ge(oldest)) - .first::(conn) - .ok() - }} - } - - pub async fn delete_expired(pool: DbPool) -> EmptyResult { - debug!("Purging expired sso_nonce"); - if let Ok(conn) = pool.get().await { - let oldest = Utc::now().naive_utc() - *NONCE_EXPIRATION; - db_run! 
{ conn: { - diesel::delete(sso_nonce::table.filter(sso_nonce::created_at.lt(oldest))) - .execute(conn) - .map_res("Error deleting expired SSO nonce") - }} - } else { - err!("Failed to get DB connection while purging expired sso_nonce") - } - } -} diff --git a/src/db/models/user.rs b/src/db/models/user.rs index e14c4218..c96e0fe7 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -1,4 +1,4 @@ -use crate::db::schema::{devices, invitations, sso_users, users}; +use crate::db::schema::{invitations, sso_users, twofactor_incomplete, users}; use chrono::{NaiveDateTime, TimeDelta, Utc}; use derive_more::{AsRef, Deref, Display, From}; use diesel::prelude::*; @@ -10,8 +10,7 @@ use super::{ use crate::{ api::EmptyResult, crypto, - db::models::DeviceId, - db::DbConn, + db::{models::DeviceId, DbConn}, error::MapResult, sso::OIDCIdentifier, util::{format_date, get_uuid, retry}, @@ -387,15 +386,18 @@ impl User { }} } - pub async fn find_by_device_id(device_uuid: &DeviceId, conn: &DbConn) -> Option { - db_run! { conn: { - users::table - .inner_join(devices::table.on(devices::user_uuid.eq(users::uuid))) - .filter(devices::uuid.eq(device_uuid)) - .select(users::all_columns) - .first::(conn) + pub async fn find_by_device_for_email2fa(device_uuid: &DeviceId, conn: &DbConn) -> Option { + if let Some(user_uuid) = db_run! ( conn: { + twofactor_incomplete::table + .filter(twofactor_incomplete::device_uuid.eq(device_uuid)) + .order_by(twofactor_incomplete::login_time.desc()) + .select(twofactor_incomplete::user_uuid) + .first::(conn) .ok() - }} + }) { + return Self::find_by_uuid(&user_uuid, conn).await; + } + None } pub async fn get_all(conn: &DbConn) -> Vec<(Self, Option)> { diff --git a/src/db/schema.rs b/src/db/schema.rs index a0f31f1e..914b4fe9 100644 --- a/src/db/schema.rs +++ b/src/db/schema.rs @@ -256,12 +256,15 @@ table! { } table! 
{ - sso_nonce (state) { + sso_auth (state) { state -> Text, + client_challenge -> Text, nonce -> Text, - verifier -> Nullable, redirect_uri -> Text, + code_response -> Nullable, + auth_response -> Nullable, created_at -> Timestamp, + updated_at -> Timestamp, } } diff --git a/src/http_client.rs b/src/http_client.rs index b48f340c..5462ef8e 100644 --- a/src/http_client.rs +++ b/src/http_client.rs @@ -185,7 +185,10 @@ impl CustomDnsResolver { fn new() -> Arc { match TokioResolver::builder(TokioConnectionProvider::default()) { - Ok(builder) => { + Ok(mut builder) => { + if CONFIG.dns_prefer_ipv6() { + builder.options_mut().ip_strategy = hickory_resolver::config::LookupIpStrategy::Ipv6thenIpv4; + } let resolver = builder.build(); Arc::new(Self::Hickory(Arc::new(resolver))) } diff --git a/src/mail.rs b/src/mail.rs index 4cf52d38..270a839e 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -705,7 +705,7 @@ async fn send_with_selected_transport(email: Message) -> EmptyResult { } async fn send_email(address: &str, subject: &str, body_html: String, body_text: String) -> EmptyResult { - let smtp_from = &CONFIG.smtp_from(); + let smtp_from = Address::from_str(&CONFIG.smtp_from())?; let body = if CONFIG.smtp_embed_images() { let logo_gray_body = Body::new(crate::api::static_files("logo-gray.png").unwrap().1.to_vec()); @@ -727,9 +727,9 @@ async fn send_email(address: &str, subject: &str, body_html: String, body_text: }; let email = Message::builder() - .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::>()[1]))) + .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.domain()))) .to(Mailbox::new(None, Address::from_str(address)?)) - .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?)) + .from(Mailbox::new(Some(CONFIG.smtp_from_name()), smtp_from)) .subject(subject) .multipart(body)?; diff --git a/src/main.rs b/src/main.rs index 86f7714a..8eef2e8c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -126,7 +126,7 @@ fn parse_args() { exit(0); } else if pargs.contains(["-v", "--version"]) { config::SKIP_CONFIG_VALIDATION.store(true, Ordering::Relaxed); - let web_vault_version = util::get_web_vault_version(); + let web_vault_version = util::get_active_web_release(); println!("Vaultwarden {version}"); println!("Web-Vault {web_vault_version}"); exit(0); @@ -246,8 +246,8 @@ fn init_logging() -> Result { .split(',') .collect::>() .into_iter() - .flat_map(|s| match s.split('=').collect::>()[..] { - [log, lvl_str] => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)), + .flat_map(|s| match s.split_once('=') { + Some((log, lvl_str)) => log::LevelFilter::from_str(lvl_str).ok().map(|lvl| (log, lvl)), _ => None, }) .collect() @@ -699,10 +699,10 @@ fn schedule_jobs(pool: db::DbPool) { })); } - // Purge sso nonce from incomplete flow (default to daily at 00h20). - if !CONFIG.purge_incomplete_sso_nonce().is_empty() { - sched.add(Job::new(CONFIG.purge_incomplete_sso_nonce().parse().unwrap(), || { - runtime.spawn(db::models::SsoNonce::delete_expired(pool.clone())); + // Purge sso auth from incomplete flow (default to daily at 00h20). 
+ if !CONFIG.purge_incomplete_sso_auth().is_empty() { + sched.add(Job::new(CONFIG.purge_incomplete_sso_auth().parse().unwrap(), || { + runtime.spawn(db::models::SsoAuth::delete_expired(pool.clone())); })); } diff --git a/src/sso.rs b/src/sso.rs index 90e4d677..ee6d707a 100644 --- a/src/sso.rs +++ b/src/sso.rs @@ -1,8 +1,7 @@ use std::{sync::LazyLock, time::Duration}; use chrono::Utc; -use derive_more::{AsRef, Deref, Display, From}; -use mini_moka::sync::Cache; +use derive_more::{AsRef, Deref, Display, From, Into}; use regex::Regex; use url::Url; @@ -11,7 +10,7 @@ use crate::{ auth, auth::{AuthMethod, AuthTokens, TokenWrapper, BW_EXPIRATION, DEFAULT_REFRESH_VALIDITY}, db::{ - models::{Device, SsoNonce, User}, + models::{Device, OIDCAuthenticatedUser, OIDCCodeWrapper, SsoAuth, SsoUser, User}, DbConn, }, sso_client::Client, @@ -20,12 +19,10 @@ use crate::{ pub static FAKE_IDENTIFIER: &str = "VW_DUMMY_IDENTIFIER_FOR_OIDC"; -static AC_CACHE: LazyLock> = - LazyLock::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); - static SSO_JWT_ISSUER: LazyLock = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin())); -pub static NONCE_EXPIRATION: LazyLock = LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); +pub static SSO_AUTH_EXPIRATION: LazyLock = + LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); #[derive( Clone, @@ -47,6 +44,47 @@ pub static NONCE_EXPIRATION: LazyLock = LazyLock::new(|| chron #[from(forward)] pub struct OIDCCode(String); +#[derive( + Clone, + Debug, + Default, + DieselNewType, + FromForm, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + AsRef, + Deref, + Display, + From, + Into, +)] +#[deref(forward)] +#[into(owned)] +pub struct OIDCCodeChallenge(String); + +#[derive( + Clone, + Debug, + Default, + DieselNewType, + FromForm, + PartialEq, + Eq, + Hash, + Serialize, + Deserialize, + AsRef, + Deref, + Display, + Into, +)] +#[deref(forward)] +#[into(owned)] +pub struct OIDCCodeVerifier(String); + #[derive( Clone, Debug, @@ -91,40 +129,6 @@ pub fn encode_ssotoken_claims() -> String { auth::encode_jwt(&claims) } -#[derive(Debug, Serialize, Deserialize)] -pub enum OIDCCodeWrapper { - Ok { - state: OIDCState, - code: OIDCCode, - }, - Error { - state: OIDCState, - error: String, - error_description: Option, - }, -} - -#[derive(Debug, Serialize, Deserialize)] -struct OIDCCodeClaims { - // Expiration time - pub exp: i64, - // Issuer - pub iss: String, - - pub code: OIDCCodeWrapper, -} - -pub fn encode_code_claims(code: OIDCCodeWrapper) -> String { - let time_now = Utc::now(); - let claims = OIDCCodeClaims { - exp: (time_now + chrono::TimeDelta::try_minutes(5).unwrap()).timestamp(), - iss: SSO_JWT_ISSUER.to_string(), - code, - }; - - auth::encode_jwt(&claims) -} - #[derive(Clone, Debug, Serialize, Deserialize)] struct BasicTokenClaims { iat: Option, @@ -132,6 +136,12 @@ struct BasicTokenClaims { exp: i64, } +#[derive(Deserialize)] +struct BasicTokenClaimsValidation { + exp: u64, + iss: String, +} + impl BasicTokenClaims { fn nbf(&self) -> i64 { self.nbf.or(self.iat).unwrap_or_else(|| Utc::now().timestamp()) @@ -139,13 +149,23 @@ impl BasicTokenClaims { } fn decode_token_claims(token_name: &str, token: &str) -> ApiResult { - let mut validation = jsonwebtoken::Validation::default(); - validation.set_issuer(&[CONFIG.sso_authority()]); - validation.insecure_disable_signature_validation(); - validation.validate_aud = false; + // We need to manually validate this token, since `insecure_decode` does not do this + match 
jsonwebtoken::dangerous::insecure_decode::(token) { + Ok(btcv) => { + let now = jsonwebtoken::get_current_timestamp(); + let validate_claim = btcv.claims; + // Validate the exp in the claim with a leeway of 60 seconds, same as jsonwebtoken does + if validate_claim.exp < now - 60 { + err_silent!(format!("Expired Signature for base token claim from {token_name}")) + } + if validate_claim.iss.ne(&CONFIG.sso_authority()) { + err_silent!(format!("Invalid Issuer for base token claim from {token_name}")) + } - match jsonwebtoken::decode(token, &jsonwebtoken::DecodingKey::from_secret(&[]), &validation) { - Ok(btc) => Ok(btc.claims), + // All is validated and ok, lets decode again using the wanted struct + let btc = jsonwebtoken::dangerous::insecure_decode::(token).unwrap(); + Ok(btc.claims) + } Err(err) => err_silent!(format!("Failed to decode basic token claims from {token_name}: {err}")), } } @@ -162,9 +182,14 @@ pub fn decode_state(base64_state: &str) -> ApiResult { Ok(state) } -// The `nonce` allow to protect against replay attacks // redirect_uri from: https://github.com/bitwarden/server/blob/main/src/Identity/IdentityServer/ApiClient.cs -pub async fn authorize_url(state: OIDCState, client_id: &str, raw_redirect_uri: &str, conn: DbConn) -> ApiResult { +pub async fn authorize_url( + state: OIDCState, + client_challenge: OIDCCodeChallenge, + client_id: &str, + raw_redirect_uri: &str, + conn: DbConn, +) -> ApiResult { let redirect_uri = match client_id { "web" | "browser" => format!("{}/sso-connector.html", CONFIG.domain()), "desktop" | "mobile" => "bitwarden://sso-callback".to_string(), @@ -178,8 +203,8 @@ pub async fn authorize_url(state: OIDCState, client_id: &str, raw_redirect_uri: _ => err!(format!("Unsupported client {client_id}")), }; - let (auth_url, nonce) = Client::authorize_url(state, redirect_uri).await?; - nonce.save(&conn).await?; + let (auth_url, sso_auth) = Client::authorize_url(state, client_challenge, redirect_uri).await?; + sso_auth.save(&conn).await?; Ok(auth_url) } @@ -209,78 +234,45 @@ impl OIDCIdentifier { } } -#[derive(Clone, Debug)] -pub struct AuthenticatedUser { - pub refresh_token: Option, - pub access_token: String, - pub expires_in: Option, - pub identifier: OIDCIdentifier, - pub email: String, - pub email_verified: Option, - pub user_name: Option, -} - -#[derive(Clone, Debug)] -pub struct UserInformation { - pub state: OIDCState, - pub identifier: OIDCIdentifier, - pub email: String, - pub email_verified: Option, - pub user_name: Option, -} - -async fn decode_code_claims(code: &str, conn: &DbConn) -> ApiResult<(OIDCCode, OIDCState)> { - match auth::decode_jwt::(code, SSO_JWT_ISSUER.to_string()) { - Ok(code_claims) => match code_claims.code { - OIDCCodeWrapper::Ok { - state, - code, - } => Ok((code, state)), - OIDCCodeWrapper::Error { - state, - error, - error_description, - } => { - if let Err(err) = SsoNonce::delete(&state, conn).await { - error!("Failed to delete database sso_nonce using {state}: {err}") - } - err!(format!( - "SSO authorization failed: {error}, {}", - error_description.as_ref().unwrap_or(&String::new()) - )) - } - }, - Err(err) => err!(format!("Failed to decode code wrapper: {err}")), - } -} - // During the 2FA flow we will // - retrieve the user information and then only discover he needs 2FA. -// - second time we will rely on the `AC_CACHE` since the `code` has already been exchanged. -// The `nonce` will ensure that the user is authorized only once. 
-// We return only the `UserInformation` to force calling `redeem` to obtain the `refresh_token`. -pub async fn exchange_code(wrapped_code: &str, conn: &DbConn) -> ApiResult { +// - second time we will rely on `SsoAuth.auth_response` since the `code` has already been exchanged. +// The `SsoAuth` will ensure that the user is authorized only once. +pub async fn exchange_code( + state: &OIDCState, + client_verifier: OIDCCodeVerifier, + conn: &DbConn, +) -> ApiResult<(SsoAuth, OIDCAuthenticatedUser)> { use openidconnect::OAuth2TokenResponse; - let (code, state) = decode_code_claims(wrapped_code, conn).await?; + let mut sso_auth = match SsoAuth::find(state, conn).await { + None => err!(format!("Invalid state cannot retrieve sso auth")), + Some(sso_auth) => sso_auth, + }; - if let Some(authenticated_user) = AC_CACHE.get(&state) { - return Ok(UserInformation { - state, - identifier: authenticated_user.identifier, - email: authenticated_user.email, - email_verified: authenticated_user.email_verified, - user_name: authenticated_user.user_name, - }); + if let Some(authenticated_user) = sso_auth.auth_response.clone() { + return Ok((sso_auth, authenticated_user)); } - let nonce = match SsoNonce::find(&state, conn).await { - None => err!(format!("Invalid state cannot retrieve nonce")), - Some(nonce) => nonce, + let code = match sso_auth.code_response.clone() { + Some(OIDCCodeWrapper::Ok { + code, + }) => code.clone(), + Some(OIDCCodeWrapper::Error { + error, + error_description, + }) => { + sso_auth.delete(conn).await?; + err!(format!("SSO authorization failed: {error}, {}", error_description.as_ref().unwrap_or(&String::new()))) + } + None => { + sso_auth.delete(conn).await?; + err!("Missing authorization provider return"); + } }; let client = Client::cached().await?; - let (token_response, id_claims) = client.exchange_code(code, nonce).await?; + let (token_response, id_claims) = client.exchange_code(code, client_verifier, &sso_auth).await?; let user_info = client.user_info(token_response.access_token().to_owned()).await?; @@ -300,7 +292,7 @@ pub async fn exchange_code(wrapped_code: &str, conn: &DbConn) -> ApiResult ApiResult ApiResult { - if let Err(err) = SsoNonce::delete(state, conn).await { - error!("Failed to delete database sso_nonce using {state}: {err}") +// User has passed 2FA flow we can delete auth info from database +pub async fn redeem( + device: &Device, + user: &User, + client_id: Option, + sso_user: Option, + sso_auth: SsoAuth, + auth_user: OIDCAuthenticatedUser, + conn: &DbConn, +) -> ApiResult { + sso_auth.delete(conn).await?; + + if sso_user.is_none() { + let user_sso = SsoUser { + user_uuid: user.uuid.clone(), + identifier: auth_user.identifier.clone(), + }; + user_sso.save(conn).await?; } - if let Some(au) = AC_CACHE.get(state) { - AC_CACHE.invalidate(state); - Ok(au) + if !CONFIG.sso_auth_only_not_session() { + let now = Utc::now(); + + let (ap_nbf, ap_exp) = + match (decode_token_claims("access_token", &auth_user.access_token), auth_user.expires_in) { + (Ok(ap), _) => (ap.nbf(), ap.exp), + (Err(_), Some(exp)) => (now.timestamp(), (now + exp).timestamp()), + _ => err!("Non jwt access_token and empty expires_in"), + }; + + let access_claims = + auth::LoginJwtClaims::new(device, user, ap_nbf, ap_exp, AuthMethod::Sso.scope_vec(), client_id, now); + + _create_auth_tokens(device, auth_user.refresh_token, access_claims, auth_user.access_token) } else { - err!("Failed to retrieve user info from sso cache") + Ok(AuthTokens::new(device, user, AuthMethod::Sso, client_id)) } } diff --git 
a/src/sso_client.rs b/src/sso_client.rs index 5dc614e4..0d73d906 100644 --- a/src/sso_client.rs +++ b/src/sso_client.rs @@ -7,8 +7,8 @@ use url::Url; use crate::{ api::{ApiResult, EmptyResult}, - db::models::SsoNonce, - sso::{OIDCCode, OIDCState}, + db::models::SsoAuth, + sso::{OIDCCode, OIDCCodeChallenge, OIDCCodeVerifier, OIDCState}, CONFIG, }; @@ -107,7 +107,11 @@ impl Client { } // The `state` is encoded using base64 to ensure no issue with providers (It contains the Organization identifier). - pub async fn authorize_url(state: OIDCState, redirect_uri: String) -> ApiResult<(Url, SsoNonce)> { + pub async fn authorize_url( + state: OIDCState, + client_challenge: OIDCCodeChallenge, + redirect_uri: String, + ) -> ApiResult<(Url, SsoAuth)> { let scopes = CONFIG.sso_scopes_vec().into_iter().map(Scope::new); let base64_state = data_encoding::BASE64.encode(state.to_string().as_bytes()); @@ -122,22 +126,21 @@ impl Client { .add_scopes(scopes) .add_extra_params(CONFIG.sso_authorize_extra_params_vec()); - let verifier = if CONFIG.sso_pkce() { - let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); - auth_req = auth_req.set_pkce_challenge(pkce_challenge); - Some(pkce_verifier.into_secret()) - } else { - None - }; + if CONFIG.sso_pkce() { + auth_req = auth_req + .add_extra_param::<&str, String>("code_challenge", client_challenge.clone().into()) + .add_extra_param("code_challenge_method", "S256"); + } let (auth_url, _, nonce) = auth_req.url(); - Ok((auth_url, SsoNonce::new(state, nonce.secret().clone(), verifier, redirect_uri))) + Ok((auth_url, SsoAuth::new(state, client_challenge, nonce.secret().clone(), redirect_uri))) } pub async fn exchange_code( &self, code: OIDCCode, - nonce: SsoNonce, + client_verifier: OIDCCodeVerifier, + sso_auth: &SsoAuth, ) -> ApiResult<( StandardTokenResponse< IdTokenFields< @@ -155,17 +158,21 @@ impl Client { let mut exchange = self.core_client.exchange_code(oidc_code); + let verifier = PkceCodeVerifier::new(client_verifier.into()); if CONFIG.sso_pkce() { - match nonce.verifier { - None => err!(format!("Missing verifier in the DB nonce table")), - Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)), + exchange = exchange.set_pkce_verifier(verifier); + } else { + let challenge = PkceCodeChallenge::from_code_verifier_sha256(&verifier); + if challenge.as_str() != String::from(sso_auth.client_challenge.clone()) { + err!(format!("PKCE client challenge failed")) + // Might need to notify admin ? how ? 
} } match exchange.request_async(&self.http_client).await { Err(err) => err!(format!("Failed to contact token endpoint: {:?}", err)), Ok(token_response) => { - let oidc_nonce = Nonce::new(nonce.nonce); + let oidc_nonce = Nonce::new(sso_auth.nonce.clone()); let id_token = match token_response.extra_fields().id_token() { None => err!("Token response did not contain an id_token"), diff --git a/src/static/scripts/admin.css b/src/static/scripts/admin.css index dd153b1d..0df56771 100644 --- a/src/static/scripts/admin.css +++ b/src/static/scripts/admin.css @@ -58,3 +58,20 @@ img { .abbr-badge { cursor: help; } + +.theme-icon, +.theme-icon-active { + display: inline-flex; + flex: 0 0 1.75em; + justify-content: center; +} + +.theme-icon svg, +.theme-icon-active svg { + width: 1.25em; + height: 1.25em; + min-width: 1.25em; + min-height: 1.25em; + display: block; + overflow: visible; +} \ No newline at end of file diff --git a/src/static/scripts/admin.js b/src/static/scripts/admin.js index 06d6ca5c..3f6bb1df 100644 --- a/src/static/scripts/admin.js +++ b/src/static/scripts/admin.js @@ -1,6 +1,6 @@ "use strict"; /* eslint-env es2017, browser */ -/* exported BASE_URL, _post */ +/* exported BASE_URL, _post _delete */ function getBaseUrl() { // If the base URL is `https://vaultwarden.example.com/base/path/admin/`, @@ -106,7 +106,11 @@ const showActiveTheme = (theme, focus = false) => { const themeSwitcherText = document.querySelector("#bd-theme-text"); const activeThemeIcon = document.querySelector(".theme-icon-active use"); const btnToActive = document.querySelector(`[data-bs-theme-value="${theme}"]`); - const svgOfActiveBtn = btnToActive.querySelector("span use").textContent; + if (!btnToActive) { + return; + } + const btnIconUse = btnToActive ? btnToActive.querySelector("[data-theme-icon-use]") : null; + const iconHref = btnIconUse ? 
btnIconUse.getAttribute("href") || btnIconUse.getAttribute("xlink:href") : null; document.querySelectorAll("[data-bs-theme-value]").forEach(element => { element.classList.remove("active"); @@ -115,7 +119,12 @@ const showActiveTheme = (theme, focus = false) => { btnToActive.classList.add("active"); btnToActive.setAttribute("aria-pressed", "true"); - activeThemeIcon.textContent = svgOfActiveBtn; + + if (iconHref && activeThemeIcon) { + activeThemeIcon.setAttribute("href", iconHref); + activeThemeIcon.setAttribute("xlink:href", iconHref); + } + const themeSwitcherLabel = `${themeSwitcherText.textContent} (${btnToActive.dataset.bsThemeValue})`; themeSwitcher.setAttribute("aria-label", themeSwitcherLabel); diff --git a/src/static/scripts/admin_diagnostics.js b/src/static/scripts/admin_diagnostics.js index 108034dd..5594b439 100644 --- a/src/static/scripts/admin_diagnostics.js +++ b/src/static/scripts/admin_diagnostics.js @@ -29,7 +29,7 @@ function isValidIp(ip) { return ipv4Regex.test(ip) || ipv6Regex.test(ip); } -function checkVersions(platform, installed, latest, commit=null, pre_release=false) { +function checkVersions(platform, installed, latest, commit=null, compare_order=0) { if (installed === "-" || latest === "-") { document.getElementById(`${platform}-failed`).classList.remove("d-none"); return; @@ -37,7 +37,7 @@ function checkVersions(platform, installed, latest, commit=null, pre_release=fal // Only check basic versions, no commit revisions if (commit === null || installed.indexOf("-") === -1) { - if (platform === "web" && pre_release === true) { + if (platform === "web" && compare_order === 1) { document.getElementById(`${platform}-prerelease`).classList.remove("d-none"); } else if (installed == latest) { document.getElementById(`${platform}-success`).classList.remove("d-none"); @@ -83,7 +83,7 @@ async function generateSupportString(event, dj) { let supportString = "### Your environment (Generated via diagnostics page)\n\n"; supportString += `* Vaultwarden version: v${dj.current_release}\n`; - supportString += `* Web-vault version: v${dj.web_vault_version}\n`; + supportString += `* Web-vault version: v${dj.active_web_release}\n`; supportString += `* OS/Arch: ${dj.host_os}/${dj.host_arch}\n`; supportString += `* Running within a container: ${dj.running_within_container} (Base: ${dj.container_base_image})\n`; supportString += `* Database type: ${dj.db_type}\n`; @@ -208,9 +208,9 @@ function initVersionCheck(dj) { } checkVersions("server", serverInstalled, serverLatest, serverLatestCommit); - const webInstalled = dj.web_vault_version; - const webLatest = dj.latest_web_build; - checkVersions("web", webInstalled, webLatest, null, dj.web_vault_pre_release); + const webInstalled = dj.active_web_release; + const webLatest = dj.latest_web_release; + checkVersions("web", webInstalled, webLatest, null, dj.web_vault_compare); } function checkDns(dns_resolved) { diff --git a/src/static/scripts/admin_users.js b/src/static/scripts/admin_users.js index be30e105..99e39aab 100644 --- a/src/static/scripts/admin_users.js +++ b/src/static/scripts/admin_users.js @@ -1,6 +1,6 @@ "use strict"; /* eslint-env es2017, browser, jquery */ -/* global _post:readable, BASE_URL:readable, reload:readable, jdenticon:readable */ +/* global _post:readable, _delete:readable BASE_URL:readable, reload:readable, jdenticon:readable */ function deleteUser(event) { event.preventDefault(); diff --git a/src/static/scripts/bootstrap.bundle.js b/src/static/scripts/bootstrap.bundle.js index 91eea7e7..93cbd3fe 100644 --- 
a/src/static/scripts/bootstrap.bundle.js +++ b/src/static/scripts/bootstrap.bundle.js @@ -1,5 +1,5 @@ /*! - * Bootstrap v5.3.7 (https://getbootstrap.com/) + * Bootstrap v5.3.8 (https://getbootstrap.com/) * Copyright 2011-2025 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) */ @@ -647,7 +647,7 @@ * Constants */ - const VERSION = '5.3.7'; + const VERSION = '5.3.8'; /** * Class definition @@ -3690,9 +3690,6 @@ this._element.setAttribute('aria-expanded', 'false'); Manipulator.removeDataAttribute(this._menu, 'popper'); EventHandler.trigger(this._element, EVENT_HIDDEN$5, relatedTarget); - - // Explicitly return focus to the trigger element - this._element.focus(); } _getConfig(config) { config = super._getConfig(config); diff --git a/src/static/scripts/bootstrap.css b/src/static/scripts/bootstrap.css index e9479ad9..b83f5079 100644 --- a/src/static/scripts/bootstrap.css +++ b/src/static/scripts/bootstrap.css @@ -1,6 +1,6 @@ @charset "UTF-8"; /*! - * Bootstrap v5.3.7 (https://getbootstrap.com/) + * Bootstrap v5.3.8 (https://getbootstrap.com/) * Copyright 2011-2025 The Bootstrap Authors * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) */ @@ -547,6 +547,10 @@ legend + * { -webkit-appearance: textfield; outline-offset: -2px; } +[type=search]::-webkit-search-cancel-button { + cursor: pointer; + filter: grayscale(1); +} /* rtl:raw: [type="tel"], @@ -6208,6 +6212,7 @@ textarea.form-control-lg { .spinner-grow, .spinner-border { display: inline-block; + flex-shrink: 0; width: var(--bs-spinner-width); height: var(--bs-spinner-height); vertical-align: var(--bs-spinner-vertical-align); diff --git a/src/static/scripts/datatables.css b/src/static/scripts/datatables.css index 4d927abf..af6a9b1e 100644 --- a/src/static/scripts/datatables.css +++ b/src/static/scripts/datatables.css @@ -4,20 +4,21 @@ * * To rebuild or modify this file with the latest versions of the included * software please visit: - * https://datatables.net/download/#bs5/dt-2.3.2 + * https://datatables.net/download/#bs5/dt-2.3.5 * * Included libraries: - * DataTables 2.3.2 + * DataTables 2.3.5 */ :root { --dt-row-selected: 13, 110, 253; --dt-row-selected-text: 255, 255, 255; - --dt-row-selected-link: 9, 10, 11; + --dt-row-selected-link: 228, 228, 228; --dt-row-stripe: 0, 0, 0; --dt-row-hover: 0, 0, 0; --dt-column-ordering: 0, 0, 0; --dt-header-align-items: center; + --dt-header-vertical-align: middle; --dt-html-background: white; } :root.dark { @@ -112,7 +113,7 @@ table.dataTable thead > tr > td.dt-ordering-asc span.dt-column-order, table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order { position: relative; width: 12px; - height: 20px; + height: 24px; } table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-orderable-desc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-asc span.dt-column-order:after, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:before, table.dataTable thead > tr > th.dt-ordering-desc span.dt-column-order:after, table.dataTable thead > tr > td.dt-orderable-asc span.dt-column-order:before, @@ -144,7 +145,8 @@ table.dataTable thead > tr > 
td.dt-ordering-asc span.dt-column-order:before, table.dataTable thead > tr > td.dt-ordering-desc span.dt-column-order:after { opacity: 0.6; } -table.dataTable thead > tr > th.sorting_desc_disabled span.dt-column-order:after, table.dataTable thead > tr > th.sorting_asc_disabled span.dt-column-order:before, +table.dataTable thead > tr > th.dt-orderable-none:not(.dt-ordering-asc, .dt-ordering-desc) span.dt-column-order:empty, table.dataTable thead > tr > th.sorting_desc_disabled span.dt-column-order:after, table.dataTable thead > tr > th.sorting_asc_disabled span.dt-column-order:before, +table.dataTable thead > tr > td.dt-orderable-none:not(.dt-ordering-asc, .dt-ordering-desc) span.dt-column-order:empty, table.dataTable thead > tr > td.sorting_desc_disabled span.dt-column-order:after, table.dataTable thead > tr > td.sorting_asc_disabled span.dt-column-order:before { display: none; @@ -340,6 +342,7 @@ table.dataTable thead td, table.dataTable tfoot th, table.dataTable tfoot td { text-align: left; + vertical-align: var(--dt-header-vertical-align); } table.dataTable thead th.dt-head-left, table.dataTable thead td.dt-head-left, @@ -422,10 +425,6 @@ table.dataTable tbody td.dt-body-nowrap { white-space: nowrap; } -:root { - --dt-header-align-items: flex-end; -} - /*! Bootstrap 5 integration for DataTables * * ©2020 SpryMedia Ltd, all rights reserved. @@ -453,7 +452,7 @@ table.table.dataTable > tbody > tr.selected > * { color: rgb(var(--dt-row-selected-text)); } table.table.dataTable > tbody > tr.selected a { - color: rgb(9, 10, 11); + color: rgb(228, 228, 228); color: rgb(var(--dt-row-selected-link)); } table.table.dataTable.table-striped > tbody > tr:nth-of-type(2n+1) > * { diff --git a/src/static/scripts/datatables.js b/src/static/scripts/datatables.js index 0ba22347..961af0b4 100644 --- a/src/static/scripts/datatables.js +++ b/src/static/scripts/datatables.js @@ -4,13 +4,13 @@ * * To rebuild or modify this file with the latest versions of the included * software please visit: - * https://datatables.net/download/#bs5/dt-2.3.2 + * https://datatables.net/download/#bs5/dt-2.3.5 * * Included libraries: - * DataTables 2.3.2 + * DataTables 2.3.5 */ -/*! DataTables 2.3.2 +/*! DataTables 2.3.5 * © SpryMedia Ltd - datatables.net/license */ @@ -178,6 +178,9 @@ this.id = sId; } + // Replacing an existing colgroup with our own. Not ideal, but a merge could take a lot of code + $this.children('colgroup').remove(); + /* Create the settings object for this table and set some of the default parameters */ var oSettings = $.extend( true, {}, DataTable.models.oSettings, { "sDestroyWidth": $this[0].style.width, @@ -513,7 +516,7 @@ * * @type string */ - builder: "bs5/dt-2.3.2", + builder: "bs5/dt-2.3.5", /** * Buttons. For use with the Buttons extension for DataTables. This is @@ -743,7 +746,7 @@ * * The extension options for ordering of data available here is complimentary * to the default type based ordering that DataTables typically uses. It - * allows much greater control over the the data that is being used to + * allows much greater control over the data that is being used to * order a column, but is necessarily therefore more complex. * * This type of ordering is useful if you want to do ordering based on data @@ -902,7 +905,7 @@ * `{type}-asc` and `{type}-desc` together. It is generally recommended * that only `{type}-pre` is used, as this provides the optimal * implementation in terms of speed, although the others are provided - * for compatibility with existing Javascript sort functions. 
+ * for compatibility with existing JavaScript sort functions. * * `{type}-pre`: Functions defined take a single parameter: * @@ -912,7 +915,7 @@ * * * `{*}` Data to be sorted upon * - * `{type}-asc` and `{type}-desc`: Functions are typical Javascript sort + * `{type}-asc` and `{type}-desc`: Functions are typical JavaScript sort * functions, taking two parameters: * * 1. `{*}` Data to compare to the second parameter @@ -1136,7 +1139,7 @@ }; // Convert from a formatted number with characters other than `.` as the - // decimal place, to a Javascript number + // decimal place, to a JavaScript number var _numToDecimal = function ( num, decimalPoint ) { // Cache created regular expressions for speed as this function is called often if ( ! _re_dic[ decimalPoint ] ) { @@ -1202,19 +1205,19 @@ var _pluck = function ( a, prop, prop2 ) { var out = []; - var i=0, ien=a.length; + var i=0, iLen=a.length; // Could have the test in the loop for slightly smaller code, but speed // is essential here if ( prop2 !== undefined ) { - for ( ; i') .html( columns[i][titleProp] || '' ) .appendTo( row ); @@ -3492,6 +3495,14 @@ { var iDataIndex = aiDisplay[j]; var aoData = oSettings.aoData[ iDataIndex ]; + + // Row has been deleted - can't be displayed + if (aoData === null) + { + continue; + } + + // Row node hasn't been created yet if ( aoData.nTr === null ) { _fnCreateTr( oSettings, iDataIndex ); @@ -3620,7 +3631,7 @@ return $( '' ) .append( $('', { - 'colSpan': _fnVisbleColumns( settings ), + 'colSpan': _fnVisibleColumns( settings ), 'class': settings.oClasses.empty.row } ).html( zero ) )[0]; } @@ -3804,7 +3815,7 @@ var line = row[ item ].contents; - for ( var i=0, ien=line.length ; i divBodyEl.clientHeight || divBody.css('overflow-y') == "scroll"; var paddingSide = 'padding' + (browser.bScrollbarLeft ? 
@@ -5437,7 +5428,7 @@
 visibleColumns = _fnGetColumns( settings, 'bVisible' ),
 tableWidthAttr = table.getAttribute('width'), // from DOM element
 tableContainer = table.parentNode,
- i, column, columnIdx;
+ i, j, column, columnIdx;
 var styleWidth = table.style.width;
 var containerWidth = _fnWrapperWidth(settings);
@@ -5471,17 +5462,16 @@
 false
 );
- // Construct a single row, worst case, table with the widest
- // node in the data, assign any user defined widths, then insert it into
- // the DOM and allow the browser to do all the hard work of calculating
- // table widths
+ // Construct a worst case table with the widest, assign any user defined
+ // widths, then insert it into the DOM and allow the browser to do all
+ // the hard work of calculating table widths
 var tmpTable = $(table.cloneNode())
 .css( 'visibility', 'hidden' )
+ .css( 'margin', 0 )
 .removeAttr( 'id' );
 // Clean up the table body
- tmpTable.append('')
- var tr = $('').appendTo( tmpTable.find('tbody') );
+ tmpTable.append('')
 // Clone the table header and footer - we can't use the header / footer
 // from the cloned table, since if scrolling is active, the table's
@@ -5521,23 +5511,37 @@
 }
 } );
- // Find the widest piece of data for each column and put it into the table
+ // Get the widest strings for each of the visible columns and add them to
+ // our table to create a "worst case"
+ var longestData = [];
+ for ( i=0 ; i
')
- .addClass(autoClass)
- .addClass(column.sClass)
- .append(insert)
- .appendTo(tr);
+ longestData.push(_fnGetWideStrings(settings, visibleColumns[i]));
+ }
+
+ if (longestData.length) {
+ for ( i=0 ; i
').appendTo( tmpTable.find('tbody') );
+
+ for ( j=0 ; j
')
+ .addClass(autoClass)
+ .addClass(column.sClass)
+ .append(insert)
+ .appendTo(tr);
+ }
+ }
 }
 // Tidy the temporary table - remove name attributes so there aren't
@@ -5676,20 +5680,32 @@
 }
 /**
- * Get the maximum strlen for each data column
+ * Get the widest strings for each column.
+ *
+ * It is very difficult to determine what the widest string actually is due to variable character
+ * width and kerning. Doing an exact calculation with the DOM or even Canvas would kill performance
+ * and this is a critical point, so we use two techniques to determine a collection of the longest
+ * strings from the column, which will likely contain the widest strings:
+ *
+ * 1) Get the top three longest strings from the column
+ * 2) Get the top three widest words (i.e. an unbreakable phrase)
+ *
 * @param {object} settings dataTables settings object
 * @param {int} colIdx column of interest
- * @returns {string} string of the max length
+ * @returns {string[]} Array of the longest strings
 * @memberof DataTable#oApi
 */
- function _fnGetMaxLenString( settings, colIdx )
+ function _fnGetWideStrings( settings, colIdx )
 {
 var column = settings.aoColumns[colIdx];
- if (! column.maxLenString) {
- var s, max='', maxLen = -1;
-
- for ( var i=0, ien=settings.aiDisplayMaster.length ; i
 maxLen ) {
- // We want the HTML in the string, but the length that
- // is important is the stripped string
- max = cellString;
- maxLen = s.length;
- }
+ collection.push({
+ str: s,
+ len: s.length
+ });
+
+ allStrings.push(s);
+ }
+
+ // Order and then cut down to the size we need
+ collection
+ .sort(function (a, b) {
+ return b.len - a.len;
+ })
+ .splice(3);
+
+ column.wideStrings = collection.map(function (item) {
+ return item.str;
+ });
+
+ // Longest unbroken string
+ let parts = allStrings.join(' ').split(' ');
+
+ parts.sort(function (a, b) {
+ return b.length - a.length;
+ });
+
+ if (parts.length) {
+ column.wideStrings.push(parts[0]);
+ }
+
+ if (parts.length > 1) {
+ column.wideStrings.push(parts[1]);
 }
- column.maxLenString = max;
+ if (parts.length > 2) {
+ column.wideStrings.push(parts[3]);
+ }
 }
- return column.maxLenString;
+ return column.wideStrings;
 }
@@ -5811,7 +5855,7 @@
 : [column];
 if ( columns.length ) {
- for ( var i=0, ien=columns.length ; i
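The _fnGetWideStrings change above swaps the old single longest-string scan for a small set of width candidates: the longest whole strings plus the longest unbreakable words. The snippet below is only a rough, standalone sketch of that heuristic; the function and variable names are illustrative assumptions, not DataTables internals.

// Illustrative sketch: pick likely-widest candidates for a column from its
// (already stripped) cell strings, instead of measuring every cell.
function pickWideStrings(strings, count) {
    // The longest whole strings usually contain the widest rendered content
    var byLength = strings.slice().sort(function (a, b) {
        return b.length - a.length;
    });
    var candidates = byLength.slice(0, count);

    // The longest single words matter too, because they cannot be wrapped
    var words = strings.join(' ').split(' ').sort(function (a, b) {
        return b.length - a.length;
    });

    return candidates.concat(words.slice(0, count));
}

// Example: pickWideStrings(['short', 'a much longer description', 'unbreakable-identifier'], 3)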
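For the ordering plug-in comments touched earlier in this file (the `{type}-pre`, `{type}-asc` and `{type}-desc` hooks), such plug-ins are registered through DataTable.ext.type.order. The example below is only a hedged sketch of that registration style; the "currency-pre" name and the currency format are made up for illustration and are not part of this change.

// Hypothetical `{type}-pre` ordering plug-in: reduce a formatted currency
// string to a plain number so the column can be ordered numerically.
DataTable.ext.type.order['currency-pre'] = function (data) {
    var num = typeof data === 'string'
        ? parseFloat(data.replace(/[^\d.-]/g, ''))
        : data;

    return isNaN(num) ? 0 : num;
};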