Browse Source

Optimizations and build speedup

With this commit I have changed several components to be more efficient.
This can mean fewer generated llvm-lines or fewer `clone()` calls.

 ### Config
- Re-ordered the `make_config` macro to be more efficient
- Created a custom Deserializer for `ConfigBuilder`, which needs less code and is more efficient
- Use structs for the `prepare_json` function instead of generating a custom JSON object.
  This generates less code and is more efficient.
- Updated the `get_support_string` function to handle the masking differently.
  This generates less code and also was able to remove some sub-macro-calls

 ### Error
- Added an extra new call to prevent duplicate Strings in generated macro code.
  This generates fewer llvm-lines and seems to be more efficient.
- Created a custom Serializer for `ApiError` and `CompactApiError`
  This makes the struct smaller in size, which is better for memory usage, and also generates fewer llvm-lines.

 ### General
- Removed `once_cell` and replaced it all with Rust's std `LazyLock`
- Added and fixed some Clippy lints which reduced `clone()` calls for example.
- Updated build profiles for more efficiency
  Also added a new profile specifically for CI, which should decrease the build-check time
- Updated several GitHub Workflows for better security and use the new `ci` build profile
- Updated to Rust v1.90.0, which uses the new `rust-lld` linker and should help speed up builds
- Updated the Cargo.toml for all crates to better use the `workspace` variables
- Added a `typos` Workflow and Pre-Commit hook, which should help in detecting spelling errors.
  Also fixed a few found by it.

Signed-off-by: BlackDex <black.dex@gmail.com>
pull/6339/head
BlackDex 4 weeks ago
parent
commit
783c3c23cd
No known key found for this signature in database GPG Key ID: 58C80A2AA6C765E1
  1. 41
      .github/workflows/build.yml
  2. 2
      .github/workflows/check-templates.yml
  3. 7
      .github/workflows/hadolint.yml
  4. 20
      .github/workflows/release.yml
  5. 2
      .github/workflows/releasecache-cleanup.yml
  6. 8
      .github/workflows/trivy.yml
  7. 22
      .github/workflows/typos.yml
  8. 4
      .github/workflows/zizmor.yml
  9. 7
      .pre-commit-config.yaml
  10. 26
      .typos.toml
  11. 1
      Cargo.lock
  12. 77
      Cargo.toml
  13. 2
      docker/DockerSettings.yaml
  14. 8
      docker/Dockerfile.alpine
  15. 2
      docker/Dockerfile.debian
  16. 4
      docker/README.md
  17. 6
      macros/Cargo.toml
  18. 2
      rust-toolchain.toml
  19. 44
      src/api/admin.rs
  20. 19
      src/api/core/accounts.rs
  21. 2
      src/api/core/ciphers.rs
  22. 2
      src/api/core/emergency_access.rs
  23. 6
      src/api/core/organizations.rs
  24. 2
      src/api/core/public.rs
  25. 21
      src/api/core/sends.rs
  26. 2
      src/api/core/two_factor/authenticator.rs
  27. 2
      src/api/core/two_factor/mod.rs
  28. 22
      src/api/icons.rs
  29. 4
      src/api/identity.rs
  30. 44
      src/api/notifications.rs
  31. 10
      src/api/push.rs
  32. 62
      src/auth.rs
  33. 318
      src/config.rs
  34. 4
      src/crypto.rs
  35. 4
      src/db/models/attachment.rs
  36. 4
      src/db/models/device.rs
  37. 2
      src/db/models/organization.rs
  38. 2
      src/db/models/send.rs
  39. 2
      src/db/models/user.rs
  40. 168
      src/error.rs
  41. 20
      src/http_client.rs
  42. 4
      src/mail.rs
  43. 2
      src/main.rs
  44. 7
      src/ratelimit.rs
  45. 17
      src/sso.rs
  46. 18
      src/sso_client.rs
  47. 2
      src/util.rs

41
.github/workflows/build.yml

@ -31,9 +31,6 @@ on:
jobs: jobs:
build: build:
name: Build and Test ${{ matrix.channel }} name: Build and Test ${{ matrix.channel }}
permissions:
actions: write
contents: read
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 120 timeout-minutes: 120
# Make warnings errors, this is to prevent warnings slipping through. # Make warnings errors, this is to prevent warnings slipping through.
@ -69,9 +66,9 @@ jobs:
CHANNEL: ${{ matrix.channel }} CHANNEL: ${{ matrix.channel }}
run: | run: |
if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then if [[ "${CHANNEL}" == 'rust-toolchain' ]]; then
RUST_TOOLCHAIN="$(grep -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)" RUST_TOOLCHAIN="$(grep -m1 -oP 'channel.*"(\K.*?)(?=")' rust-toolchain.toml)"
elif [[ "${CHANNEL}" == 'msrv' ]]; then elif [[ "${CHANNEL}" == 'msrv' ]]; then
RUST_TOOLCHAIN="$(grep -oP 'rust-version.*"(\K.*?)(?=")' Cargo.toml)" RUST_TOOLCHAIN="$(grep -m1 -oP 'rust-version\s.*"(\K.*?)(?=")' Cargo.toml)"
else else
RUST_TOOLCHAIN="${CHANNEL}" RUST_TOOLCHAIN="${CHANNEL}"
fi fi
@ -81,7 +78,7 @@ jobs:
# Only install the clippy and rustfmt components on the default rust-toolchain # Only install the clippy and rustfmt components on the default rust-toolchain
- name: "Install rust-toolchain version" - name: "Install rust-toolchain version"
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2 uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
if: ${{ matrix.channel == 'rust-toolchain' }} if: ${{ matrix.channel == 'rust-toolchain' }}
with: with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@ -91,7 +88,7 @@ jobs:
# Install the any other channel to be used for which we do not execute clippy and rustfmt # Install the any other channel to be used for which we do not execute clippy and rustfmt
- name: "Install MSRV version" - name: "Install MSRV version"
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2 uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
if: ${{ matrix.channel != 'rust-toolchain' }} if: ${{ matrix.channel != 'rust-toolchain' }}
with: with:
toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@ -116,60 +113,60 @@ jobs:
# Enable Rust Caching # Enable Rust Caching
- name: Rust Caching - name: Rust Caching
uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
with: with:
# Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes. # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
# Like changing the build host from Ubuntu 20.04 to 22.04 for example. # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
# Only update when really needed! Use a <year>.<month>[.<inc>] format. # Only update when really needed! Use a <year>.<month>[.<inc>] format.
prefix-key: "v2023.07-rust" prefix-key: "v2025.09-rust"
# End Enable Rust Caching # End Enable Rust Caching
# Run cargo tests # Run cargo tests
# First test all features together, afterwards test them separately. # First test all features together, afterwards test them separately.
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc,query_logger" - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
id: test_sqlite_mysql_postgresql_mimalloc_logger id: test_sqlite_mysql_postgresql_mimalloc_s3
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features sqlite,mysql,postgresql,enable_mimalloc,query_logger cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
- name: "test features: sqlite,mysql,postgresql,enable_mimalloc" - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
id: test_sqlite_mysql_postgresql_mimalloc id: test_sqlite_mysql_postgresql_mimalloc
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features sqlite,mysql,postgresql,enable_mimalloc cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc
- name: "test features: sqlite,mysql,postgresql" - name: "test features: sqlite,mysql,postgresql"
id: test_sqlite_mysql_postgresql id: test_sqlite_mysql_postgresql
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features sqlite,mysql,postgresql cargo test --profile ci --features sqlite,mysql,postgresql
- name: "test features: sqlite" - name: "test features: sqlite"
id: test_sqlite id: test_sqlite
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features sqlite cargo test --profile ci --features sqlite
- name: "test features: mysql" - name: "test features: mysql"
id: test_mysql id: test_mysql
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features mysql cargo test --profile ci --features mysql
- name: "test features: postgresql" - name: "test features: postgresql"
id: test_postgresql id: test_postgresql
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
run: | run: |
cargo test --features postgresql cargo test --profile ci --features postgresql
# End Run cargo tests # End Run cargo tests
# Run cargo clippy, and fail on warnings # Run cargo clippy, and fail on warnings
- name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc" - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
id: clippy id: clippy
if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }} if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
run: | run: |
cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
# End Run cargo clippy # End Run cargo clippy
@ -187,7 +184,7 @@ jobs:
- name: "Some checks failed" - name: "Some checks failed"
if: ${{ failure() }} if: ${{ failure() }}
env: env:
TEST_DB_M_L: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_logger.outcome }} TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }} TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }} TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
TEST_SQLITE: ${{ steps.test_sqlite.outcome }} TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
@ -200,13 +197,13 @@ jobs:
echo "" >> "${GITHUB_STEP_SUMMARY}" echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}" echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}" echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc,query_logger)|${TEST_DB_M_L}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}" echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}" echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}" echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
echo "" >> "${GITHUB_STEP_SUMMARY}" echo "" >> "${GITHUB_STEP_SUMMARY}"
echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}" echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"

2
.github/workflows/check-templates.yml

@ -6,8 +6,6 @@ on: [ push, pull_request ]
jobs: jobs:
docker-templates: docker-templates:
name: Validate docker templates name: Validate docker templates
permissions:
contents: read
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30

7
.github/workflows/hadolint.yml

@ -1,13 +1,12 @@
name: Hadolint name: Hadolint
permissions: {}
on: [ push, pull_request ] on: [ push, pull_request ]
permissions: {}
jobs: jobs:
hadolint: hadolint:
name: Validate Dockerfile syntax name: Validate Dockerfile syntax
permissions:
contents: read
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
@ -31,7 +30,7 @@ jobs:
sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \ sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
sudo chmod +x /usr/local/bin/hadolint sudo chmod +x /usr/local/bin/hadolint
env: env:
HADOLINT_VERSION: 2.12.0 HADOLINT_VERSION: 2.14.0
# End Download hadolint # End Download hadolint
# Checkout the repo # Checkout the repo
- name: Checkout - name: Checkout

20
.github/workflows/release.yml

@ -21,10 +21,10 @@ jobs:
name: Build Vaultwarden containers name: Build Vaultwarden containers
if: ${{ github.repository == 'dani-garcia/vaultwarden' }} if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
permissions: permissions:
packages: write packages: write # Needed to upload packages and artifacts
contents: read contents: read
attestations: write attestations: write # Needed to generate an artifact attestation for a build
id-token: write id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 120 timeout-minutes: 120
# Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
@ -103,7 +103,7 @@ jobs:
# Login to Docker Hub # Login to Docker Hub
- name: Login to Docker Hub - name: Login to Docker Hub
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
@ -119,7 +119,7 @@ jobs:
# Login to GitHub Container Registry # Login to GitHub Container Registry
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@ -136,7 +136,7 @@ jobs:
# Login to Quay.io # Login to Quay.io
- name: Login to Quay.io - name: Login to Quay.io
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with: with:
registry: quay.io registry: quay.io
username: ${{ secrets.QUAY_USERNAME }} username: ${{ secrets.QUAY_USERNAME }}
@ -204,7 +204,7 @@ jobs:
# Attest container images # Attest container images
- name: Attest - docker.io - ${{ matrix.base_image }} - name: Attest - docker.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} if: ${{ env.HAVE_DOCKERHUB_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with: with:
subject-name: ${{ vars.DOCKERHUB_REPO }} subject-name: ${{ vars.DOCKERHUB_REPO }}
subject-digest: ${{ env.DIGEST_SHA }} subject-digest: ${{ env.DIGEST_SHA }}
@ -212,7 +212,7 @@ jobs:
- name: Attest - ghcr.io - ${{ matrix.base_image }} - name: Attest - ghcr.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} if: ${{ env.HAVE_GHCR_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with: with:
subject-name: ${{ vars.GHCR_REPO }} subject-name: ${{ vars.GHCR_REPO }}
subject-digest: ${{ env.DIGEST_SHA }} subject-digest: ${{ env.DIGEST_SHA }}
@ -220,7 +220,7 @@ jobs:
- name: Attest - quay.io - ${{ matrix.base_image }} - name: Attest - quay.io - ${{ matrix.base_image }}
if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}} if: ${{ env.HAVE_QUAY_LOGIN == 'true' && steps.bake_vw.outputs.metadata != ''}}
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with: with:
subject-name: ${{ vars.QUAY_REPO }} subject-name: ${{ vars.QUAY_REPO }}
subject-digest: ${{ env.DIGEST_SHA }} subject-digest: ${{ env.DIGEST_SHA }}
@ -299,7 +299,7 @@ jobs:
path: vaultwarden-armv6-${{ matrix.base_image }} path: vaultwarden-armv6-${{ matrix.base_image }}
- name: "Attest artifacts ${{ matrix.base_image }}" - name: "Attest artifacts ${{ matrix.base_image }}"
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
with: with:
subject-path: vaultwarden-* subject-path: vaultwarden-*
# End Upload artifacts to Github Actions # End Upload artifacts to Github Actions

2
.github/workflows/releasecache-cleanup.yml

@ -16,7 +16,7 @@ jobs:
releasecache-cleanup: releasecache-cleanup:
name: Releasecache Cleanup name: Releasecache Cleanup
permissions: permissions:
packages: write packages: write # To be able to cleanup old caches
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
continue-on-error: true continue-on-error: true
timeout-minutes: 30 timeout-minutes: 30

8
.github/workflows/trivy.yml

@ -23,9 +23,7 @@ jobs:
if: ${{ github.repository == 'dani-garcia/vaultwarden' }} if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
name: Trivy Scan name: Trivy Scan
permissions: permissions:
contents: read security-events: write # To write the security report
actions: read
security-events: write
runs-on: ubuntu-24.04 runs-on: ubuntu-24.04
timeout-minutes: 30 timeout-minutes: 30
@ -36,7 +34,7 @@ jobs:
persist-credentials: false persist-credentials: false
- name: Run Trivy vulnerability scanner - name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.0 + b6643a2 uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # 0.33.1
env: env:
TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2 TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2,public.ecr.aws/aquasecurity/trivy-db:2,ghcr.io/aquasecurity/trivy-db:2
TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1 TRIVY_JAVA_DB_REPOSITORY: docker.io/aquasec/trivy-java-db:1,public.ecr.aws/aquasecurity/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1
@ -48,6 +46,6 @@ jobs:
severity: CRITICAL,HIGH severity: CRITICAL,HIGH
- name: Upload Trivy scan results to GitHub Security tab - name: Upload Trivy scan results to GitHub Security tab
uses: github/codeql-action/upload-sarif@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11 uses: github/codeql-action/upload-sarif@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
with: with:
sarif_file: 'trivy-results.sarif' sarif_file: 'trivy-results.sarif'

22
.github/workflows/typos.yml

@ -0,0 +1,22 @@
name: Code Spell Checking
on: [ push, pull_request ]
permissions: {}
jobs:
typos:
name: Run typos spell checking
runs-on: ubuntu-24.04
timeout-minutes: 30
steps:
# Checkout the repo
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
with:
persist-credentials: false
# End Checkout the repo
# When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
- name: Spell Check Repo
uses: crate-ci/typos@40156d6074bf731adb169cfb8234954971dbc487 # v1.37.1

4
.github/workflows/zizmor.yml

@ -13,7 +13,7 @@ jobs:
name: Run zizmor name: Run zizmor
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
security-events: write security-events: write # To write the security report
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
@ -21,7 +21,7 @@ jobs:
persist-credentials: false persist-credentials: false
- name: Run zizmor - name: Run zizmor
uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2 uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
with: with:
# intentionally not scanning the entire repository, # intentionally not scanning the entire repository,
# since it contains integration tests. # since it contains integration tests.

7
.pre-commit-config.yaml

@ -1,7 +1,7 @@
--- ---
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0 rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
hooks: hooks:
- id: check-yaml - id: check-yaml
- id: check-json - id: check-json
@ -50,3 +50,8 @@ repos:
args: args:
- "-c" - "-c"
- "cd docker && make" - "cd docker && make"
# When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
- repo: https://github.com/crate-ci/typos
rev: 40156d6074bf731adb169cfb8234954971dbc487 # v1.37.1
hooks:
- id: typos

26
.typos.toml

@ -0,0 +1,26 @@
[files]
extend-exclude = [
".git/",
"playwright/",
"*.js", # Ignore all JavaScript files
"!admin*.js", # Except our own JavaScript files
]
ignore-hidden = false
[default]
extend-ignore-re = [
# We use this in place of the reserved type identifier at some places
"typ",
# In SMTP it's called HELO, so ignore it
"(?i)helo_name",
"Server name sent during.+HELO",
# COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
"COSEKey",
"COSEAlgorithm",
# Ignore this specific string as it's valid
"Ensure they are valid OTPs",
# This word is misspelled upstream
# https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
# https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
"AuthRequestResponseRecieved",
]

1
Cargo.lock

@ -5736,7 +5736,6 @@ dependencies = [
"mini-moka", "mini-moka",
"num-derive", "num-derive",
"num-traits", "num-traits",
"once_cell",
"opendal", "opendal",
"openidconnect", "openidconnect",
"openssl", "openssl",

77
Cargo.toml

@ -1,3 +1,10 @@
[workspace.package]
edition = "2021"
rust-version = "1.88.0"
license = "AGPL-3.0-only"
repository = "https://github.com/dani-garcia/vaultwarden"
publish = false
[workspace] [workspace]
members = ["macros"] members = ["macros"]
@ -5,15 +12,15 @@ members = ["macros"]
name = "vaultwarden" name = "vaultwarden"
version = "1.0.0" version = "1.0.0"
authors = ["Daniel García <dani-garcia@users.noreply.github.com>"] authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
edition = "2021"
rust-version = "1.87.0"
resolver = "2"
repository = "https://github.com/dani-garcia/vaultwarden"
readme = "README.md" readme = "README.md"
license = "AGPL-3.0-only"
publish = false
build = "build.rs" build = "build.rs"
resolver = "2"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true
[features] [features]
# default = ["sqlite"] # default = ["sqlite"]
@ -57,9 +64,6 @@ tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and
# A `dotenv` implementation for Rust # A `dotenv` implementation for Rust
dotenvy = { version = "0.15.7", default-features = false } dotenvy = { version = "0.15.7", default-features = false }
# Lazy initialization
once_cell = "1.21.3"
# Numerical libraries # Numerical libraries
num-traits = "0.2.19" num-traits = "0.2.19"
num-derive = "0.4.2" num-derive = "0.4.2"
@ -209,24 +213,15 @@ reqsign = { version = "0.16.5", optional = true }
strip = "debuginfo" strip = "debuginfo"
lto = "fat" lto = "fat"
codegen-units = 1 codegen-units = 1
debug = "line-tables-only"
# A little bit of a speedup
[profile.dev]
split-debuginfo = "unpacked"
# Always build argon2 using opt-level 3
# This is a huge speed improvement during testing
[profile.dev.package.argon2]
opt-level = 3
# Optimize for size # Optimize for size
[profile.release-micro] [profile.release-micro]
inherits = "release" inherits = "release"
opt-level = "z"
strip = "symbols" strip = "symbols"
lto = "fat" opt-level = "z"
codegen-units = 1
panic = "abort" panic = "abort"
debug = false
# Profile for systems with low resources # Profile for systems with low resources
# It will use less resources during build # It will use less resources during build
@ -236,6 +231,32 @@ strip = "symbols"
lto = "thin" lto = "thin"
codegen-units = 16 codegen-units = 16
# Used for profiling and debugging like valgrind or heaptrack
# Inherits release to be sure all optimizations have been done
[profile.dbg]
inherits = "release"
strip = "none"
split-debuginfo = "off"
debug = "full"
# A little bit of a speedup for generic building
[profile.dev]
split-debuginfo = "unpacked"
debug = "line-tables-only"
# Used for CI builds to improve compile time
[profile.ci]
inherits = "dev"
debug = false
debug-assertions = false
strip = "symbols"
panic = "abort"
# Always build argon2 using opt-level 3
# This is a huge speed improvement during testing
[profile.dev.package.argon2]
opt-level = 3
# Linting config # Linting config
# https://doc.rust-lang.org/rustc/lints/groups.html # https://doc.rust-lang.org/rustc/lints/groups.html
[workspace.lints.rust] [workspace.lints.rust]
@ -245,15 +266,16 @@ non_ascii_idents = "forbid"
# Deny # Deny
deprecated_in_future = "deny" deprecated_in_future = "deny"
deprecated_safe = { level = "deny", priority = -1 }
future_incompatible = { level = "deny", priority = -1 } future_incompatible = { level = "deny", priority = -1 }
keyword_idents = { level = "deny", priority = -1 } keyword_idents = { level = "deny", priority = -1 }
let_underscore = { level = "deny", priority = -1 } let_underscore = { level = "deny", priority = -1 }
nonstandard_style = { level = "deny", priority = -1 }
noop_method_call = "deny" noop_method_call = "deny"
refining_impl_trait = { level = "deny", priority = -1 } refining_impl_trait = { level = "deny", priority = -1 }
rust_2018_idioms = { level = "deny", priority = -1 } rust_2018_idioms = { level = "deny", priority = -1 }
rust_2021_compatibility = { level = "deny", priority = -1 } rust_2021_compatibility = { level = "deny", priority = -1 }
rust_2024_compatibility = { level = "deny", priority = -1 } rust_2024_compatibility = { level = "deny", priority = -1 }
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
single_use_lifetimes = "deny" single_use_lifetimes = "deny"
trivial_casts = "deny" trivial_casts = "deny"
trivial_numeric_casts = "deny" trivial_numeric_casts = "deny"
@ -263,7 +285,8 @@ unused_lifetimes = "deny"
unused_qualifications = "deny" unused_qualifications = "deny"
variant_size_differences = "deny" variant_size_differences = "deny"
# Allow the following lints since these cause issues with Rust v1.84.0 or newer # Allow the following lints since these cause issues with Rust v1.84.0 or newer
# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues # Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
if_let_rescope = "allow" if_let_rescope = "allow"
tail_expr_drop_order = "allow" tail_expr_drop_order = "allow"
@ -277,10 +300,12 @@ todo = "warn"
result_large_err = "allow" result_large_err = "allow"
# Deny # Deny
branches_sharing_code = "deny"
case_sensitive_file_extension_comparisons = "deny" case_sensitive_file_extension_comparisons = "deny"
cast_lossless = "deny" cast_lossless = "deny"
clone_on_ref_ptr = "deny" clone_on_ref_ptr = "deny"
equatable_if_let = "deny" equatable_if_let = "deny"
excessive_precision = "deny"
filter_map_next = "deny" filter_map_next = "deny"
float_cmp_const = "deny" float_cmp_const = "deny"
implicit_clone = "deny" implicit_clone = "deny"
@ -294,15 +319,19 @@ manual_instant_elapsed = "deny"
manual_string_new = "deny" manual_string_new = "deny"
match_wildcard_for_single_variants = "deny" match_wildcard_for_single_variants = "deny"
mem_forget = "deny" mem_forget = "deny"
needless_borrow = "deny"
needless_collect = "deny"
needless_continue = "deny" needless_continue = "deny"
needless_lifetimes = "deny" needless_lifetimes = "deny"
option_option = "deny" option_option = "deny"
redundant_clone = "deny"
string_add_assign = "deny" string_add_assign = "deny"
unnecessary_join = "deny" unnecessary_join = "deny"
unnecessary_self_imports = "deny" unnecessary_self_imports = "deny"
unnested_or_patterns = "deny" unnested_or_patterns = "deny"
unused_async = "deny" unused_async = "deny"
unused_self = "deny" unused_self = "deny"
useless_let_if_seq = "deny"
verbose_file_reads = "deny" verbose_file_reads = "deny"
zero_sized_map_values = "deny" zero_sized_map_values = "deny"

2
docker/DockerSettings.yaml

@ -5,7 +5,7 @@ vault_image_digest: "sha256:41c2b51c87882248f405d5a0ab37210d2672a312ec5d4f3b9afc
# We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
# https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894" xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
rust_version: 1.89.0 # Rust version to be used rust_version: 1.90.0 # Rust version to be used
debian_version: trixie # Debian release name to be used debian_version: trixie # Debian release name to be used
alpine_version: "3.22" # Alpine version to be used alpine_version: "3.22" # Alpine version to be used
# For which platforms/architectures will we try to build images # For which platforms/architectures will we try to build images

8
docker/Dockerfile.alpine

@ -32,10 +32,10 @@ FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:41c2b51c87882
########################## ALPINE BUILD IMAGES ########################## ########################## ALPINE BUILD IMAGES ##########################
## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
## And for Alpine we define all build images here, they will only be loaded when actually used ## And for Alpine we define all build images here, they will only be loaded when actually used
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.90.0 AS build_amd64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.90.0 AS build_arm64
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.90.0 AS build_armv7
FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6 FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.90.0 AS build_armv6
########################## BUILD IMAGE ########################## ########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006 # hadolint ignore=DL3006

2
docker/Dockerfile.debian

@ -36,7 +36,7 @@ FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bd
########################## BUILD IMAGE ########################## ########################## BUILD IMAGE ##########################
# hadolint ignore=DL3006 # hadolint ignore=DL3006
FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.90.0-slim-trixie AS build
COPY --from=xx / / COPY --from=xx / /
ARG TARGETARCH ARG TARGETARCH
ARG TARGETVARIANT ARG TARGETVARIANT

4
docker/README.md

@ -116,7 +116,7 @@ docker/bake.sh
``` ```
You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br> You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
This will also append those values to the tag so you can see the builded container when running `docker images`. This will also append those values to the tag so you can see the built container when running `docker images`.
You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use. You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use.
```bash ```bash
@ -162,7 +162,7 @@ You can append extra arguments after the target if you want. This can be useful
For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br> For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br>
### Testing podman builded images ### Testing podman built images
The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that. The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.

6
macros/Cargo.toml

@ -1,7 +1,11 @@
[package] [package]
name = "macros" name = "macros"
version = "0.1.0" version = "0.1.0"
edition = "2021" repository.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
publish.workspace = true
[lib] [lib]
name = "macros" name = "macros"

2
rust-toolchain.toml

@ -1,4 +1,4 @@
[toolchain] [toolchain]
channel = "1.89.0" channel = "1.90.0"
components = [ "rustfmt", "clippy" ] components = [ "rustfmt", "clippy" ]
profile = "minimal" profile = "minimal"

44
src/api/admin.rs

@ -1,17 +1,16 @@
use once_cell::sync::Lazy; use std::{env, sync::LazyLock};
use reqwest::Method;
use serde::de::DeserializeOwned;
use serde_json::Value;
use std::env;
use rocket::serde::json::Json; use reqwest::Method;
use rocket::{ use rocket::{
form::Form, form::Form,
http::{Cookie, CookieJar, MediaType, SameSite, Status}, http::{Cookie, CookieJar, MediaType, SameSite, Status},
request::{FromRequest, Outcome, Request}, request::{FromRequest, Outcome, Request},
response::{content::RawHtml as Html, Redirect}, response::{content::RawHtml as Html, Redirect},
serde::json::Json,
Catcher, Route, Catcher, Route,
}; };
use serde::de::DeserializeOwned;
use serde_json::Value;
use crate::{ use crate::{
api::{ api::{
@ -75,7 +74,7 @@ pub fn catchers() -> Vec<Catcher> {
} }
} }
static DB_TYPE: Lazy<&str> = Lazy::new(|| { static DB_TYPE: LazyLock<&str> = LazyLock::new(|| {
DbConnType::from_url(&CONFIG.database_url()) DbConnType::from_url(&CONFIG.database_url())
.map(|t| match t { .map(|t| match t {
DbConnType::sqlite => "SQLite", DbConnType::sqlite => "SQLite",
@ -85,8 +84,8 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| {
.unwrap_or("Unknown") .unwrap_or("Unknown")
}); });
static CAN_BACKUP: Lazy<bool> = static CAN_BACKUP: LazyLock<bool> =
Lazy::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false)); LazyLock::new(|| DbConnType::from_url(&CONFIG.database_url()).map(|t| t == DbConnType::sqlite).unwrap_or(false));
#[get("/")] #[get("/")]
fn admin_disabled() -> &'static str { fn admin_disabled() -> &'static str {
@ -148,10 +147,10 @@ fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
err_code!("Authorization failed.", Status::Unauthorized.code); err_code!("Authorization failed.", Status::Unauthorized.code);
} }
let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string(); let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
render_admin_login(None, Some(redirect)) render_admin_login(None, Some(&redirect))
} }
fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> { fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiResult<Html<String>> {
// If there is an error, show it // If there is an error, show it
let msg = msg.map(|msg| format!("Error: {msg}")); let msg = msg.map(|msg| format!("Error: {msg}"));
let json = json!({ let json = json!({
@ -185,14 +184,17 @@ fn post_admin_login(
if crate::ratelimit::check_limit_admin(&ip.ip).is_err() { if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
return Err(AdminResponse::TooManyRequests(render_admin_login( return Err(AdminResponse::TooManyRequests(render_admin_login(
Some("Too many requests, try again later."), Some("Too many requests, try again later."),
redirect, redirect.as_deref(),
))); )));
} }
// If the token is invalid, redirect to login page // If the token is invalid, redirect to login page
if !_validate_token(&data.token) { if !_validate_token(&data.token) {
error!("Invalid admin token. IP: {}", ip.ip); error!("Invalid admin token. IP: {}", ip.ip);
Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect))) Err(AdminResponse::Unauthorized(render_admin_login(
Some("Invalid admin token, please try again."),
redirect.as_deref(),
)))
} else { } else {
// If the token received is valid, generate JWT and save it as a cookie // If the token received is valid, generate JWT and save it as a cookie
let claims = generate_admin_claims(); let claims = generate_admin_claims();
@ -299,7 +301,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, mut conn: DbCon
err_code!("User already exists", Status::Conflict.code) err_code!("User already exists", Status::Conflict.code)
} }
let mut user = User::new(data.email, None); let mut user = User::new(&data.email, None);
async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult { async fn _generate_invite(user: &User, conn: &mut DbConn) -> EmptyResult {
if CONFIG.mail_enabled() { if CONFIG.mail_enabled() {
@ -816,11 +818,7 @@ impl<'r> FromRequest<'r> for AdminToken {
_ => err_handler!("Error getting Client IP"), _ => err_handler!("Error getting Client IP"),
}; };
if CONFIG.disable_admin_token() { if !CONFIG.disable_admin_token() {
Outcome::Success(Self {
ip,
})
} else {
let cookies = request.cookies(); let cookies = request.cookies();
let access_token = match cookies.get(COOKIE_NAME) { let access_token = match cookies.get(COOKIE_NAME) {
@ -844,10 +842,10 @@ impl<'r> FromRequest<'r> for AdminToken {
error!("Invalid or expired admin JWT. IP: {}.", &ip.ip); error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
return Outcome::Error((Status::Unauthorized, "Session expired")); return Outcome::Error((Status::Unauthorized, "Session expired"));
} }
Outcome::Success(Self {
ip,
})
} }
Outcome::Success(Self {
ip,
})
} }
} }

19
src/api/core/accounts.rs

@ -277,7 +277,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
|| CONFIG.is_signup_allowed(&email) || CONFIG.is_signup_allowed(&email)
|| pending_emergency_access.is_some() || pending_emergency_access.is_some()
{ {
User::new(email.clone(), None) User::new(&email, None)
} else { } else {
err!("Registration not allowed or user already exists") err!("Registration not allowed or user already exists")
} }
@ -287,7 +287,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, mut c
// Make sure we don't leave a lingering invitation. // Make sure we don't leave a lingering invitation.
Invitation::take(&email, &mut conn).await; Invitation::take(&email, &mut conn).await;
set_kdf_data(&mut user, data.kdf)?; set_kdf_data(&mut user, &data.kdf)?;
user.set_password(&data.master_password_hash, Some(data.key), true, None); user.set_password(&data.master_password_hash, Some(data.key), true, None);
user.password_hint = password_hint; user.password_hint = password_hint;
@ -350,7 +350,7 @@ async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, mut co
let password_hint = clean_password_hint(&data.master_password_hint); let password_hint = clean_password_hint(&data.master_password_hint);
enforce_password_hint_setting(&password_hint)?; enforce_password_hint_setting(&password_hint)?;
set_kdf_data(&mut user, data.kdf)?; set_kdf_data(&mut user, &data.kdf)?;
user.set_password( user.set_password(
&data.master_password_hash, &data.master_password_hash,
@ -548,7 +548,7 @@ struct ChangeKdfData {
key: String, key: String,
} }
fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult { fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
err!("PBKDF2 KDF iterations must be at least 100000.") err!("PBKDF2 KDF iterations must be at least 100000.")
} }
@ -592,7 +592,7 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, mut conn: DbConn,
err!("Invalid password") err!("Invalid password")
} }
set_kdf_data(&mut user, data.kdf)?; set_kdf_data(&mut user, &data.kdf)?;
user.set_password(&data.new_master_password_hash, Some(data.key), true, None); user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
let save_result = user.save(&mut conn).await; let save_result = user.save(&mut conn).await;
@ -1261,10 +1261,11 @@ async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: D
#[get("/devices/knowndevice")] #[get("/devices/knowndevice")]
async fn get_known_device(device: KnownDevice, mut conn: DbConn) -> JsonResult { async fn get_known_device(device: KnownDevice, mut conn: DbConn) -> JsonResult {
let mut result = false; let result = if let Some(user) = User::find_by_mail(&device.email, &mut conn).await {
if let Some(user) = User::find_by_mail(&device.email, &mut conn).await { Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &mut conn).await.is_some()
result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &mut conn).await.is_some(); } else {
} false
};
Ok(Json(json!(result))) Ok(Json(json!(result)))
} }

2
src/api/core/ciphers.rs

@ -1275,7 +1275,7 @@ async fn save_attachment(
attachment.save(&mut conn).await.expect("Error saving attachment"); attachment.save(&mut conn).await.expect("Error saving attachment");
} }
save_temp_file(PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?; save_temp_file(&PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;
nt.send_cipher_update( nt.send_cipher_update(
UpdateType::SyncCipherUpdate, UpdateType::SyncCipherUpdate,

2
src/api/core/emergency_access.rs

@ -239,7 +239,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, mu
invitation.save(&mut conn).await?; invitation.save(&mut conn).await?;
} }
let mut user = User::new(email.clone(), None); let mut user = User::new(&email, None);
user.save(&mut conn).await?; user.save(&mut conn).await?;
(user, true) (user, true)
} }

6
src/api/core/organizations.rs

@ -194,7 +194,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, mut conn: Db
(None, None) (None, None)
}; };
let org = Organization::new(data.name, data.billing_email, private_key, public_key); let org = Organization::new(data.name, &data.billing_email, private_key, public_key);
let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None); let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None);
let collection = Collection::new(org.uuid.clone(), data.collection_name, None); let collection = Collection::new(org.uuid.clone(), data.collection_name, None);
@ -1127,7 +1127,7 @@ async fn send_invite(
Invitation::new(email).save(&mut conn).await?; Invitation::new(email).save(&mut conn).await?;
} }
let mut new_user = User::new(email.clone(), None); let mut new_user = User::new(email, None);
new_user.save(&mut conn).await?; new_user.save(&mut conn).await?;
user_created = true; user_created = true;
new_user new_user
@ -1600,7 +1600,7 @@ async fn edit_member(
// HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission // HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission
// The from_str() will convert the custom role type into a manager role type // The from_str() will convert the custom role type into a manager role type
let raw_type = &data.r#type.into_string(); let raw_type = &data.r#type.into_string();
// MembershipTyp::from_str will convert custom (4) to manager (3) // MembershipType::from_str will convert custom (4) to manager (3)
let Some(new_type) = MembershipType::from_str(raw_type) else { let Some(new_type) = MembershipType::from_str(raw_type) else {
err!("Invalid type") err!("Invalid type")
}; };

2
src/api/core/public.rs

@ -89,7 +89,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, mut conn: Db
Some(user) => user, // exists in vaultwarden Some(user) => user, // exists in vaultwarden
None => { None => {
// User does not exist yet // User does not exist yet
let mut new_user = User::new(user_data.email.clone(), None); let mut new_user = User::new(&user_data.email, None);
new_user.save(&mut conn).await?; new_user.save(&mut conn).await?;
if !CONFIG.mail_enabled() { if !CONFIG.mail_enabled() {

21
src/api/core/sends.rs

@ -1,13 +1,12 @@
use std::path::Path; use std::{path::Path, sync::LazyLock, time::Duration};
use std::time::Duration;
use chrono::{DateTime, TimeDelta, Utc}; use chrono::{DateTime, TimeDelta, Utc};
use num_traits::ToPrimitive; use num_traits::ToPrimitive;
use once_cell::sync::Lazy; use rocket::{
use rocket::form::Form; form::Form,
use rocket::fs::NamedFile; fs::{NamedFile, TempFile},
use rocket::fs::TempFile; serde::json::Json,
use rocket::serde::json::Json; };
use serde_json::Value; use serde_json::Value;
use crate::{ use crate::{
@ -20,7 +19,7 @@ use crate::{
}; };
const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available"; const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
static ANON_PUSH_DEVICE: Lazy<Device> = Lazy::new(|| { static ANON_PUSH_DEVICE: LazyLock<Device> = LazyLock::new(|| {
let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z"); let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z");
Device { Device {
uuid: String::from("00000000-0000-0000-0000-000000000000").into(), uuid: String::from("00000000-0000-0000-0000-000000000000").into(),
@ -271,7 +270,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, mut conn:
let file_id = crate::crypto::generate_send_file_id(); let file_id = crate::crypto::generate_send_file_id();
save_temp_file(PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?; save_temp_file(&PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;
let mut data_value: Value = serde_json::from_str(&send.data)?; let mut data_value: Value = serde_json::from_str(&send.data)?;
if let Some(o) = data_value.as_object_mut() { if let Some(o) = data_value.as_object_mut() {
@ -423,7 +422,7 @@ async fn post_send_file_v2_data(
let file_path = format!("{send_id}/{file_id}"); let file_path = format!("{send_id}/{file_id}");
save_temp_file(PathType::Sends, &file_path, data.data, false).await?; save_temp_file(&PathType::Sends, &file_path, data.data, false).await?;
nt.send_send_update( nt.send_send_update(
UpdateType::SyncSendCreate, UpdateType::SyncSendCreate,
@ -564,7 +563,7 @@ async fn post_access_file(
} }
async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> { async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> {
let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
if operator.info().scheme() == opendal::Scheme::Fs { if operator.info().scheme() == opendal::Scheme::Fs {
let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token_claims = crate::auth::generate_send_claims(send_id, file_id);

2
src/api/core/two_factor/authenticator.rs

@ -31,7 +31,7 @@ async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers,
let (enabled, key) = match twofactor { let (enabled, key) = match twofactor {
Some(tf) => (true, tf.data), Some(tf) => (true, tf.data),
_ => (false, crypto::encode_random_bytes::<20>(BASE32)), _ => (false, crypto::encode_random_bytes::<20>(&BASE32)),
}; };
// Upstream seems to also return `userVerificationToken`, but doesn't seem to be used at all. // Upstream seems to also return `userVerificationToken`, but doesn't seem to be used at all.

2
src/api/core/two_factor/mod.rs

@ -120,7 +120,7 @@ async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, mu
async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) { async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) {
if user.totp_recover.is_none() { if user.totp_recover.is_none() {
let totp_recover = crypto::encode_random_bytes::<20>(BASE32); let totp_recover = crypto::encode_random_bytes::<20>(&BASE32);
user.totp_recover = Some(totp_recover); user.totp_recover = Some(totp_recover);
user.save(conn).await.ok(); user.save(conn).await.ok();
} }

22
src/api/icons.rs

@ -1,13 +1,13 @@
use std::{ use std::{
collections::HashMap, collections::HashMap,
net::IpAddr, net::IpAddr,
sync::Arc, sync::{Arc, LazyLock},
time::{Duration, SystemTime}, time::{Duration, SystemTime},
}; };
use bytes::{Bytes, BytesMut}; use bytes::{Bytes, BytesMut};
use futures::{stream::StreamExt, TryFutureExt}; use futures::{stream::StreamExt, TryFutureExt};
use once_cell::sync::Lazy; use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
use regex::Regex; use regex::Regex;
use reqwest::{ use reqwest::{
header::{self, HeaderMap, HeaderValue}, header::{self, HeaderMap, HeaderValue},
@ -16,8 +16,6 @@ use reqwest::{
use rocket::{http::ContentType, response::Redirect, Route}; use rocket::{http::ContentType, response::Redirect, Route};
use svg_hush::{data_url_filter, Filter}; use svg_hush::{data_url_filter, Filter};
use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
use crate::{ use crate::{
config::PathType, config::PathType,
error::Error, error::Error,
@ -33,7 +31,7 @@ pub fn routes() -> Vec<Route> {
} }
} }
static CLIENT: Lazy<Client> = Lazy::new(|| { static CLIENT: LazyLock<Client> = LazyLock::new(|| {
// Generate the default headers // Generate the default headers
let mut default_headers = HeaderMap::new(); let mut default_headers = HeaderMap::new();
default_headers.insert( default_headers.insert(
@ -78,7 +76,7 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
}); });
// Build Regex only once since this takes a lot of time. // Build Regex only once since this takes a lot of time.
static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); static ICON_SIZE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
// The function name `icon_external` is checked in the `on_response` function in `AppHeaders` // The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
// It is used to prevent sending a specific header which breaks icon downloads. // It is used to prevent sending a specific header which breaks icon downloads.
@ -220,7 +218,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
} }
// Try to read the cached icon, and return it if it exists // Try to read the cached icon, and return it if it exists
if let Ok(operator) = CONFIG.opendal_operator_for_path_type(PathType::IconCache) { if let Ok(operator) = CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
if let Ok(buf) = operator.read(path).await { if let Ok(buf) = operator.read(path).await {
return Some(buf.to_vec()); return Some(buf.to_vec());
} }
@ -230,7 +228,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
} }
async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> { async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> {
let operator = CONFIG.opendal_operator_for_path_type(PathType::IconCache)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::IconCache)?;
let meta = operator.stat(path).await?; let meta = operator.stat(path).await?;
let modified = let modified =
meta.last_modified().ok_or_else(|| std::io::Error::other(format!("No last modified time for `{path}`")))?; meta.last_modified().ok_or_else(|| std::io::Error::other(format!("No last modified time for `{path}`")))?;
@ -246,7 +244,7 @@ async fn icon_is_negcached(path: &str) -> bool {
match expired { match expired {
// No longer negatively cached, drop the marker // No longer negatively cached, drop the marker
Ok(true) => { Ok(true) => {
match CONFIG.opendal_operator_for_path_type(PathType::IconCache) { match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
Ok(operator) => { Ok(operator) => {
if let Err(e) = operator.delete(&miss_indicator).await { if let Err(e) = operator.delete(&miss_indicator).await {
error!("Could not remove negative cache indicator for icon {path:?}: {e:?}"); error!("Could not remove negative cache indicator for icon {path:?}: {e:?}");
@ -462,8 +460,8 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err
/// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", ""); /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "");
/// ``` /// ```
fn get_icon_priority(href: &str, sizes: &str) -> u8 { fn get_icon_priority(href: &str, sizes: &str) -> u8 {
static PRIORITY_MAP: Lazy<HashMap<&'static str, u8>> = static PRIORITY_MAP: LazyLock<HashMap<&'static str, u8>> =
Lazy::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect()); LazyLock::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());
// Check if there is a dimension set // Check if there is a dimension set
let (width, height) = parse_sizes(sizes); let (width, height) = parse_sizes(sizes);
@ -597,7 +595,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
} }
async fn save_icon(path: &str, icon: Vec<u8>) { async fn save_icon(path: &str, icon: Vec<u8>) {
let operator = match CONFIG.opendal_operator_for_path_type(PathType::IconCache) { let operator = match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
Ok(operator) => operator, Ok(operator) => operator,
Err(e) => { Err(e) => {
warn!("Failed to get OpenDAL operator while saving icon: {e}"); warn!("Failed to get OpenDAL operator while saving icon: {e}");

4
src/api/identity.rs

@ -248,7 +248,7 @@ async fn _sso_login(
_ => (), _ => (),
} }
let mut user = User::new(user_infos.email, user_infos.user_name); let mut user = User::new(&user_infos.email, user_infos.user_name);
user.verified_at = Some(now); user.verified_at = Some(now);
user.save(conn).await?; user.save(conn).await?;
@ -1066,7 +1066,7 @@ async fn oidcsignin_redirect(
wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper, wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper,
conn: &DbConn, conn: &DbConn,
) -> ApiResult<Redirect> { ) -> ApiResult<Redirect> {
let state = sso::decode_state(base64_state)?; let state = sso::decode_state(&base64_state)?;
let code = sso::encode_code_claims(wrapper(state.clone())); let code = sso::encode_code_claims(wrapper(state.clone()));
let nonce = match SsoNonce::find(&state, conn).await { let nonce = match SsoNonce::find(&state, conn).await {

44
src/api/notifications.rs

@ -1,11 +1,14 @@
use std::{net::IpAddr, sync::Arc, time::Duration}; use std::{
net::IpAddr,
sync::{Arc, LazyLock},
time::Duration,
};
use chrono::{NaiveDateTime, Utc}; use chrono::{NaiveDateTime, Utc};
use rmpv::Value; use rmpv::Value;
use rocket::{futures::StreamExt, Route}; use rocket::{futures::StreamExt, Route};
use tokio::sync::mpsc::Sender;
use rocket_ws::{Message, WebSocket}; use rocket_ws::{Message, WebSocket};
use tokio::sync::mpsc::Sender;
use crate::{ use crate::{
auth::{ClientIp, WsAccessTokenHeader}, auth::{ClientIp, WsAccessTokenHeader},
@ -16,15 +19,13 @@ use crate::{
Error, CONFIG, Error, CONFIG,
}; };
use once_cell::sync::Lazy; pub static WS_USERS: LazyLock<Arc<WebSocketUsers>> = LazyLock::new(|| {
pub static WS_USERS: Lazy<Arc<WebSocketUsers>> = Lazy::new(|| {
Arc::new(WebSocketUsers { Arc::new(WebSocketUsers {
map: Arc::new(dashmap::DashMap::new()), map: Arc::new(dashmap::DashMap::new()),
}) })
}); });
pub static WS_ANONYMOUS_SUBSCRIPTIONS: Lazy<Arc<AnonymousWebSocketSubscriptions>> = Lazy::new(|| { pub static WS_ANONYMOUS_SUBSCRIPTIONS: LazyLock<Arc<AnonymousWebSocketSubscriptions>> = LazyLock::new(|| {
Arc::new(AnonymousWebSocketSubscriptions { Arc::new(AnonymousWebSocketSubscriptions {
map: Arc::new(dashmap::DashMap::new()), map: Arc::new(dashmap::DashMap::new()),
}) })
@ -35,7 +36,7 @@ use super::{
push_send_update, push_user_update, push_send_update, push_user_update,
}; };
static NOTIFICATIONS_DISABLED: Lazy<bool> = Lazy::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled()); static NOTIFICATIONS_DISABLED: LazyLock<bool> = LazyLock::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled());
pub fn routes() -> Vec<Route> { pub fn routes() -> Vec<Route> {
if CONFIG.enable_websocket() { if CONFIG.enable_websocket() {
@ -109,8 +110,7 @@ fn websockets_hub<'r>(
ip: ClientIp, ip: ClientIp,
header_token: WsAccessTokenHeader, header_token: WsAccessTokenHeader,
) -> Result<rocket_ws::Stream!['r], Error> { ) -> Result<rocket_ws::Stream!['r], Error> {
let addr = ip.ip; info!("Accepting Rocket WS connection from {}", ip.ip);
info!("Accepting Rocket WS connection from {addr}");
let token = if let Some(token) = data.access_token { let token = if let Some(token) = data.access_token {
token token
@ -133,7 +133,7 @@ fn websockets_hub<'r>(
users.map.entry(claims.sub.to_string()).or_default().push((entry_uuid, tx)); users.map.entry(claims.sub.to_string()).or_default().push((entry_uuid, tx));
// Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
(rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, addr)) (rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, ip.ip))
}; };
Ok({ Ok({
@ -189,8 +189,7 @@ fn websockets_hub<'r>(
#[allow(tail_expr_drop_order)] #[allow(tail_expr_drop_order)]
#[get("/anonymous-hub?<token..>")] #[get("/anonymous-hub?<token..>")]
fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> Result<rocket_ws::Stream!['r], Error> { fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> Result<rocket_ws::Stream!['r], Error> {
let addr = ip.ip; info!("Accepting Anonymous Rocket WS connection from {}", ip.ip);
info!("Accepting Anonymous Rocket WS connection from {addr}");
let (mut rx, guard) = { let (mut rx, guard) = {
let subscriptions = Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS); let subscriptions = Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS);
@ -200,7 +199,7 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
subscriptions.map.insert(token.clone(), tx); subscriptions.map.insert(token.clone(), tx);
// Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
(rx, WSAnonymousEntryMapGuard::new(subscriptions, token, addr)) (rx, WSAnonymousEntryMapGuard::new(subscriptions, token, ip.ip))
}; };
Ok({ Ok({
@ -257,11 +256,11 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
// Websockets server // Websockets server
// //
fn serialize(val: Value) -> Vec<u8> { fn serialize(val: &Value) -> Vec<u8> {
use rmpv::encode::write_value; use rmpv::encode::write_value;
let mut buf = Vec::new(); let mut buf = Vec::new();
write_value(&mut buf, &val).expect("Error encoding MsgPack"); write_value(&mut buf, val).expect("Error encoding MsgPack");
// Add size bytes at the start // Add size bytes at the start
// Extracted from BinaryMessageFormat.js // Extracted from BinaryMessageFormat.js
@ -558,7 +557,7 @@ impl AnonymousWebSocketSubscriptions {
let data = create_anonymous_update( let data = create_anonymous_update(
vec![("Id".into(), auth_request_id.to_string().into()), ("UserId".into(), user_id.to_string().into())], vec![("Id".into(), auth_request_id.to_string().into()), ("UserId".into(), user_id.to_string().into())],
UpdateType::AuthRequestResponse, UpdateType::AuthRequestResponse,
user_id.clone(), user_id,
); );
self.send_update(auth_request_id, &data).await; self.send_update(auth_request_id, &data).await;
} }
@ -594,16 +593,19 @@ fn create_update(payload: Vec<(Value, Value)>, ut: UpdateType, acting_device_id:
])]), ])]),
]); ]);
serialize(value) serialize(&value)
} }
fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: UserId) -> Vec<u8> { fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: &UserId) -> Vec<u8> {
use rmpv::Value as V; use rmpv::Value as V;
let value = V::Array(vec![ let value = V::Array(vec![
1.into(), 1.into(),
V::Map(vec![]), V::Map(vec![]),
V::Nil, V::Nil,
// This word is misspelled, but upstream has this too
// https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
// https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
"AuthRequestResponseRecieved".into(), "AuthRequestResponseRecieved".into(),
V::Array(vec![V::Map(vec![ V::Array(vec![V::Map(vec![
("Type".into(), (ut as i32).into()), ("Type".into(), (ut as i32).into()),
@ -612,11 +614,11 @@ fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id
])]), ])]),
]); ]);
serialize(value) serialize(&value)
} }
fn create_ping() -> Vec<u8> { fn create_ping() -> Vec<u8> {
serialize(Value::Array(vec![6.into()])) serialize(&Value::Array(vec![6.into()]))
} }
// https://github.com/bitwarden/server/blob/375af7c43b10d9da03525d41452f95de3f921541/src/Core/Enums/PushType.cs // https://github.com/bitwarden/server/blob/375af7c43b10d9da03525d41452f95de3f921541/src/Core/Enums/PushType.cs

10
src/api/push.rs

@ -1,3 +1,8 @@
use std::{
sync::LazyLock,
time::{Duration, Instant},
};
use reqwest::{ use reqwest::{
header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE}, header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE},
Method, Method,
@ -13,9 +18,6 @@ use crate::{
CONFIG, CONFIG,
}; };
use once_cell::sync::Lazy;
use std::time::{Duration, Instant};
#[derive(Deserialize)] #[derive(Deserialize)]
struct AuthPushToken { struct AuthPushToken {
access_token: String, access_token: String,
@ -29,7 +31,7 @@ struct LocalAuthPushToken {
} }
async fn get_auth_api_token() -> ApiResult<String> { async fn get_auth_api_token() -> ApiResult<String> {
static API_TOKEN: Lazy<RwLock<LocalAuthPushToken>> = Lazy::new(|| { static API_TOKEN: LazyLock<RwLock<LocalAuthPushToken>> = LazyLock::new(|| {
RwLock::new(LocalAuthPushToken { RwLock::new(LocalAuthPushToken {
access_token: String::new(), access_token: String::new(),
valid_until: Instant::now(), valid_until: Instant::now(),

62
src/auth.rs

@ -1,12 +1,15 @@
// JWT Handling use std::{
env,
net::IpAddr,
sync::{LazyLock, OnceLock},
};
use chrono::{DateTime, TimeDelta, Utc}; use chrono::{DateTime, TimeDelta, Utc};
use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header}; use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header};
use num_traits::FromPrimitive; use num_traits::FromPrimitive;
use once_cell::sync::{Lazy, OnceCell};
use openssl::rsa::Rsa; use openssl::rsa::Rsa;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::ser::Serialize; use serde::ser::Serialize;
use std::{env, net::IpAddr};
use crate::{ use crate::{
api::ApiResult, api::ApiResult,
@ -22,27 +25,30 @@ use crate::{
const JWT_ALGORITHM: Algorithm = Algorithm::RS256; const JWT_ALGORITHM: Algorithm = Algorithm::RS256;
// Limit when BitWarden consider the token as expired // Limit when BitWarden consider the token as expired
pub static BW_EXPIRATION: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_minutes(5).unwrap()); pub static BW_EXPIRATION: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_minutes(5).unwrap());
pub static DEFAULT_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(30).unwrap()); pub static DEFAULT_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(30).unwrap());
pub static MOBILE_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(90).unwrap()); pub static MOBILE_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(90).unwrap());
pub static DEFAULT_ACCESS_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); pub static DEFAULT_ACCESS_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_hours(2).unwrap());
static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM)); static JWT_HEADER: LazyLock<Header> = LazyLock::new(|| Header::new(JWT_ALGORITHM));
pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); pub static JWT_LOGIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|login", CONFIG.domain_origin()));
static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin())); static JWT_INVITE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|invite", CONFIG.domain_origin()));
static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy<String> = static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: LazyLock<String> =
Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin())); LazyLock::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin()));
static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin())); static JWT_DELETE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|delete", CONFIG.domain_origin()));
static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin())); static JWT_VERIFYEMAIL_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin())); static JWT_ADMIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|admin", CONFIG.domain_origin()));
static JWT_SEND_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|send", CONFIG.domain_origin())); static JWT_SEND_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|send", CONFIG.domain_origin()));
static JWT_ORG_API_KEY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|api.organization", CONFIG.domain_origin())); static JWT_ORG_API_KEY_ISSUER: LazyLock<String> =
static JWT_FILE_DOWNLOAD_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|file_download", CONFIG.domain_origin())); LazyLock::new(|| format!("{}|api.organization", CONFIG.domain_origin()));
static JWT_REGISTER_VERIFY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|register_verify", CONFIG.domain_origin())); static JWT_FILE_DOWNLOAD_ISSUER: LazyLock<String> =
LazyLock::new(|| format!("{}|file_download", CONFIG.domain_origin()));
static PRIVATE_RSA_KEY: OnceCell<EncodingKey> = OnceCell::new(); static JWT_REGISTER_VERIFY_ISSUER: LazyLock<String> =
static PUBLIC_RSA_KEY: OnceCell<DecodingKey> = OnceCell::new(); LazyLock::new(|| format!("{}|register_verify", CONFIG.domain_origin()));
static PRIVATE_RSA_KEY: OnceLock<EncodingKey> = OnceLock::new();
static PUBLIC_RSA_KEY: OnceLock<DecodingKey> = OnceLock::new();
pub async fn initialize_keys() -> Result<(), Error> { pub async fn initialize_keys() -> Result<(), Error> {
use std::io::Error; use std::io::Error;
@ -54,7 +60,7 @@ pub async fn initialize_keys() -> Result<(), Error> {
.ok_or_else(|| Error::other("Private RSA key path filename is not valid UTF-8"))? .ok_or_else(|| Error::other("Private RSA key path filename is not valid UTF-8"))?
.to_string(); .to_string();
let operator = CONFIG.opendal_operator_for_path_type(PathType::RsaKey).map_err(Error::other)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::RsaKey).map_err(Error::other)?;
let priv_key_buffer = match operator.read(&rsa_key_filename).await { let priv_key_buffer = match operator.read(&rsa_key_filename).await {
Ok(buffer) => Some(buffer), Ok(buffer) => Some(buffer),
@ -457,7 +463,7 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims {
} }
} }
pub fn generate_verify_email_claims(user_id: UserId) -> BasicJwtClaims { pub fn generate_verify_email_claims(user_id: &UserId) -> BasicJwtClaims {
let time_now = Utc::now(); let time_now = Utc::now();
let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); let expire_hours = i64::from(CONFIG.invitation_expiration_hours());
BasicJwtClaims { BasicJwtClaims {
@ -696,9 +702,9 @@ impl<'r> FromRequest<'r> for OrgHeaders {
// First check the path, if this is not a valid uuid, try the query values. // First check the path, if this is not a valid uuid, try the query values.
let url_org_id: Option<OrganizationId> = { let url_org_id: Option<OrganizationId> = {
if let Some(Ok(org_id)) = request.param::<OrganizationId>(1) { if let Some(Ok(org_id)) = request.param::<OrganizationId>(1) {
Some(org_id.clone()) Some(org_id)
} else if let Some(Ok(org_id)) = request.query_value::<OrganizationId>("organizationId") { } else if let Some(Ok(org_id)) = request.query_value::<OrganizationId>("organizationId") {
Some(org_id.clone()) Some(org_id)
} else { } else {
None None
} }

318
src/config.rs

@ -1,5 +1,6 @@
use std::{ use std::{
env::consts::EXE_SUFFIX, env::consts::EXE_SUFFIX,
fmt,
process::exit, process::exit,
sync::{ sync::{
atomic::{AtomicBool, Ordering}, atomic::{AtomicBool, Ordering},
@ -8,8 +9,8 @@ use std::{
}; };
use job_scheduler_ng::Schedule; use job_scheduler_ng::Schedule;
use once_cell::sync::Lazy;
use reqwest::Url; use reqwest::Url;
use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
use crate::{ use crate::{
db::DbConnType, db::DbConnType,
@ -17,7 +18,7 @@ use crate::{
util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags}, util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags},
}; };
static CONFIG_FILE: Lazy<String> = Lazy::new(|| { static CONFIG_FILE: LazyLock<String> = LazyLock::new(|| {
let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data")); let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json")) get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
}); });
@ -34,7 +35,7 @@ static CONFIG_FILENAME: LazyLock<String> = LazyLock::new(|| {
pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false); pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
pub static CONFIG: Lazy<Config> = Lazy::new(|| { pub static CONFIG: LazyLock<Config> = LazyLock::new(|| {
std::thread::spawn(|| { std::thread::spawn(|| {
let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap_or_else(|e| { let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap_or_else(|e| {
println!("Error loading config:\n {e:?}\n"); println!("Error loading config:\n {e:?}\n");
@ -56,6 +57,41 @@ pub static CONFIG: Lazy<Config> = Lazy::new(|| {
pub type Pass = String; pub type Pass = String;
macro_rules! make_config { macro_rules! make_config {
// Support string print
( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value(&$value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value(&$value).unwrap() }; // Optional other or string, we convert to json
( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { $value.as_str().into() }; // Required string value, we convert to json
( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to json
// Group or empty string
( @show ) => { "" };
( @show $lit:literal ) => { $lit };
// Wrap the optionals in an Option type
( @type $ty:ty, option) => { Option<$ty> };
( @type $ty:ty, $id:ident) => { $ty };
// Generate the values depending on none_action
( @build $value:expr, $config:expr, option, ) => { $value };
( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
match $value {
Some(v) => v,
None => {
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
f($config)
}
}
}};
( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
f($config)
}};
( @getenv $name:expr, bool ) => { get_env_bool($name) };
( @getenv $name:expr, $ty:ident ) => { get_env($name) };
($( ($(
$(#[doc = $groupdoc:literal])? $(#[doc = $groupdoc:literal])?
$group:ident $(: $group_enabled:ident)? { $group:ident $(: $group_enabled:ident)? {
@ -75,10 +111,103 @@ macro_rules! make_config {
_env: ConfigBuilder, _env: ConfigBuilder,
_usr: ConfigBuilder, _usr: ConfigBuilder,
_overrides: Vec<String>, _overrides: Vec<&'static str>,
}
// Custom Deserialize for ConfigBuilder, mainly based upon https://serde.rs/deserialize-struct.html
// This deserialize doesn't care if there are keys missing, or if there are duplicate keys
// In case of duplicate keys (which should never be possible unless manually edited), the last value is used!
// Main reason for this is removing the `visit_seq` function, which causes a lot of code generation not needed or used for this struct.
impl<'de> Deserialize<'de> for ConfigBuilder {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
const FIELDS: &[&str] = &[
$($(
stringify!($name),
)+)+
];
#[allow(non_camel_case_types)]
enum Field {
$($(
$name,
)+)+
__ignore,
}
impl<'de> Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct FieldVisitor;
impl Visitor<'_> for FieldVisitor {
type Value = Field;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("ConfigBuilder field identifier")
}
#[inline]
fn visit_str<E>(self, value: &str) -> Result<Field, E>
where
E: de::Error,
{
match value {
$($(
stringify!($name) => Ok(Field::$name),
)+)+
_ => Ok(Field::__ignore),
}
}
}
deserializer.deserialize_identifier(FieldVisitor)
}
}
struct ConfigBuilderVisitor;
impl<'de> Visitor<'de> for ConfigBuilderVisitor {
type Value = ConfigBuilder;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("struct ConfigBuilder")
}
#[inline]
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
let mut builder = ConfigBuilder::default();
while let Some(key) = map.next_key()? {
match key {
$($(
Field::$name => {
if builder.$name.is_some() {
return Err(de::Error::duplicate_field(stringify!($name)));
}
builder.$name = map.next_value()?;
}
)+)+
Field::__ignore => {
let _ = map.next_value::<de::IgnoredAny>()?;
}
}
}
Ok(builder)
}
}
deserializer.deserialize_struct("ConfigBuilder", FIELDS, ConfigBuilderVisitor)
}
} }
#[derive(Clone, Default, Deserialize, Serialize)] #[derive(Clone, Default, Serialize)]
pub struct ConfigBuilder { pub struct ConfigBuilder {
$($( $($(
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
@ -87,7 +216,6 @@ macro_rules! make_config {
} }
impl ConfigBuilder { impl ConfigBuilder {
#[allow(clippy::field_reassign_with_default)]
fn from_env() -> Self { fn from_env() -> Self {
let env_file = get_env("ENV_FILE").unwrap_or_else(|| String::from(".env")); let env_file = get_env("ENV_FILE").unwrap_or_else(|| String::from(".env"));
match dotenvy::from_path(&env_file) { match dotenvy::from_path(&env_file) {
@ -149,14 +277,14 @@ macro_rules! make_config {
/// Merges the values of both builders into a new builder. /// Merges the values of both builders into a new builder.
/// If both have the same element, `other` wins. /// If both have the same element, `other` wins.
fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<String>) -> Self { fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<&str>) -> Self {
let mut builder = self.clone(); let mut builder = self.clone();
$($( $($(
if let v @Some(_) = &other.$name { if let v @Some(_) = &other.$name {
builder.$name = v.clone(); builder.$name = v.clone();
if self.$name.is_some() { if self.$name.is_some() {
overrides.push(pastey::paste!(stringify!([<$name:upper>])).into()); overrides.push(pastey::paste!(stringify!([<$name:upper>])));
} }
} }
)+)+ )+)+
@ -197,6 +325,32 @@ macro_rules! make_config {
#[derive(Clone, Default)] #[derive(Clone, Default)]
struct ConfigItems { $($( $name: make_config!{@type $ty, $none_action}, )+)+ } struct ConfigItems { $($( $name: make_config!{@type $ty, $none_action}, )+)+ }
#[derive(Serialize)]
struct ElementDoc {
name: &'static str,
description: &'static str,
}
#[derive(Serialize)]
struct ElementData {
editable: bool,
name: &'static str,
value: serde_json::Value,
default: serde_json::Value,
#[serde(rename = "type")]
r#type: &'static str,
doc: ElementDoc,
overridden: bool,
}
#[derive(Serialize)]
pub struct GroupData {
group: &'static str,
grouptoggle: &'static str,
groupdoc: &'static str,
elements: Vec<ElementData>,
}
#[allow(unused)] #[allow(unused)]
impl Config { impl Config {
$($( $($(
@ -208,11 +362,12 @@ macro_rules! make_config {
pub fn prepare_json(&self) -> serde_json::Value { pub fn prepare_json(&self) -> serde_json::Value {
let (def, cfg, overridden) = { let (def, cfg, overridden) = {
// Lock the inner as short as possible and clone what is needed to prevent deadlocks
let inner = &self.inner.read().unwrap(); let inner = &self.inner.read().unwrap();
(inner._env.build(), inner.config.clone(), inner._overrides.clone()) (inner._env.build(), inner.config.clone(), inner._overrides.clone())
}; };
fn _get_form_type(rust_type: &str) -> &'static str { fn _get_form_type(rust_type: &'static str) -> &'static str {
match rust_type { match rust_type {
"Pass" => "password", "Pass" => "password",
"String" => "text", "String" => "text",
@ -221,48 +376,36 @@ macro_rules! make_config {
} }
} }
fn _get_doc(doc: &str) -> serde_json::Value { fn _get_doc(doc_str: &'static str) -> ElementDoc {
let mut split = doc.split("|>").map(str::trim); let mut split = doc_str.split("|>").map(str::trim);
ElementDoc {
// We do not use the json!() macro here since that causes a lot of macro recursion. name: split.next().unwrap_or_default(),
// This slows down compile time and it also causes issues with rust-analyzer description: split.next().unwrap_or_default(),
serde_json::Value::Object({ }
let mut doc_json = serde_json::Map::new();
doc_json.insert("name".into(), serde_json::to_value(split.next()).unwrap());
doc_json.insert("description".into(), serde_json::to_value(split.next()).unwrap());
doc_json
})
} }
// We do not use the json!() macro here since that causes a lot of macro recursion. let data: Vec<GroupData> = vec![
// This slows down compile time and it also causes issues with rust-analyzer $( // This repetition is for each group
serde_json::Value::Array(<[_]>::into_vec(Box::new([ GroupData {
$( group: stringify!($group),
serde_json::Value::Object({ grouptoggle: stringify!($($group_enabled)?),
let mut group = serde_json::Map::new(); groupdoc: (make_config!{ @show $($groupdoc)? }),
group.insert("group".into(), (stringify!($group)).into());
group.insert("grouptoggle".into(), (stringify!($($group_enabled)?)).into()); elements: vec![
group.insert("groupdoc".into(), (make_config!{ @show $($groupdoc)? }).into()); $( // This repetition is for each element within a group
ElementData {
group.insert("elements".into(), serde_json::Value::Array(<[_]>::into_vec(Box::new([ editable: $editable,
$( name: stringify!($name),
serde_json::Value::Object({ value: serde_json::to_value(&cfg.$name).unwrap_or_default(),
let mut element = serde_json::Map::new(); default: serde_json::to_value(&def.$name).unwrap_or_default(),
element.insert("editable".into(), ($editable).into()); r#type: _get_form_type(stringify!($ty)),
element.insert("name".into(), (stringify!($name)).into()); doc: _get_doc(concat!($($doc),+)),
element.insert("value".into(), serde_json::to_value(cfg.$name).unwrap()); overridden: overridden.contains(&pastey::paste!(stringify!([<$name:upper>]))),
element.insert("default".into(), serde_json::to_value(def.$name).unwrap()); },
element.insert("type".into(), (_get_form_type(stringify!($ty))).into()); )+], // End of elements repetition
element.insert("doc".into(), (_get_doc(concat!($($doc),+))).into()); },
element.insert("overridden".into(), (overridden.contains(&pastey::paste!(stringify!([<$name:upper>])).into())).into()); )+]; // End of groups repetition
element serde_json::to_value(data).unwrap()
}),
)+
]))));
group
}),
)+
])))
} }
pub fn get_support_json(&self) -> serde_json::Value { pub fn get_support_json(&self) -> serde_json::Value {
@ -270,8 +413,8 @@ macro_rules! make_config {
// Pass types will always be masked and no need to put them in the list. // Pass types will always be masked and no need to put them in the list.
// Besides Pass, only String types will be masked via _privacy_mask. // Besides Pass, only String types will be masked via _privacy_mask.
const PRIVACY_CONFIG: &[&str] = &[ const PRIVACY_CONFIG: &[&str] = &[
"allowed_iframe_ancestors",
"allowed_connect_src", "allowed_connect_src",
"allowed_iframe_ancestors",
"database_url", "database_url",
"domain_origin", "domain_origin",
"domain_path", "domain_path",
@ -279,16 +422,18 @@ macro_rules! make_config {
"helo_name", "helo_name",
"org_creation_users", "org_creation_users",
"signups_domains_whitelist", "signups_domains_whitelist",
"_smtp_img_src",
"smtp_from_name",
"smtp_from", "smtp_from",
"smtp_host", "smtp_host",
"smtp_username", "smtp_username",
"_smtp_img_src",
"sso_client_id",
"sso_authority", "sso_authority",
"sso_callback_path", "sso_callback_path",
"sso_client_id",
]; ];
let cfg = { let cfg = {
// Lock the inner as short as possible and clone what is needed to prevent deadlocks
let inner = &self.inner.read().unwrap(); let inner = &self.inner.read().unwrap();
inner.config.clone() inner.config.clone()
}; };
@ -318,13 +463,21 @@ macro_rules! make_config {
serde_json::Value::Object({ serde_json::Value::Object({
let mut json = serde_json::Map::new(); let mut json = serde_json::Map::new();
$($( $($(
json.insert(stringify!($name).into(), make_config!{ @supportstr $name, cfg.$name, $ty, $none_action }); json.insert(String::from(stringify!($name)), make_config!{ @supportstr $name, cfg.$name, $ty, $none_action });
)+)+; )+)+;
// Loop through all privacy sensitive keys and mask them
for mask_key in PRIVACY_CONFIG {
if let Some(value) = json.get_mut(*mask_key) {
if let Some(s) = value.as_str() {
*value = _privacy_mask(s).into();
}
}
}
json json
}) })
} }
pub fn get_overrides(&self) -> Vec<String> { pub fn get_overrides(&self) -> Vec<&'static str> {
let overrides = { let overrides = {
let inner = &self.inner.read().unwrap(); let inner = &self.inner.read().unwrap();
inner._overrides.clone() inner._overrides.clone()
@ -333,55 +486,6 @@ macro_rules! make_config {
} }
} }
}; };
// Support string print
( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value($value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
( @supportstr $name:ident, $value:expr, String, option ) => { // Optional other value, we return as is or convert to string to apply the privacy config
if PRIVACY_CONFIG.contains(&stringify!($name)) {
serde_json::to_value($value.as_ref().map(|x| _privacy_mask(x) )).unwrap()
} else {
serde_json::to_value($value).unwrap()
}
};
( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { // Required other value, we return as is or convert to string to apply the privacy config
if PRIVACY_CONFIG.contains(&stringify!($name)) {
_privacy_mask(&$value).into()
} else {
($value).into()
}
};
( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value($value).unwrap() }; // Optional other value, we return as is or convert to string to apply the privacy config
( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to string to apply the privacy config
// Group or empty string
( @show ) => { "" };
( @show $lit:literal ) => { $lit };
// Wrap the optionals in an Option type
( @type $ty:ty, option) => { Option<$ty> };
( @type $ty:ty, $id:ident) => { $ty };
// Generate the values depending on none_action
( @build $value:expr, $config:expr, option, ) => { $value };
( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
match $value {
Some(v) => v,
None => {
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
f($config)
}
}
}};
( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
f($config)
}};
( @getenv $name:expr, bool ) => { get_env_bool($name) };
( @getenv $name:expr, $ty:ident ) => { get_env($name) };
} }
//STRUCTURE: //STRUCTURE:
@ -1512,7 +1616,7 @@ impl Config {
if let Some(akey) = self._duo_akey() { if let Some(akey) = self._duo_akey() {
akey akey
} else { } else {
let akey_s = crate::crypto::encode_random_bytes::<64>(data_encoding::BASE64); let akey_s = crate::crypto::encode_random_bytes::<64>(&data_encoding::BASE64);
// Save the new value // Save the new value
let builder = ConfigBuilder { let builder = ConfigBuilder {
@ -1536,7 +1640,7 @@ impl Config {
token.is_some() && !token.unwrap().trim().is_empty() token.is_some() && !token.unwrap().trim().is_empty()
} }
pub fn opendal_operator_for_path_type(&self, path_type: PathType) -> Result<opendal::Operator, Error> { pub fn opendal_operator_for_path_type(&self, path_type: &PathType) -> Result<opendal::Operator, Error> {
let path = match path_type { let path = match path_type {
PathType::Data => self.data_folder(), PathType::Data => self.data_folder(),
PathType::IconCache => self.icon_cache_folder(), PathType::IconCache => self.icon_cache_folder(),
@ -1728,7 +1832,7 @@ fn to_json<'reg, 'rc>(
// Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then. // Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then.
// The default is based upon the version since this feature is added. // The default is based upon the version since this feature is added.
static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| { static WEB_VAULT_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
let vault_version = get_web_vault_version(); let vault_version = get_web_vault_version();
// Use a single regex capture to extract version components // Use a single regex capture to extract version components
let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap(); let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
@ -1744,7 +1848,7 @@ static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| {
// Configure the Vaultwarden version as an integer so it can be used as a comparison smaller or greater then. // Configure the Vaultwarden version as an integer so it can be used as a comparison smaller or greater then.
// The default is based upon the version since this feature is added. // The default is based upon the version since this feature is added.
static VW_VERSION: Lazy<semver::Version> = Lazy::new(|| { static VW_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
let vw_version = crate::VERSION.unwrap_or("1.32.5"); let vw_version = crate::VERSION.unwrap_or("1.32.5");
// Use a single regex capture to extract version components // Use a single regex capture to extract version components
let re = regex::Regex::new(r"(\d{1})\.(\d{1,2})\.(\d{1,2})").unwrap(); let re = regex::Regex::new(r"(\d{1})\.(\d{1,2})\.(\d{1,2})").unwrap();

4
src/crypto.rs

@ -48,7 +48,7 @@ pub fn get_random_bytes<const N: usize>() -> [u8; N] {
} }
/// Encode random bytes using the provided function. /// Encode random bytes using the provided function.
pub fn encode_random_bytes<const N: usize>(e: Encoding) -> String { pub fn encode_random_bytes<const N: usize>(e: &Encoding) -> String {
e.encode(&get_random_bytes::<N>()) e.encode(&get_random_bytes::<N>())
} }
@ -81,7 +81,7 @@ pub fn get_random_string_alphanum(num_chars: usize) -> String {
} }
pub fn generate_id<const N: usize>() -> String { pub fn generate_id<const N: usize>() -> String {
encode_random_bytes::<N>(HEXLOWER) encode_random_bytes::<N>(&HEXLOWER)
} }
pub fn generate_send_file_id() -> String { pub fn generate_send_file_id() -> String {

4
src/db/models/attachment.rs

@ -45,7 +45,7 @@ impl Attachment {
} }
pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> { pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> {
let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
if operator.info().scheme() == opendal::Scheme::Fs { if operator.info().scheme() == opendal::Scheme::Fs {
let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone())); let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
@ -118,7 +118,7 @@ impl Attachment {
.map_res("Error deleting attachment") .map_res("Error deleting attachment")
}}?; }}?;
let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
let file_path = self.get_file_path(); let file_path = self.get_file_path();
if let Err(e) = operator.delete(&file_path).await { if let Err(e) = operator.delete(&file_path).await {

4
src/db/models/device.rs

@ -48,7 +48,7 @@ impl Device {
} }
pub fn refresh_twofactor_remember(&mut self) -> String { pub fn refresh_twofactor_remember(&mut self) -> String {
let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64); let twofactor_remember = crypto::encode_random_bytes::<180>(&BASE64);
self.twofactor_remember = Some(twofactor_remember.clone()); self.twofactor_remember = Some(twofactor_remember.clone());
twofactor_remember twofactor_remember
@ -135,7 +135,7 @@ impl Device {
push_uuid: Some(PushId(get_uuid())), push_uuid: Some(PushId(get_uuid())),
push_token: None, push_token: None,
refresh_token: crypto::encode_random_bytes::<64>(BASE64URL), refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL),
twofactor_remember: None, twofactor_remember: None,
}; };

2
src/db/models/organization.rs

@ -169,7 +169,7 @@ impl PartialOrd<MembershipType> for i32 {
/// Local methods /// Local methods
impl Organization { impl Organization {
pub fn new(name: String, billing_email: String, private_key: Option<String>, public_key: Option<String>) -> Self { pub fn new(name: String, billing_email: &str, private_key: Option<String>, public_key: Option<String>) -> Self {
let billing_email = billing_email.to_lowercase(); let billing_email = billing_email.to_lowercase();
Self { Self {
uuid: OrganizationId(crate::util::get_uuid()), uuid: OrganizationId(crate::util::get_uuid()),

2
src/db/models/send.rs

@ -226,7 +226,7 @@ impl Send {
self.update_users_revision(conn).await; self.update_users_revision(conn).await;
if self.atype == SendType::File as i32 { if self.atype == SendType::File as i32 {
let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?; let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
operator.remove_all(&self.uuid).await.ok(); operator.remove_all(&self.uuid).await.ok();
} }

2
src/db/models/user.rs

@ -106,7 +106,7 @@ impl User {
pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32; pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32;
pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000; pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000;
pub fn new(email: String, name: Option<String>) -> Self { pub fn new(email: &str, name: Option<String>) -> Self {
let now = Utc::now().naive_utc(); let now = Utc::now().naive_utc();
let email = email.to_lowercase(); let email = email.to_lowercase();

168
src/error.rs

@ -3,6 +3,7 @@
// //
use crate::db::models::EventType; use crate::db::models::EventType;
use crate::http_client::CustomHttpClientError; use crate::http_client::CustomHttpClientError;
use serde::ser::{Serialize, SerializeStruct, Serializer};
use std::error::Error as StdError; use std::error::Error as StdError;
macro_rules! make_error { macro_rules! make_error {
@ -72,7 +73,7 @@ make_error! {
Empty(Empty): _no_source, _serialize, Empty(Empty): _no_source, _serialize,
// Used to represent err! calls // Used to represent err! calls
Simple(String): _no_source, _api_error, Simple(String): _no_source, _api_error,
Compact(Compact): _no_source, _api_error_small, Compact(Compact): _no_source, _compact_api_error,
// Used in our custom http client to handle non-global IPs and blocked domains // Used in our custom http client to handle non-global IPs and blocked domains
CustomHttpClient(CustomHttpClientError): _has_source, _api_error, CustomHttpClient(CustomHttpClientError): _has_source, _api_error,
@ -128,6 +129,10 @@ impl Error {
(usr_msg, log_msg.into()).into() (usr_msg, log_msg.into()).into()
} }
pub fn new_msg<M: Into<String> + Clone>(usr_msg: M) -> Self {
(usr_msg.clone(), usr_msg.into()).into()
}
pub fn empty() -> Self { pub fn empty() -> Self {
Empty {}.into() Empty {}.into()
} }
@ -194,38 +199,97 @@ fn _no_source<T, S>(_: T) -> Option<S> {
None None
} }
fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String { fn _serialize(e: &impl Serialize, _msg: &str) -> String {
serde_json::to_string(e).unwrap() serde_json::to_string(e).unwrap()
} }
/// This will serialize the default ApiErrorResponse
/// It will add the needed fields which are mostly empty or have multiple copies of the message
/// This is more efficient than having a larger struct and use the Serialize derive
/// It also prevents using `json!()` calls to create the final output
impl Serialize for ApiErrorResponse<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
#[derive(serde::Serialize)]
struct ErrorModel<'a> {
message: &'a str,
object: &'static str,
}
let mut state = serializer.serialize_struct("ApiErrorResponse", 9)?;
state.serialize_field("message", self.0.message)?;
let mut validation_errors = std::collections::HashMap::with_capacity(1);
validation_errors.insert("", vec![self.0.message]);
state.serialize_field("validationErrors", &validation_errors)?;
let error_model = ErrorModel {
message: self.0.message,
object: "error",
};
state.serialize_field("errorModel", &error_model)?;
state.serialize_field("error", "")?;
state.serialize_field("error_description", "")?;
state.serialize_field("exceptionMessage", &None::<()>)?;
state.serialize_field("exceptionStackTrace", &None::<()>)?;
state.serialize_field("innerExceptionMessage", &None::<()>)?;
state.serialize_field("object", "error")?;
state.end()
}
}
/// This will serialize the smaller CompactApiErrorResponse
/// It will add the needed fields which are mostly empty
/// This is more efficient than having a larger struct and use the Serialize derive
/// It also prevents using `json!()` calls to create the final output
impl Serialize for CompactApiErrorResponse<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut state = serializer.serialize_struct("CompactApiErrorResponse", 6)?;
state.serialize_field("message", self.0.message)?;
state.serialize_field("validationErrors", &None::<()>)?;
state.serialize_field("exceptionMessage", &None::<()>)?;
state.serialize_field("exceptionStackTrace", &None::<()>)?;
state.serialize_field("innerExceptionMessage", &None::<()>)?;
state.serialize_field("object", "error")?;
state.end()
}
}
/// Main API Error struct template
/// This struct which we can be used by both ApiErrorResponse and CompactApiErrorResponse
/// is small and doesn't contain unneeded empty fields. This is more memory efficient, but also less code to compile
struct ApiErrorMsg<'a> {
message: &'a str,
}
/// Default API Error response struct
/// The custom serialization adds all other needed fields
struct ApiErrorResponse<'a>(ApiErrorMsg<'a>);
/// Compact API Error response struct used for some newer error responses
/// The custom serialization adds all other needed fields
struct CompactApiErrorResponse<'a>(ApiErrorMsg<'a>);
fn _api_error(_: &impl std::any::Any, msg: &str) -> String { fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
let json = json!({ let response = ApiErrorMsg {
"message": msg, message: msg,
"error": "", };
"error_description": "", serde_json::to_string(&ApiErrorResponse(response)).unwrap()
"validationErrors": {"": [ msg ]},
"errorModel": {
"message": msg,
"object": "error"
},
"exceptionMessage": null,
"exceptionStackTrace": null,
"innerExceptionMessage": null,
"object": "error"
});
_serialize(&json, "")
} }
fn _api_error_small(_: &impl std::any::Any, msg: &str) -> String { fn _compact_api_error(_: &impl std::any::Any, msg: &str) -> String {
let json = json!({ let response = ApiErrorMsg {
"message": msg, message: msg,
"validationErrors": null, };
"exceptionMessage": null, serde_json::to_string(&CompactApiErrorResponse(response)).unwrap()
"exceptionStackTrace": null,
"innerExceptionMessage": null,
"object": "error"
});
_serialize(&json, "")
} }
// //
@ -256,34 +320,41 @@ impl Responder<'_, 'static> for Error {
#[macro_export] #[macro_export]
macro_rules! err { macro_rules! err {
($kind:ident, $msg:expr) => {{ ($kind:ident, $msg:expr) => {{
error!("{}", $msg); let msg = $msg;
return Err($crate::error::Error::new($msg, $msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {}))); error!("{msg}");
return Err($crate::error::Error::new_msg(msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
}}; }};
($msg:expr) => {{ ($msg:expr) => {{
error!("{}", $msg); let msg = $msg;
return Err($crate::error::Error::new($msg, $msg)); error!("{msg}");
return Err($crate::error::Error::new_msg(msg));
}}; }};
($msg:expr, ErrorEvent $err_event:tt) => {{ ($msg:expr, ErrorEvent $err_event:tt) => {{
error!("{}", $msg); let msg = $msg;
return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event)); error!("{msg}");
return Err($crate::error::Error::new_msg(msg).with_event($crate::error::ErrorEvent $err_event));
}}; }};
($usr_msg:expr, $log_value:expr) => {{ ($usr_msg:expr, $log_value:expr) => {{
error!("{}. {}", $usr_msg, $log_value); let usr_msg = $usr_msg;
return Err($crate::error::Error::new($usr_msg, $log_value)); let log_value = $log_value;
error!("{usr_msg}. {log_value}");
return Err($crate::error::Error::new(usr_msg, log_value));
}}; }};
($usr_msg:expr, $log_value:expr, ErrorEvent $err_event:tt) => {{ ($usr_msg:expr, $log_value:expr, ErrorEvent $err_event:tt) => {{
error!("{}. {}", $usr_msg, $log_value); let usr_msg = $usr_msg;
return Err($crate::error::Error::new($usr_msg, $log_value).with_event($crate::error::ErrorEvent $err_event)); let log_value = $log_value;
error!("{usr_msg}. {log_value}");
return Err($crate::error::Error::new(usr_msg, log_value).with_event($crate::error::ErrorEvent $err_event));
}}; }};
} }
#[macro_export] #[macro_export]
macro_rules! err_silent { macro_rules! err_silent {
($msg:expr) => {{ ($msg:expr) => {{
return Err($crate::error::Error::new($msg, $msg)); return Err($crate::error::Error::new_msg($msg));
}}; }};
($msg:expr, ErrorEvent $err_event:tt) => {{ ($msg:expr, ErrorEvent $err_event:tt) => {{
return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event)); return Err($crate::error::Error::new_msg($msg).with_event($crate::error::ErrorEvent $err_event));
}}; }};
($usr_msg:expr, $log_value:expr) => {{ ($usr_msg:expr, $log_value:expr) => {{
return Err($crate::error::Error::new($usr_msg, $log_value)); return Err($crate::error::Error::new($usr_msg, $log_value));
@ -296,12 +367,15 @@ macro_rules! err_silent {
#[macro_export] #[macro_export]
macro_rules! err_code { macro_rules! err_code {
($msg:expr, $err_code:expr) => {{ ($msg:expr, $err_code:expr) => {{
error!("{}", $msg); let msg = $msg;
return Err($crate::error::Error::new($msg, $msg).with_code($err_code)); error!("{msg}");
return Err($crate::error::Error::new_msg(msg).with_code($err_code));
}}; }};
($usr_msg:expr, $log_value:expr, $err_code:expr) => {{ ($usr_msg:expr, $log_value:expr, $err_code:expr) => {{
error!("{}. {}", $usr_msg, $log_value); let usr_msg = $usr_msg;
return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code)); let log_value = $log_value;
error!("{usr_msg}. {log_value}");
return Err($crate::error::Error::new(usr_msg, log_value).with_code($err_code));
}}; }};
} }
@ -309,7 +383,7 @@ macro_rules! err_code {
macro_rules! err_discard { macro_rules! err_discard {
($msg:expr, $data:expr) => {{ ($msg:expr, $data:expr) => {{
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
return Err($crate::error::Error::new($msg, $msg)); return Err($crate::error::Error::new_msg($msg));
}}; }};
($usr_msg:expr, $log_value:expr, $data:expr) => {{ ($usr_msg:expr, $log_value:expr, $data:expr) => {{
std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
@ -334,7 +408,9 @@ macro_rules! err_handler {
return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $expr)); return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $expr));
}}; }};
($usr_msg:expr, $log_value:expr) => {{ ($usr_msg:expr, $log_value:expr) => {{
error!(target: "auth", "Unauthorized Error: {}. {}", $usr_msg, $log_value); let usr_msg = $usr_msg;
return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $usr_msg)); let log_value = $log_value;
error!(target: "auth", "Unauthorized Error: {usr_msg}. {log_value}");
return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, usr_msg));
}}; }};
} }

20
src/http_client.rs

@ -2,12 +2,11 @@ use std::{
fmt, fmt,
net::{IpAddr, SocketAddr}, net::{IpAddr, SocketAddr},
str::FromStr, str::FromStr,
sync::{Arc, Mutex}, sync::{Arc, LazyLock, Mutex},
time::Duration, time::Duration,
}; };
use hickory_resolver::{name_server::TokioConnectionProvider, TokioResolver}; use hickory_resolver::{name_server::TokioConnectionProvider, TokioResolver};
use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use reqwest::{ use reqwest::{
dns::{Name, Resolve, Resolving}, dns::{Name, Resolve, Resolving},
@ -25,9 +24,10 @@ pub fn make_http_request(method: reqwest::Method, url: &str) -> Result<reqwest::
err!("Invalid host"); err!("Invalid host");
}; };
should_block_host(host)?; should_block_host(&host)?;
static INSTANCE: Lazy<Client> = Lazy::new(|| get_reqwest_client_builder().build().expect("Failed to build client")); static INSTANCE: LazyLock<Client> =
LazyLock::new(|| get_reqwest_client_builder().build().expect("Failed to build client"));
Ok(INSTANCE.request(method, url)) Ok(INSTANCE.request(method, url))
} }
@ -45,7 +45,7 @@ pub fn get_reqwest_client_builder() -> ClientBuilder {
return attempt.error("Invalid host"); return attempt.error("Invalid host");
}; };
if let Err(e) = should_block_host(host) { if let Err(e) = should_block_host(&host) {
return attempt.error(e); return attempt.error(e);
} }
@ -100,11 +100,11 @@ fn should_block_address_regex(domain_or_ip: &str) -> bool {
is_match is_match
} }
fn should_block_host(host: Host<&str>) -> Result<(), CustomHttpClientError> { fn should_block_host(host: &Host<&str>) -> Result<(), CustomHttpClientError> {
let (ip, host_str): (Option<IpAddr>, String) = match host { let (ip, host_str): (Option<IpAddr>, String) = match host {
Host::Ipv4(ip) => (Some(ip.into()), ip.to_string()), Host::Ipv4(ip) => (Some(IpAddr::V4(*ip)), ip.to_string()),
Host::Ipv6(ip) => (Some(ip.into()), ip.to_string()), Host::Ipv6(ip) => (Some(IpAddr::V6(*ip)), ip.to_string()),
Host::Domain(d) => (None, d.to_string()), Host::Domain(d) => (None, (*d).to_string()),
}; };
if let Some(ip) = ip { if let Some(ip) = ip {
@ -179,7 +179,7 @@ type BoxError = Box<dyn std::error::Error + Send + Sync>;
impl CustomDnsResolver { impl CustomDnsResolver {
fn instance() -> Arc<Self> { fn instance() -> Arc<Self> {
static INSTANCE: Lazy<Arc<CustomDnsResolver>> = Lazy::new(CustomDnsResolver::new); static INSTANCE: LazyLock<Arc<CustomDnsResolver>> = LazyLock::new(CustomDnsResolver::new);
Arc::clone(&*INSTANCE) Arc::clone(&*INSTANCE)
} }

4
src/mail.rs

@ -184,7 +184,7 @@ pub async fn send_delete_account(address: &str, user_id: &UserId) -> EmptyResult
} }
pub async fn send_verify_email(address: &str, user_id: &UserId) -> EmptyResult { pub async fn send_verify_email(address: &str, user_id: &UserId) -> EmptyResult {
let claims = generate_verify_email_claims(user_id.clone()); let claims = generate_verify_email_claims(user_id);
let verify_email_token = encode_jwt(&claims); let verify_email_token = encode_jwt(&claims);
let (subject, body_html, body_text) = get_text( let (subject, body_html, body_text) = get_text(
@ -235,7 +235,7 @@ pub async fn send_welcome(address: &str) -> EmptyResult {
} }
pub async fn send_welcome_must_verify(address: &str, user_id: &UserId) -> EmptyResult { pub async fn send_welcome_must_verify(address: &str, user_id: &UserId) -> EmptyResult {
let claims = generate_verify_email_claims(user_id.clone()); let claims = generate_verify_email_claims(user_id);
let verify_email_token = encode_jwt(&claims); let verify_email_token = encode_jwt(&claims);
let (subject, body_html, body_text) = get_text( let (subject, body_html, body_text) = get_text(

2
src/main.rs

@ -467,7 +467,7 @@ async fn check_data_folder() {
if data_folder.starts_with("s3://") { if data_folder.starts_with("s3://") {
if let Err(e) = CONFIG if let Err(e) = CONFIG
.opendal_operator_for_path_type(PathType::Data) .opendal_operator_for_path_type(&PathType::Data)
.unwrap_or_else(|e| { .unwrap_or_else(|e| {
error!("Failed to create S3 operator for data folder '{data_folder}': {e:?}"); error!("Failed to create S3 operator for data folder '{data_folder}': {e:?}");
exit(1); exit(1);

7
src/ratelimit.rs

@ -1,5 +1,4 @@
use once_cell::sync::Lazy; use std::{net::IpAddr, num::NonZeroU32, sync::LazyLock, time::Duration};
use std::{net::IpAddr, num::NonZeroU32, time::Duration};
use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter}; use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter};
@ -7,13 +6,13 @@ use crate::{Error, CONFIG};
type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultClock>; type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultClock>;
static LIMITER_LOGIN: Lazy<Limiter> = Lazy::new(|| { static LIMITER_LOGIN: LazyLock<Limiter> = LazyLock::new(|| {
let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds()); let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds());
let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst"); let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst");
RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst)) RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst))
}); });
static LIMITER_ADMIN: Lazy<Limiter> = Lazy::new(|| { static LIMITER_ADMIN: LazyLock<Limiter> = LazyLock::new(|| {
let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds()); let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds());
let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst"); let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst");
RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst)) RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst))

17
src/sso.rs

@ -1,12 +1,11 @@
use std::{sync::LazyLock, time::Duration};
use chrono::Utc; use chrono::Utc;
use derive_more::{AsRef, Deref, Display, From}; use derive_more::{AsRef, Deref, Display, From};
use mini_moka::sync::Cache;
use regex::Regex; use regex::Regex;
use std::time::Duration;
use url::Url; use url::Url;
use mini_moka::sync::Cache;
use once_cell::sync::Lazy;
use crate::{ use crate::{
api::ApiResult, api::ApiResult,
auth, auth,
@ -21,12 +20,12 @@ use crate::{
pub static FAKE_IDENTIFIER: &str = "Vaultwarden"; pub static FAKE_IDENTIFIER: &str = "Vaultwarden";
static AC_CACHE: Lazy<Cache<OIDCState, AuthenticatedUser>> = static AC_CACHE: LazyLock<Cache<OIDCState, AuthenticatedUser>> =
Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build()); LazyLock::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
static SSO_JWT_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin())); static SSO_JWT_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin()));
pub static NONCE_EXPIRATION: Lazy<chrono::Duration> = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap()); pub static NONCE_EXPIRATION: LazyLock<chrono::Duration> = LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
#[derive( #[derive(
Clone, Clone,
@ -151,7 +150,7 @@ fn decode_token_claims(token_name: &str, token: &str) -> ApiResult<BasicTokenCla
} }
} }
pub fn decode_state(base64_state: String) -> ApiResult<OIDCState> { pub fn decode_state(base64_state: &str) -> ApiResult<OIDCState> {
let state = match data_encoding::BASE64.decode(base64_state.as_bytes()) { let state = match data_encoding::BASE64.decode(base64_state.as_bytes()) {
Ok(vec) => match String::from_utf8(vec) { Ok(vec) => match String::from_utf8(vec) {
Ok(valid) => OIDCState(valid), Ok(valid) => OIDCState(valid),

18
src/sso_client.rs

@ -1,13 +1,9 @@
use regex::Regex; use std::{borrow::Cow, sync::LazyLock, time::Duration};
use std::borrow::Cow;
use std::time::Duration;
use url::Url;
use mini_moka::sync::Cache; use mini_moka::sync::Cache;
use once_cell::sync::Lazy; use openidconnect::{core::*, reqwest, *};
use openidconnect::core::*; use regex::Regex;
use openidconnect::reqwest; use url::Url;
use openidconnect::*;
use crate::{ use crate::{
api::{ApiResult, EmptyResult}, api::{ApiResult, EmptyResult},
@ -16,8 +12,8 @@ use crate::{
CONFIG, CONFIG,
}; };
static CLIENT_CACHE_KEY: Lazy<String> = Lazy::new(|| "sso-client".to_string()); static CLIENT_CACHE_KEY: LazyLock<String> = LazyLock::new(|| "sso-client".to_string());
static CLIENT_CACHE: Lazy<Cache<String, Client>> = Lazy::new(|| { static CLIENT_CACHE: LazyLock<Cache<String, Client>> = LazyLock::new(|| {
Cache::builder().max_capacity(1).time_to_live(Duration::from_secs(CONFIG.sso_client_cache_expiration())).build() Cache::builder().max_capacity(1).time_to_live(Duration::from_secs(CONFIG.sso_client_cache_expiration())).build()
}); });
@ -162,7 +158,7 @@ impl Client {
if CONFIG.sso_pkce() { if CONFIG.sso_pkce() {
match nonce.verifier { match nonce.verifier {
None => err!(format!("Missing verifier in the DB nonce table")), None => err!(format!("Missing verifier in the DB nonce table")),
Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret.clone())), Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)),
} }
} }

2
src/util.rs

@ -841,7 +841,7 @@ pub fn is_global(ip: std::net::IpAddr) -> bool {
/// Saves a Rocket temporary file to the OpenDAL Operator at the given path. /// Saves a Rocket temporary file to the OpenDAL Operator at the given path.
pub async fn save_temp_file( pub async fn save_temp_file(
path_type: PathType, path_type: &PathType,
path: &str, path: &str,
temp_file: rocket::fs::TempFile<'_>, temp_file: rocket::fs::TempFile<'_>,
overwrite: bool, overwrite: bool,

Loading…
Cancel
Save