diff --git a/.env.template b/.env.template index 46aa6271..07d7dbc0 100644 --- a/.env.template +++ b/.env.template @@ -84,12 +84,8 @@ ### WebSocket ### ################# -## Enables websocket notifications -# WEBSOCKET_ENABLED=false - -## Controls the WebSocket server address and port -# WEBSOCKET_ADDRESS=0.0.0.0 -# WEBSOCKET_PORT=3012 +## Enable websocket notifications +# ENABLE_WEBSOCKET=true ########################## ### Push notifications ### @@ -448,6 +444,11 @@ ## ## Maximum attempts before an email token is reset and a new email will need to be sent. # EMAIL_ATTEMPTS_LIMIT=3 +## +## Setup email 2FA regardless of any organization policy +# EMAIL_2FA_ENFORCE_ON_VERIFIED_INVITE=false +## Automatically setup email 2FA as fallback provider when needed +# EMAIL_2FA_AUTO_FALLBACK=false ## Other MFA/2FA settings ## Disable 2FA remember @@ -524,9 +525,9 @@ ## Only use this as a last resort if you are not able to use a valid certificate. # SMTP_ACCEPT_INVALID_HOSTNAMES=false -########################## +####################### ### Rocket settings ### -########################## +####################### ## Rocket specific settings ## See https://rocket.rs/v0.5/guide/configuration/ for more details. 
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e78c0eba..7956c382 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,7 +46,7 @@ jobs: steps: # Checkout the repo - name: "Checkout" - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 #v4.1.1 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7 # End Checkout the repo @@ -74,7 +74,7 @@ jobs: # Only install the clippy and rustfmt components on the default rust-toolchain - name: "Install rust-toolchain version" - uses: dtolnay/rust-toolchain@be73d7920c329f220ce78e0234b8f96b7ae60248 # master @ 2023-12-07 - 10:22 PM GMT+1 + uses: dtolnay/rust-toolchain@21dc36fb71dd22e3317045c0c31a3f4249868b17 # master @ Jun 13, 2024, 6:20 PM GMT+2 if: ${{ matrix.channel == 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" @@ -84,7 +84,7 @@ jobs: # Install the any other channel to be used for which we do not execute clippy and rustfmt - name: "Install MSRV version" - uses: dtolnay/rust-toolchain@be73d7920c329f220ce78e0234b8f96b7ae60248 # master @ 2023-12-07 - 10:22 PM GMT+1 + uses: dtolnay/rust-toolchain@21dc36fb71dd22e3317045c0c31a3f4249868b17 # master @ Jun 13, 2024, 6:20 PM GMT+2 if: ${{ matrix.channel != 'rust-toolchain' }} with: toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}" diff --git a/.github/workflows/hadolint.yml b/.github/workflows/hadolint.yml index 82acc926..bd890580 100644 --- a/.github/workflows/hadolint.yml +++ b/.github/workflows/hadolint.yml @@ -13,7 +13,7 @@ jobs: steps: # Checkout the repo - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 # End Checkout the repo # Download hadolint - https://github.com/hadolint/hadolint/releases diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8063827d..b1fb85d4 100644 --- a/.github/workflows/release.yml 
+++ b/.github/workflows/release.yml @@ -58,7 +58,7 @@ jobs: steps: # Checkout the repo - name: Checkout - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 @@ -69,13 +69,13 @@ jobs: # Start Docker Buildx - name: Setup Docker Buildx - uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # v3.0.0 + uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3.3.0 # https://github.com/moby/buildkit/issues/3969 - # Also set max parallelism to 2, the default of 4 breaks GitHub Actions + # Also set max parallelism to 3, the default of 4 breaks GitHub Actions and causes OOMKills with: - config-inline: | + buildkitd-config-inline: | [worker.oci] - max-parallelism = 2 + max-parallelism = 3 driver-opts: | network=host @@ -102,7 +102,7 @@ jobs: # Login to Docker Hub - name: Login to Docker Hub - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -116,7 +116,7 @@ jobs: # Login to GitHub Container Registry - name: Login to GitHub Container Registry - uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -129,15 +129,9 @@ jobs: run: | echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}" - - name: Add registry for ghcr.io - if: ${{ env.HAVE_GHCR_LOGIN == 'true' }} - shell: bash - run: | - echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}${{ vars.GHCR_REPO }}" | tee -a "${GITHUB_ENV}" - # Login to Quay.io - name: Login to Quay.io - uses: 
docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 with: registry: quay.io username: ${{ secrets.QUAY_USERNAME }} @@ -157,7 +151,7 @@ jobs: # Check if there is a GitHub Container Registry Login and use it for caching if [[ -n "${HAVE_GHCR_LOGIN}" ]]; then echo "BAKE_CACHE_FROM=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }}" | tee -a "${GITHUB_ENV}" - echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},mode=max" | tee -a "${GITHUB_ENV}" + echo "BAKE_CACHE_TO=type=registry,ref=${{ vars.GHCR_REPO }}-buildcache:${{ matrix.base_image }},compression=zstd,mode=max" | tee -a "${GITHUB_ENV}" else echo "BAKE_CACHE_FROM=" echo "BAKE_CACHE_TO=" @@ -171,7 +165,7 @@ jobs: echo "CONTAINER_REGISTRIES=${CONTAINER_REGISTRIES:+${CONTAINER_REGISTRIES},}localhost:5000/vaultwarden/server" | tee -a "${GITHUB_ENV}" - name: Bake ${{ matrix.base_image }} containers - uses: docker/bake-action@849707117b03d39aba7924c50a10376a69e88d7d # v4.1.0 + uses: docker/bake-action@1c5f18a523c4c68524cfbc5161494d8bb5b29d20 # v5.0.1 env: BASE_TAGS: "${{ env.BASE_TAGS }}" SOURCE_COMMIT: "${{ env.SOURCE_COMMIT }}" @@ -204,53 +198,53 @@ jobs: # This is needed because using different platforms doesn't trigger a new pull/download # Extract amd64 binary - docker create --name amd64 --platform=linux/amd64 "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker create --name amd64 --platform=linux/amd64 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" docker cp amd64:/vaultwarden vaultwarden-amd64 docker rm --force amd64 - docker rmi --force "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" # Extract arm64 binary - docker create --name arm64 --platform=linux/arm64 "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker create --name arm64 --platform=linux/arm64 
"localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" docker cp arm64:/vaultwarden vaultwarden-arm64 docker rm --force arm64 - docker rmi --force "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" # Extract armv7 binary - docker create --name armv7 --platform=linux/arm/v7 "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker create --name armv7 --platform=linux/arm/v7 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" docker cp armv7:/vaultwarden vaultwarden-armv7 docker rm --force armv7 - docker rmi --force "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" # Extract armv6 binary - docker create --name armv6 --platform=linux/arm/v6 "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker create --name armv6 --platform=linux/arm/v6 "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" docker cp armv6:/vaultwarden vaultwarden-armv6 docker rm --force armv6 - docker rmi --force "vaultwarden/server:${EXTRACT_TAG}-alpine" + docker rmi --force "localhost:5000/vaultwarden/server:${EXTRACT_TAG}-alpine" # Upload artifacts to Github Actions - name: "Upload amd64 artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.base_image == 'alpine' }} with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64 path: vaultwarden-amd64 - name: "Upload arm64 artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.base_image == 'alpine' }} with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64 path: vaultwarden-arm64 - name: "Upload armv7 artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: 
actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.base_image == 'alpine' }} with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7 path: vaultwarden-armv7 - name: "Upload armv6 artifact" - uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1 + uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 if: ${{ matrix.base_image == 'alpine' }} with: name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6 diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml index caecf695..2c957994 100644 --- a/.github/workflows/trivy.yml +++ b/.github/workflows/trivy.yml @@ -25,10 +25,10 @@ jobs: actions: read steps: - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 #v4.1.1 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 #v4.1.7 - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@d43c1f16c00cfd3978dde6c07f4bbcf9eb6993ca # v0.16.1 + uses: aquasecurity/trivy-action@7c2007bcb556501da015201bcba5aa14069b74e2 # v0.23.0 with: scan-type: repo ignore-unfixed: true @@ -37,6 +37,6 @@ jobs: severity: CRITICAL,HIGH - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@b7bf0a3ed3ecfa44160715d7c442788f65f0f923 # v3.23.2 + uses: github/codeql-action/upload-sarif@2bbafcdd7fbf96243689e764c2f15d9735164f33 # v3.25.10 with: sarif_file: 'trivy-results.sarif' diff --git a/Cargo.lock b/Cargo.lock index 518d8f84..2834a9f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" dependencies = [ "gimli", ] @@ -19,9 +19,9 @@ checksum = 
"f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d713b3834d76b85304d4d525563c1276e2e30dc97cc67bfb4585a4a29fc2c89f" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "once_cell", @@ -31,9 +31,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -55,9 +55,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -99,22 +99,21 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.2.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28243a43d821d11341ab73c80bed182dc015c514b951616cf79bd4af39af0c3" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" dependencies = [ "concurrent-queue", - "event-listener 5.1.0", - "event-listener-strategy 0.5.0", + "event-listener-strategy", "futures-core", "pin-project-lite", ] [[package]] name = "async-compression" -version = "0.4.6" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" +checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" dependencies = [ "brotli", "flate2", @@ -126,15 +125,14 @@ dependencies = [ 
[[package]] name = "async-executor" -version = "1.8.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ae5ebefcc48e7452b4987947920dac9450be1110cadf34d1b8c116bdbaf97c" +checksum = "c8828ec6e544c02b0d6691d21ed9f9218d0384a82542855073c2a3f58304aaf0" dependencies = [ - "async-lock 3.3.0", "async-task", "concurrent-queue", - "fastrand 2.0.1", - "futures-lite 2.2.0", + "fastrand 2.1.0", + "futures-lite 2.3.0", "slab", ] @@ -144,12 +142,12 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ - "async-channel 2.2.0", + "async-channel 2.3.1", "async-executor", - "async-io 2.3.1", - "async-lock 3.3.0", + "async-io 2.3.3", + "async-lock 3.4.0", "blocking", - "futures-lite 2.2.0", + "futures-lite 2.3.0", "once_cell", ] @@ -175,18 +173,18 @@ dependencies = [ [[package]] name = "async-io" -version = "2.3.1" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f97ab0c5b00a7cdbe5a371b9a782ee7be1316095885c8a4ea1daf490eb0ef65" +checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" dependencies = [ - "async-lock 3.3.0", + "async-lock 3.4.0", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.2.0", + "futures-lite 2.3.0", "parking", - "polling 3.5.0", - "rustix 0.38.31", + "polling 3.7.2", + "rustix 0.38.34", "slab", "tracing", "windows-sys 0.52.0", @@ -203,12 +201,12 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ - "event-listener 4.0.3", - "event-listener-strategy 0.4.0", + "event-listener 5.3.1", + "event-listener-strategy", "pin-project-lite", ] @@ 
-225,26 +223,26 @@ dependencies = [ "cfg-if", "event-listener 3.1.0", "futures-lite 1.13.0", - "rustix 0.38.31", + "rustix 0.38.34", "windows-sys 0.48.0", ] [[package]] name = "async-signal" -version = "0.2.5" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +checksum = "794f185324c2f00e771cd9f1ae8b5ac68be2ca7abb129a87afd6e86d228bc54d" dependencies = [ - "async-io 2.3.1", - "async-lock 2.8.0", + "async-io 2.3.3", + "async-lock 3.4.0", "atomic-waker", "cfg-if", "futures-core", "futures-io", - "rustix 0.38.31", + "rustix 0.38.34", "signal-hook-registry", "slab", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -293,24 +291,24 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "async-task" -version = "4.7.0" +version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.77" +version = "0.1.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -336,15 +334,15 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = 
"backtrace" -version = "0.3.69" +version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", @@ -367,6 +365,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "base64ct" version = "1.6.0" @@ -375,9 +379,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bigdecimal" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06619be423ea5bb86c95f087d5707942791a08a85530df0db2209a3ecfb8bc9" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" dependencies = [ "autocfg", "libm", @@ -400,9 +404,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.2" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "blake2" @@ -424,25 +428,22 @@ dependencies = [ [[package]] name = "blocking" -version = "1.5.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a37913e8dc4ddcc604f0c6d3bf2887c995153af3611de9e23c352b44c1b9118" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" dependencies = [ - "async-channel 2.2.0", - "async-lock 3.3.0", + "async-channel 
2.3.1", "async-task", - "fastrand 2.0.1", "futures-io", - "futures-lite 2.2.0", + "futures-lite 2.3.0", "piper", - "tracing", ] [[package]] name = "brotli" -version = "3.4.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" +checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -451,9 +452,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.5.1" +version = "4.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -461,15 +462,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.15.3" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea184aa71bb362a1157c896979544cc23974e08fd265f29ea96b59f0b4a555b" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" -version = "1.14.3" +version = "1.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ef034f05691a48569bd920a96c81b9d91bbad1ab5ac7c4616c1f6ef36cb79f" +checksum = "b236fc92302c97ed75b38da1f4917b5cdda4984745740f153a5d3059e48d725e" [[package]] name = "byteorder" @@ -479,15 +480,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "cached" -version = "0.48.1" +version = "0.52.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "355face540df58778b96814c48abb3c2ed67c4878a8087ab1819c1fedeec505f" +checksum = "a8466736fe5dbcaf8b8ee24f9bbefe43c884dc3e9ff7178da70f55bffca1133c" dependencies = [ "ahash", "async-trait", @@ -503,14 +504,14 @@ dependencies = [ [[package]] name = "cached_proc_macro" -version = "0.19.1" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d52f526f7cbc875b296856ca8c964a9f6290556922c303a8a3883e3c676e6a1" +checksum = "575f32e012222055211b70f5b0601f951f84523410a0e65c81f2744a6042450d" dependencies = [ "darling", "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -521,9 +522,9 @@ checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" [[package]] name = "cc" -version = "1.0.86" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9fa1897e4325be0d68d48df6aa1a71ac2ed4d27723887e7754192705350730" +checksum = "066fce287b1d4eafef758e89e09d724a24808a9196fe9756b8ca90e86d0719a2" [[package]] name = "cfg-if" @@ -533,22 +534,22 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.34" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.3", + "windows-targets 0.52.6", ] [[package]] name = "chrono-tz" -version = "0.8.6" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e" +checksum = "93698b29de5e97ad0ae26447b344c482a7284c737d9ddc5f9e52b74a336671bb" dependencies = [ "chrono", "chrono-tz-build", @@ -557,9 +558,9 @@ dependencies = 
[ [[package]] name = "chrono-tz-build" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f" +checksum = "0c088aee841df9c3041febbb73934cfc39708749bf96dc827e3359cd39ef11b1" dependencies = [ "parse-zoneinfo", "phf", @@ -578,29 +579,18 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ "crossbeam-utils", ] [[package]] name = "cookie" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24" -dependencies = [ - "percent-encoding", - "time", - "version_check", -] - -[[package]] -name = "cookie" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ "percent-encoding", "time", @@ -609,12 +599,12 @@ dependencies = [ [[package]] name = "cookie_store" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387461abbc748185c3a6e1673d826918b450b87ff22639429c694619a83b6cf6" +checksum = "4934e6b7e8419148b6ef56950d277af8561060b56afd59e2aadf98b59fce6baa" dependencies = [ - "cookie 0.17.0", - "idna 0.3.0", + "cookie", + "idna 0.5.0", "log", "publicsuffix", "serde", @@ -651,18 +641,18 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.4.0" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "cron" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff76b51e4c068c52bfd2866e1567bee7c567ae8f24ada09fd4307019e25eab7" +checksum = "6f8c3e73077b4b4a6ab1ea5047c37c57aee77657bc8ecd6f29b0af082d0b0c07" dependencies = [ "chrono", "nom", @@ -671,9 +661,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crypto-common" @@ -687,9 +677,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.4" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" dependencies = [ "darling_core", "darling_macro", @@ -697,27 +687,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.14.4" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 1.0.109", + "syn", ] [[package]] name = "darling_macro" -version = "0.14.4" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" dependencies = [ 
"darling_core", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -733,11 +723,25 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "dashmap" +version = "6.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "804c8821570c3f8b70230c2ba75ffa5c0f9a4189b9a432b6656c536712acae28" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "data-encoding" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" [[package]] name = "data-url" @@ -780,21 +784,21 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35b50dba0afdca80b187392b24f2499a88c336d5a8493e4b4ccfb608708be56a" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.6.0", "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "diesel" -version = "2.1.4" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62c6fcf842f17f8c78ecf7c81d75c5ce84436b41ee07e03f490fbb5f5a8731d8" +checksum = "62d6dcd069e7b5fe49a302411f759d4cf1cf2c27fe798ef46fb8baefc053dd2b" dependencies = [ "bigdecimal", - "bitflags 2.4.2", + "bitflags 2.6.0", "byteorder", "chrono", "diesel_derives", @@ -813,14 +817,15 @@ dependencies = [ [[package]] name = "diesel_derives" -version = "2.1.2" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8337737574f55a468005a83499da720f20c65586241ffea339db9ecdfd2b44" +checksum = "59de76a222c2b8059f789cbe07afbfd8deb8c31dd0bc2a21f85e256c1def8259" dependencies = [ "diesel_table_macro_syntax", + "dsl_auto_type", "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -835,9 +840,9 @@ dependencies = [ 
[[package]] name = "diesel_migrations" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6036b3f0120c5961381b570ee20a02432d7e2d27ea60de9578799cf9156914ac" +checksum = "8a73ce704bad4231f001bff3314d91dce4aba0770cee8b233991859abc15c1f6" dependencies = [ "diesel", "migrations_internals", @@ -846,11 +851,11 @@ dependencies = [ [[package]] name = "diesel_table_macro_syntax" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5" +checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25" dependencies = [ - "syn 2.0.50", + "syn", ] [[package]] @@ -870,36 +875,50 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dsl_auto_type" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0892a17df262a24294c382f0d5997571006e7a4348b4327557c4ff1cd4a8bccc" +dependencies = [ + "darling", + "either", + "heck 0.5.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "either" -version = "1.10.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "email-encoding" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfb21b9878cf7a348dcb8559109aabc0ec40d69924bd706fa5149846c4fef75" +checksum = "60d1d33cdaede7e24091f039632eb5d3c7469fe5b066a985281a34fc70fa317f" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "memchr", ] [[package]] name = "email_address" -version = "0.2.4" +version = "0.2.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2153bd83ebc09db15bcbdc3e2194d901804952e3dc96967e1cd3b0c5c32d112" +checksum = "c1019fa28f600f5b581b7a603d515c3f1635da041ca211b5055804788673abfe" dependencies = [ "serde", ] [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] @@ -910,10 +929,10 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -924,9 +943,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", @@ -960,20 +979,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener" -version = "5.1.0" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7ad6fd685ce13acd6d9541a30f6db6567a7a24c9ffd4ba2955d29e3f22c8b27" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", @@ -982,21 +990,11 @@ dependencies = [ 
[[package]] name = "event-listener-strategy" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" -dependencies = [ - "event-listener 4.0.3", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "feedafcaa9b749175d5ac357452a9d41ea2911da598fde46ce1fe02c37751291" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "event-listener 5.1.0", + "event-listener 5.3.1", "pin-project-lite", ] @@ -1011,9 +1009,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fern" @@ -1029,23 +1027,23 @@ dependencies = [ [[package]] name = "figment" -version = "0.10.14" +version = "0.10.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b6e5bc7bd59d60d0d45a6ccab6cf0f4ce28698fb4e81e750ddf229c9b824026" +checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" dependencies = [ "atomic 0.6.0", "pear", "serde", - "toml 0.8.10", + "toml", "uncased", "version_check", ] [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "miniz_oxide", @@ -1146,11 +1144,11 @@ dependencies = [ [[package]] name = "futures-lite" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "445ba825b27408685aaecefd65178908c36c6e96aaf6d8599419d46e624192ba" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" dependencies = [ - "fastrand 2.0.1", + "fastrand 2.1.0", "futures-core", "futures-io", "parking", @@ -1165,7 +1163,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -1214,7 +1212,7 @@ dependencies = [ "libc", "log", "rustversion", - "windows", + "windows 0.48.0", ] [[package]] @@ -1229,9 +1227,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.12" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", @@ -1242,9 +1240,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" [[package]] name = "glob" @@ -1271,7 +1269,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68a7f542ee6b35af73b06abc0dad1c1bae89964e4e253bc4b587b91c9637867b" dependencies = [ "cfg-if", - "dashmap", + "dashmap 5.5.3", "futures", "futures-timer", "no-std-compat", @@ -1286,16 +1284,35 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.24" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http", + "http 0.2.12", + "indexmap", + 
"slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.1.0", "indexmap", "slab", "tokio", @@ -1305,15 +1322,15 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" [[package]] name = "handlebars" -version = "5.1.0" +version = "5.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab283476b99e66691dee3f1640fea91487a8d81f50fb5ecc75538f8f8879a1e4" +checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b" dependencies = [ "log", "pest", @@ -1326,9 +1343,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", @@ -1340,11 +1357,68 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" -version = "0.3.6" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bd5256b483761cd23699d0da46cc6fd2ee3be420bbe6d020ae4a091e70b7e9fd" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hickory-proto" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.4.0", + "ipnet", + "once_cell", + "rand", + "thiserror", + "tinyvec", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "hickory-resolver" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243" +dependencies = [ + "cfg-if", + "futures-util", + "hickory-proto", + "ipconfig", + "lru-cache", + "once_cell", + "parking_lot", + "rand", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "tracing", +] [[package]] name = "hmac" @@ -1375,6 +1449,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "hostname" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9c7c7c8ac16c798734b8a24560c1362120597c40d5e1459f09498f8f6c8f2ba" +dependencies = [ + "cfg-if", + "libc", + "windows 0.52.0", +] + [[package]] name = "html5gum" version = "0.5.7" @@ -1386,9 +1471,20 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + 
+[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -1402,15 +1498,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -1420,28 +1539,65 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.28" +version = "0.14.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.6", + "socket2 0.5.7", 
"tokio", "tower-service", "tracing", "want", ] +[[package]] +name = "hyper" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4fe55fb7a772d59a5ff1dfbff4fe0258d19b89fec4b233e75d35d5d2316badc" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.0", + "hyper-util", + "rustls 0.23.11", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", +] + [[package]] name = "hyper-tls" version = "0.5.0" @@ -1449,10 +1605,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.29", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.4.0", + "hyper-util", "native-tls", "tokio", "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.4.0", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + "tower", + "tower-service", + "tracing", ] [[package]] @@ -1516,9 +1708,9 @@ dependencies = [ [[package]] name = 
"indexmap" -version = "2.2.3" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown", @@ -1533,9 +1725,9 @@ checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" [[package]] name = "instant" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] @@ -1546,7 +1738,7 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", "windows-sys 0.48.0", ] @@ -1557,10 +1749,10 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.6", + "socket2 0.5.7", "widestring", "windows-sys 0.48.0", - "winreg", + "winreg 0.50.0", ] [[package]] @@ -1575,16 +1767,16 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", "windows-sys 0.52.0", ] [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jetscii" @@ -1594,9 +1786,9 @@ checksum = 
"47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e" [[package]] name = "job_scheduler_ng" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10bbdf445513bbe53f4666218b7057d265c76fa0b30475e121a6bf05dbaacaae" +checksum = "87c252207f323e2996d087759ebdcff8f608cd3eaa9896909a0c2dd3050a3c6a" dependencies = [ "chrono", "cron", @@ -1605,18 +1797,18 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.68" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonwebtoken" -version = "9.2.0" +version = "9.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7ea04a7c5c055c175f189b6dc6ba036fd62306b58c66c9f6389036c503a3f4" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" dependencies = [ "base64 0.21.7", "js-sys", @@ -1638,26 +1830,26 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lettre" -version = "0.11.4" +version = "0.11.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357ff5edb6d8326473a64c82cf41ddf78ab116f89668c50c4fac1b321e5e80f4" +checksum = "1a62049a808f1c4e2356a2a380bd5f2aca3b011b0b482cf3b914ba1731426969" dependencies = [ "async-std", "async-trait", - "base64 0.21.7", + "base64 0.22.1", "chumsky", "email-encoding", "email_address", - "fastrand 2.0.1", + "fastrand 2.1.0", "futures-io", "futures-util", - "hostname", + "hostname 0.4.0", "httpdate", "idna 0.5.0", "mime", 
@@ -1666,7 +1858,7 @@ dependencies = [ "percent-encoding", "quoted_printable", "serde", - "socket2 0.5.6", + "socket2 0.5.7", "tokio", "tokio-native-tls", "tracing", @@ -1675,9 +1867,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.153" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libm" @@ -1687,9 +1879,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libmimalloc-sys" -version = "0.1.35" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3979b5c37ece694f1f5e51e7ecc871fdb0f517ed04ee45f88d15d6d553cb9664" +checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" dependencies = [ "cc", "libc", @@ -1697,9 +1889,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" dependencies = [ "cc", "pkg-config", @@ -1720,15 +1912,15 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = 
"07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -1736,9 +1928,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" dependencies = [ "value-bag", ] @@ -1784,25 +1976,25 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "migrations_internals" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f23f71580015254b020e856feac3df5878c2c7a8812297edd6c0a485ac9dada" +checksum = "fd01039851e82f8799046eabbb354056283fb265c8ec0996af940f4e85a380ff" dependencies = [ "serde", - "toml 0.7.8", + "toml", ] [[package]] name = "migrations_macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cce3325ac70e67bbab5bd837a31cae01f1a6db64e0e744a33cb03a543469ef08" +checksum = "ffb161cc72176cb37aa47f1fc520d3ef02263d67d661f44f05d05a079e1237fd" dependencies = [ "migrations_internals", "proc-macro2", @@ -1811,9 +2003,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.39" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa01922b5ea280a911e323e4d2fd24b7fe5cc4042e0d2cda3c40775cdc4bdc9c" +checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" dependencies = [ "libmimalloc-sys", ] @@ -1832,18 +2024,18 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name 
= "miniz_oxide" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "wasi", @@ -1852,16 +2044,15 @@ dependencies = [ [[package]] name = "multer" -version = "2.1.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" dependencies = [ "bytes", "encoding_rs", "futures-util", - "http", + "http 1.1.0", "httparse", - "log", "memchr", "mime", "spin", @@ -1872,9 +2063,9 @@ dependencies = [ [[package]] name = "mysqlclient-sys" -version = "0.2.5" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f61b381528ba293005c42a409dd73d034508e273bf90481f17ec2e964a6e969b" +checksum = "b2aa3a303b6e9878b34811838301b00a56878693c47f9ac0ba397f91adc7bf12" dependencies = [ "pkg-config", "vcpkg", @@ -1882,11 +2073,10 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ - "lazy_static", "libc", "log", "openssl", @@ -1932,11 +2122,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -1955,7 +2144,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -1969,9 +2158,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] @@ -1982,7 +2171,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] @@ -1997,9 +2186,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.2" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" dependencies = [ "memchr", ] @@ -2016,7 +2205,7 @@ version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.6.0", "cfg-if", "foreign-types", "libc", @@ -2033,7 +2222,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -2044,18 +2233,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] 
name = "openssl-src" -version = "300.2.3+3.2.1" +version = "300.3.1+3.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843" +checksum = "7259953d42a81bf137fbbd73bd30a8e1914d6dce43c2b90ed575783a22608b91" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.101" +version = "0.9.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dda2b0f344e78efc2facf7d195d098df0dd72151b26ab98da807afc26c198dff" +checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" dependencies = [ "cc", "libc", @@ -2078,9 +2267,9 @@ checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -2088,22 +2277,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] name = "parse-zoneinfo" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c705f256449c60da65e11ff6626e0c16a0a0b96aaa348de61376b249bc340f41" +checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24" dependencies = [ "regex", ] @@ -2121,15 +2310,15 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pear" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ccca0f6c17acc81df8e242ed473ec144cbf5c98037e69aa6d144780aad103c8" +checksum = "bdeeaa00ce488657faba8ebf44ab9361f9365a97bd39ffb8a60663f57ff4b467" dependencies = [ "inlinable_string", "pear_codegen", @@ -2138,23 +2327,23 @@ dependencies = [ [[package]] name = "pear_codegen" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e22670e8eb757cff11d6c199ca7b987f352f0346e0be4dd23869ec72cb53c77" +checksum = "4bab5b985dc082b345f812b7df84e1bef27e7207b39e448439ba8bd69c93f147" dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "pem" -version = "3.0.3" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "serde", ] @@ -2166,9 +2355,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.7" +version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219c0dcc30b6a27553f9cc242972b67f75b60eb0db71f0b5462f38b058c41546" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" dependencies = [ "memchr", "thiserror", @@ -2177,9 +2366,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.7" +version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"22e1288dbd7786462961e69bfd4df7848c1e37e8b74303dbdab82c3a9cdd2809" +checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" dependencies = [ "pest", "pest_generator", @@ -2187,22 +2376,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.7" +version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1381c29a877c6d34b8c176e734f35d7f7f5b3adaefe940cb4d1bb7af94678e2e" +checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "pest_meta" -version = "2.7.7" +version = "2.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0934d6907f148c22a3acbda520c7eed243ad7487a30f51f6ce52b58b7077a8a" +checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" dependencies = [ "once_cell", "pest", @@ -2253,11 +2442,31 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -2267,12 
+2476,12 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" dependencies = [ "atomic-waker", - "fastrand 2.0.1", + "fastrand 2.1.0", "futures-io", ] @@ -2300,14 +2509,15 @@ dependencies = [ [[package]] name = "polling" -version = "3.5.0" +version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24f040dee2588b4963afb4e420540439d126f73fdacf4a9c486a96d840bac3c9" +checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" dependencies = [ "cfg-if", "concurrent-queue", + "hermit-abi 0.4.0", "pin-project-lite", - "rustix 0.38.31", + "rustix 0.38.34", "tracing", "windows-sys 0.52.0", ] @@ -2332,18 +2542,18 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "pq-sys" -version = "0.4.8" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0052426df997c0cbd30789eb44ca097e3541717a7b8fa36b1c464ee7edebd" +checksum = "a24ff9e4cf6945c988f0db7005d87747bf72864965c3529d259ad155ac41d584" dependencies = [ "vcpkg", ] [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -2356,7 +2566,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", "version_check", "yansi", ] @@ -2388,9 +2598,9 @@ dependencies = [ [[package]] name = "quanta" -version = "0.12.2" 
+version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca0b7bac0b97248c40bb77288fc52029cf1459c0461ea1b05ee32ccf011de2c" +checksum = "8e5167a477619228a0b284fac2674e3c388cba90631d7b7de620e6f1fcd08da5" dependencies = [ "crossbeam-utils", "libc", @@ -2409,9 +2619,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] @@ -2465,52 +2675,52 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "11.0.1" +version = "11.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d86a7c4638d42c44551f4791a20e687dbb4c3de1f33c43dd71e355cd429def1" +checksum = "e29830cbb1290e404f24c73af91c5d8d631ce7e128691e9477556b540cd01ecd" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.6.0", ] [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", ] [[package]] name = "ref-cast" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4846d4c50d1721b1a3bef8af76924eef20d5e723647333798c1b519b3a9473f" +checksum = "ccf0a6f84d5f1d581da8b41b47ec8600871962f2a528115b542b362d4b744931" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" +checksum = "bcc303e793d3734489387d205e9b186fac9c6cfacedd98cbb2e8a5943595f3e6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "regex" -version = "1.10.3" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.5", - "regex-syntax 0.8.2", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", ] [[package]] @@ -2524,13 +2734,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.4", ] [[package]] @@ -2541,9 +2751,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "reopen" @@ -2558,23 +2768,66 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.24" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ - "async-compression", "base64 0.21.7", "bytes", - "cookie 0.17.0", + "encoding_rs", + "futures-core", + "futures-util", 
+ "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.29", + "hyper-tls 0.5.0", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile 1.0.4", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 0.1.2", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg 0.50.0", +] + +[[package]] +name = "reqwest" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +dependencies = [ + "async-compression", + "base64 0.22.1", + "bytes", + "cookie", "cookie_store", "encoding_rs", "futures-core", "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-tls", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.4.0", + "hyper-rustls", + "hyper-tls 0.6.0", + "hyper-util", "ipnet", "js-sys", "log", @@ -2583,24 +2836,23 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 2.1.2", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 1.0.1", "system-configuration", "tokio", "tokio-native-tls", "tokio-socks", "tokio-util", "tower-service", - "trust-dns-resolver", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "winreg", + "winreg 0.52.0", ] [[package]] @@ -2609,7 +2861,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" dependencies = [ - "hostname", + "hostname 0.3.1", "quick-error", ] @@ -2630,9 +2882,9 @@ dependencies = [ [[package]] name = "rmp" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f9860a6cc38ed1da53456442089b4dfa35e7cedaa326df63017af88385e6b20" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" dependencies = [ "byteorder", "num-traits", @@ -2641,9 +2893,9 @@ dependencies = [ [[package]] name = "rmpv" -version = "1.0.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e0e0214a4a2b444ecce41a4025792fc31f77c7bb89c46d253953ea8c65701ec" +checksum = "58450723cd9ee93273ce44a20b6ec4efe17f8ed2e3631474387bfdecf18bb2a9" dependencies = [ "num-traits", "rmp", @@ -2651,9 +2903,9 @@ dependencies = [ [[package]] name = "rocket" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e7bb57ccb26670d73b6a47396c83139447b9e7878cab627fdfe9ea8da489150" +checksum = "a516907296a31df7dc04310e7043b61d71954d703b603cc6867a026d7e72d73f" dependencies = [ "async-stream", "async-trait", @@ -2689,9 +2941,9 @@ dependencies = [ [[package]] name = "rocket_codegen" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2238066abf75f21be6cd7dc1a09d5414a671f4246e384e49fe3f8a4936bd04c" +checksum = "575d32d7ec1a9770108c879fc7c47815a80073f96ca07ff9525a94fcede1dd46" dependencies = [ "devise", "glob", @@ -2699,22 +2951,22 @@ dependencies = [ "proc-macro2", "quote", "rocket_http", - "syn 2.0.50", + "syn", "unicode-xid", "version_check", ] [[package]] name = "rocket_http" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a1663694d059fe5f943ea5481363e48050acedd241d46deb2e27f71110389e" +checksum = "e274915a20ee3065f611c044bd63c40757396b6dbc057d6046aec27f14f882b9" dependencies = [ - "cookie 0.18.0", + "cookie", "either", "futures", - "http", - "hyper", + "http 0.2.12", + "hyper 0.14.29", "indexmap", "log", "memchr", @@ -2722,23 +2974,23 @@ dependencies = [ "percent-encoding", "pin-project-lite", "ref-cast", - "rustls", - 
"rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "smallvec", "stable-pattern", "state", "time", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "uncased", ] [[package]] name = "rocket_ws" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6677b3fe72e2d73dd632c412546ed8153e8685c830ee8d20e4488575cb905d9" +checksum = "25f1877668c937b701177c349f21383c556cd3bb4ba8fa1d07fa96ccb3a8782e" dependencies = [ "rocket", "tokio-tungstenite", @@ -2767,9 +3019,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustix" @@ -2787,29 +3039,42 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.31" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.4.2", + "bitflags 2.6.0", "errno", "libc", - "linux-raw-sys 0.4.13", + "linux-raw-sys 0.4.14", "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.23.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4828ea528154ae444e5a642dbb7d5623354030dc9822b83fd9bb79683c7399d0" +dependencies = [ + "once_cell", + 
"rustls-pki-types", + "rustls-webpki 0.102.5", + "subtle", + "zeroize", +] + [[package]] name = "rustls-pemfile" version = "1.0.4" @@ -2819,6 +3084,22 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -2829,17 +3110,28 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.102.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a6fccd794a42c2c105b513a2f62bc3fd8f3ba57a4593677ceb0bd035164d78" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" -version = "1.0.14" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" @@ -2892,11 +3184,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.2" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +checksum = 
"c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -2905,9 +3197,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.1" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", @@ -2915,15 +3207,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.197" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" dependencies = [ "serde_derive", ] @@ -2940,20 +3232,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] name = "serde_json" -version = "1.0.114" +version = "1.0.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" dependencies = [ "itoa", "ryu", @@ -2962,9 +3254,9 @@ dependencies = 
[ [[package]] name = "serde_spanned" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ "serde", ] @@ -3024,9 +3316,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] @@ -3060,9 +3352,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" @@ -3076,9 +3368,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.6" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3132,21 +3424,21 @@ dependencies = [ [[package]] name = "strsim" -version = "0.10.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = 
"13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "1.0.109" +version = "2.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +checksum = "2f0209b68b3613b093e0ec905354eccaedcfe83b8cb37cbdeae64026c3064c16" dependencies = [ "proc-macro2", "quote", @@ -3154,30 +3446,25 @@ dependencies = [ ] [[package]] -name = "syn" -version = "2.0.50" +name = "sync_wrapper" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "0.1.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" [[package]] name = "syslog" -version = "6.1.0" +version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7434e95bcccce1215d30f4bf84fe8c00e8de1b9be4fb736d747ca53d36e7f96f" +checksum = "dfc7e95b5b795122fafe6519e27629b5ab4232c73ebb2428f568e82b1a457ad3" dependencies = [ "error-chain", - "hostname", + "hostname 0.3.1", "libc", "log", "time", @@ -3206,34 +3493,34 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.10.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "rustix 0.38.31", + "fastrand 2.1.0", + "rustix 0.38.34", "windows-sys 0.52.0", ] [[package]] name = "thiserror" -version = 
"1.0.57" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -3257,9 +3544,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", @@ -3280,9 +3567,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -3290,9 +3577,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -3305,9 +3592,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = 
"1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes", @@ -3317,20 +3604,20 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.6", + "socket2 0.5.7", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -3349,7 +3636,18 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.11", + "rustls-pki-types", "tokio", ] @@ -3367,9 +3665,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -3378,9 +3676,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" 
+checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" dependencies = [ "futures-util", "log", @@ -3390,75 +3688,49 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", -] - -[[package]] -name = "toml" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.19.15", ] [[package]] name = "toml" -version = "0.8.10" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.6", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.19.15" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ - "indexmap", "serde", - "serde_spanned", - "toml_datetime", - "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.22.6" +version = "0.22.15" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6" +checksum = "d59a3a72298453f564e2b111fa896f8d07fabb36f51f06d7e875fc5e0b5a3ef1" dependencies = [ "indexmap", "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.2", + "winnow", ] [[package]] @@ -3473,6 +3745,27 @@ dependencies = [ "sha2", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + [[package]] name = "tower-service" version = "0.3.2" @@ -3499,7 +3792,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] [[package]] @@ -3541,52 +3834,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "trust-dns-proto" -version = "0.23.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" -dependencies = [ - "async-trait", - "cfg-if", - "data-encoding", - "enum-as-inner", - "futures-channel", - "futures-io", - "futures-util", - "idna 0.4.0", - "ipnet", - "once_cell", - "rand", - "smallvec", - "thiserror", - "tinyvec", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "trust-dns-resolver" -version = "0.23.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a3e6c3aff1718b3c73e395d1f35202ba2ffa847c6a62eea0db8fb4cfe30be6" -dependencies = [ - "cfg-if", - "futures-util", - "ipconfig", - "lru-cache", - "once_cell", - "parking_lot", - "rand", - "resolv-conf", - "smallvec", - 
"thiserror", - "tokio", - "tracing", - "trust-dns-proto", -] - [[package]] name = "try-lock" version = "0.2.5" @@ -3595,14 +3842,14 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", "bytes", "data-encoding", - "http", + "http 1.1.0", "httparse", "log", "rand", @@ -3678,9 +3925,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna 0.5.0", @@ -3696,9 +3943,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "uuid" -version = "1.7.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" dependencies = [ "getrandom", ] @@ -3711,9 +3958,9 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" -version = "1.7.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126e423afe2dd9ac52142e7e9d5ce4135d7e13776c529d27fd6bc49f19e3280b" +checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" [[package]] name = "vaultwarden" @@ -3725,9 +3972,9 @@ dependencies = [ "cached", "chrono", "chrono-tz", - "cookie 0.17.0", + 
"cookie", "cookie_store", - "dashmap", + "dashmap 6.0.1", "data-encoding", "data-url", "diesel", @@ -3739,6 +3986,7 @@ dependencies = [ "futures", "governor", "handlebars", + "hickory-resolver", "html5gum", "job_scheduler_ng", "jsonwebtoken", @@ -3755,7 +4003,7 @@ dependencies = [ "pico-args", "rand", "regex", - "reqwest", + "reqwest 0.12.5", "ring", "rmpv", "rocket", @@ -3767,7 +4015,6 @@ dependencies = [ "syslog", "time", "tokio", - "tokio-tungstenite", "totp-lite", "tracing", "url", @@ -3791,15 +4038,15 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "waker-fn" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" +checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" [[package]] name = "walkdir" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" dependencies = [ "same-file", "winapi-util", @@ -3822,9 +4069,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3832,24 +4079,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b" +checksum = 
"614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.50", + "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877b9c3f61ceea0e56331985743b13f3d25c406a7098d45180fb5f09bc19ed97" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if", "js-sys", @@ -3859,9 +4106,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3869,22 +4116,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-streams" @@ -3901,9 +4148,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.68" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96565907687f7aceb35bc5fc03770a8a0471d82e479f25832f54a0e3f4b28446" +checksum = 
"77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -3930,22 +4177,21 @@ dependencies = [ [[package]] name = "which" -version = "6.0.0" +version = "6.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c" +checksum = "8211e4f58a2b2805adfbefbc07bab82958fc91e3836339b1ab7ae32465dce0d7" dependencies = [ "either", "home", - "once_cell", - "rustix 0.38.31", - "windows-sys 0.52.0", + "rustix 0.38.34", + "winsafe", ] [[package]] name = "widestring" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" +checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" [[package]] name = "winapi" @@ -3965,11 +4211,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -3987,13 +4233,23 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +dependencies = [ + "windows-core", + "windows-targets 0.52.6", +] + [[package]] name = "windows-core" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.3", + "windows-targets 0.52.6", ] [[package]] @@ -4011,7 +4267,7 @@ version 
= "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.3", + "windows-targets 0.52.6", ] [[package]] @@ -4031,17 +4287,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.3", - "windows_aarch64_msvc 0.52.3", - "windows_i686_gnu 0.52.3", - "windows_i686_msvc 0.52.3", - "windows_x86_64_gnu 0.52.3", - "windows_x86_64_gnullvm 0.52.3", - "windows_x86_64_msvc 0.52.3", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -4052,9 +4309,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -4064,9 +4321,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -4076,9 
+4333,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.3" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -4088,9 +4351,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -4100,9 +4363,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -4112,9 +4375,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -4124,43 +4387,50 
@@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.3" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.5.40" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" dependencies = [ "memchr", ] [[package]] -name = "winnow" -version = "0.6.2" +name = "winreg" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a4191c47f15cc3ec71fcb4913cb83d58def65dd3787610213c649283b5ce178" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "memchr", + "cfg-if", + "windows-sys 0.48.0", ] [[package]] name = "winreg" -version = "0.50.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + [[package]] name = "yansi" -version = "1.0.0-rc.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1367295b8f788d371ce2dbc842c7b709c73ee1364d30351dd300ec2203b12377" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" dependencies = [ "is-terminal", ] @@ -4176,27 +4446,33 @@ dependencies = [ "futures", 
"hmac", "rand", - "reqwest", + "reqwest 0.11.27", "sha1", "threadpool", ] [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.50", + "syn", ] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml index 8eb905b0..7d266e53 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "vaultwarden" version = "1.0.0" authors = ["Daniel García "] edition = "2021" -rust-version = "1.74.0" +rust-version = "1.78.0" resolver = "2" repository = "https://github.com/dani-garcia/vaultwarden" @@ -36,11 +36,11 @@ unstable = [] [target."cfg(not(windows))".dependencies] # Logging -syslog = "6.1.0" +syslog = "6.1.1" [dependencies] # Logging -log = "0.4.20" +log = "0.4.22" fern = { version = "0.6.2", features = ["syslog-6", "reopen-1"] } tracing = { version = "0.1.40", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work @@ -51,57 +51,56 @@ dotenvy = { version = "0.15.7", default-features = false } once_cell = "1.19.0" # Numerical libraries -num-traits = "0.2.18" +num-traits = "0.2.19" num-derive = "0.4.2" -bigdecimal = "0.4.2" +bigdecimal = "0.4.5" # Web framework -rocket = { version = "0.5.0", features = ["tls", "json"], default-features = false } 
-rocket_ws = { version ="0.1.0" } +rocket = { version = "0.5.1", features = ["tls", "json"], default-features = false } +rocket_ws = { version ="0.1.1" } # WebSockets libraries -tokio-tungstenite = "0.20.1" -rmpv = "1.0.1" # MessagePack library +rmpv = "1.3.0" # MessagePack library # Concurrent HashMap used for WebSocket messaging and favicons -dashmap = "5.5.3" +dashmap = "6.0.1" # Async futures futures = "0.3.30" -tokio = { version = "1.36.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal"] } +tokio = { version = "1.38.0", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time", "signal", "net"] } # A generic serialization/deserialization framework -serde = { version = "1.0.197", features = ["derive"] } -serde_json = "1.0.114" +serde = { version = "1.0.204", features = ["derive"] } +serde_json = "1.0.120" # A safe, extensible ORM and Query builder -diesel = { version = "2.1.4", features = ["chrono", "r2d2", "numeric"] } -diesel_migrations = "2.1.0" +diesel = { version = "2.2.1", features = ["chrono", "r2d2", "numeric"] } +diesel_migrations = "2.2.0" diesel_logger = { version = "0.3.0", optional = true } # Bundled/Static SQLite -libsqlite3-sys = { version = "0.27.0", features = ["bundled"], optional = true } +libsqlite3-sys = { version = "0.28.0", features = ["bundled"], optional = true } # Crypto-related libraries rand = { version = "0.8.5", features = ["small_rng"] } ring = "0.17.8" # UUID generation -uuid = { version = "1.7.0", features = ["v4"] } +uuid = { version = "1.9.1", features = ["v4"] } # Date and time libraries -chrono = { version = "0.4.34", features = ["clock", "serde"], default-features = false } -chrono-tz = "0.8.6" -time = "0.3.34" +chrono = { version = "0.4.38", features = ["clock", "serde"], default-features = false } +chrono-tz = "0.9.0" +time = "0.3.36" # Job scheduler -job_scheduler_ng = "2.0.4" +job_scheduler_ng = "2.0.5" # Data encoding library Hex/Base32/Base64 -data-encoding = "2.5.0" 
+data-encoding = "2.6.0" # JWT library -jsonwebtoken = "9.2.0" +jsonwebtoken = "9.3.0" # TOTP library totp-lite = "2.0.1" @@ -113,31 +112,32 @@ yubico = { version = "0.11.0", features = ["online-tokio"], default-features = f webauthn-rs = "0.3.2" # Handling of URL's for WebAuthn and favicons -url = "2.5.0" +url = "2.5.2" # Email libraries -lettre = { version = "0.11.4", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } +lettre = { version = "0.11.7", features = ["smtp-transport", "sendmail-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } percent-encoding = "2.3.1" # URL encoding library used for URL's in the emails -email_address = "0.2.4" +email_address = "0.2.5" # HTML Template library -handlebars = { version = "5.1.0", features = ["dir_source"] } +handlebars = { version = "5.1.2", features = ["dir_source"] } # HTTP client (Used for favicons, version check, DUO and HIBP API) -reqwest = { version = "0.11.24", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns", "native-tls-alpn"] } +reqwest = { version = "0.12.5", features = ["native-tls-alpn", "stream", "json", "gzip", "brotli", "socks", "cookies"] } +hickory-resolver = "0.24.1" # Favicon extraction libraries html5gum = "0.5.7" -regex = { version = "1.10.3", features = ["std", "perf", "unicode-perl"], default-features = false } +regex = { version = "1.10.5", features = ["std", "perf", "unicode-perl"], default-features = false } data-url = "0.3.1" -bytes = "1.5.0" +bytes = "1.6.0" # Cache function results (Used for version check and favicon fetching) -cached = { version = "0.48.1", features = ["async"] } +cached = { version = "0.52.0", features = ["async"] } # Used for custom short lived cookie jar during favicon extraction -cookie = "0.17.0" -cookie_store = "0.20.0" +cookie = "0.18.1" +cookie_store = "0.21.0" # Used by 
U2F, JWT and PostgreSQL openssl = "0.10.64" @@ -146,16 +146,16 @@ openssl = "0.10.64" pico-args = "0.5.0" # Macro ident concatenation -paste = "1.0.14" +paste = "1.0.15" governor = "0.6.3" # Check client versions for specific features. -semver = "1.0.22" +semver = "1.0.23" # Allow overriding the default memory allocator # Mainly used for the musl builds, since the default musl malloc is very slow -mimalloc = { version = "0.1.39", features = ["secure"], default-features = false, optional = true } -which = "6.0.0" +mimalloc = { version = "0.1.43", features = ["secure"], default-features = false, optional = true } +which = "6.0.1" # Argon2 library with support for the PHC format argon2 = "0.5.3" @@ -163,7 +163,6 @@ argon2 = "0.5.3" # Reading a password from the cli for generating the Argon2id ADMIN_TOKEN rpassword = "7.3.1" - # Strip debuginfo from the release builds # The symbols are the provide better panic traces # Also enable fat LTO and use 1 codegen unit for optimizations @@ -172,7 +171,6 @@ strip = "debuginfo" lto = "fat" codegen-units = 1 - # A little bit of a speedup [profile.dev] split-debuginfo = "unpacked" @@ -206,14 +204,13 @@ unsafe_code = "forbid" non_ascii_idents = "forbid" # Deny -future_incompatible = "deny" +future_incompatible = { level = "deny", priority = -1 } noop_method_call = "deny" -pointer_structural_match = "deny" -rust_2018_idioms = "deny" -rust_2021_compatibility = "deny" +rust_2018_idioms = { level = "deny", priority = -1 } +rust_2021_compatibility = { level = "deny", priority = -1 } trivial_casts = "deny" trivial_numeric_casts = "deny" -unused = "deny" +unused = { level = "deny", priority = -1 } unused_import_braces = "deny" unused_lifetimes = "deny" deprecated_in_future = "deny" diff --git a/build.rs b/build.rs index e7bfb7de..07bd99a7 100644 --- a/build.rs +++ b/build.rs @@ -17,6 +17,13 @@ fn main() { "You need to enable one DB backend. 
To build with previous defaults do: cargo build --features sqlite" ); + // Use check-cfg to let cargo know which cfg's we define, + // and avoid warnings when they are used in the code. + println!("cargo::rustc-check-cfg=cfg(sqlite)"); + println!("cargo::rustc-check-cfg=cfg(mysql)"); + println!("cargo::rustc-check-cfg=cfg(postgresql)"); + println!("cargo::rustc-check-cfg=cfg(query_logger)"); + // Rerun when these paths are changed. // Someone could have checked-out a tag or specific commit, but no other files changed. println!("cargo:rerun-if-changed=.git"); diff --git a/docker/DockerSettings.yaml b/docker/DockerSettings.yaml index 9ea779b7..807a6632 100644 --- a/docker/DockerSettings.yaml +++ b/docker/DockerSettings.yaml @@ -1,12 +1,12 @@ --- -vault_version: "v2024.1.2b" -vault_image_digest: "sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08" -# Cross Compile Docker Helper Scripts v1.3.0 +vault_version: "v2024.5.1b" +vault_image_digest: "sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375" +# Cross Compile Docker Helper Scripts v1.4.0 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts -xx_image_digest: "sha256:c9609ace652bbe51dd4ce90e0af9d48a4590f1214246da5bc70e46f6dd586edc" -rust_version: 1.76.0 # Rust version to be used +xx_image_digest: "sha256:0cd3f05c72d6c9b038eb135f91376ee1169ef3a330d34e418e65e2a5c2e9c0d4" +rust_version: 1.79.0 # Rust version to be used debian_version: bookworm # Debian release name to be used -alpine_version: 3.19 # Alpine version to be used +alpine_version: "3.20" # Alpine version to be used # For which platforms/architectures will we try to build images platforms: ["linux/amd64", "linux/arm64", "linux/arm/v7", "linux/arm/v6"] # Determine the build images per OS/Arch diff --git a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine index 427f5153..e4a392f8 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -18,23 
+18,23 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2024.1.2b -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.1.2b -# [docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08] +# $ docker pull docker.io/vaultwarden/web-vault:v2024.5.1b +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.5.1b +# [docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08 -# [docker.io/vaultwarden/web-vault:v2024.1.2b] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375 +# [docker.io/vaultwarden/web-vault:v2024.5.1b] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08 as vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375 as vault ########################## ALPINE BUILD IMAGES ########################## ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64 ## And for Alpine we define all build images here, they will only be loaded when actually used -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.76.0 as build_amd64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.76.0 as build_arm64 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.76.0 as 
build_armv7 -FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.76.0 as build_armv6 +FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.79.0 as build_amd64 +FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.79.0 as build_arm64 +FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.79.0 as build_armv7 +FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.79.0 as build_armv6 ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 @@ -58,33 +58,29 @@ ENV DEBIAN_FRONTEND=noninteractive \ # Create CARGO_HOME folder and don't download rust docs -RUN mkdir -pv "${CARGO_HOME}" \ - && rustup set profile minimal +RUN mkdir -pv "${CARGO_HOME}" && \ + rustup set profile minimal # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app WORKDIR /app -# Shared variables across Debian and Alpine +# Environment variables for Cargo on Alpine based builds RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \ - # To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic - if [[ "${TARGETARCH}${TARGETVARIANT}" == "armv6" ]] ; then echo "export RUSTFLAGS='-Clink-arg=-latomic'" >> /env-cargo ; fi && \ # Output the current contents of the file cat /env-cargo -# Enable MiMalloc to improve performance on Alpine builds -ARG DB=sqlite,mysql,postgresql,enable_mimalloc - RUN source /env-cargo && \ rustup target add "${CARGO_TARGET}" +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ + ARG CARGO_PROFILE=release -ARG VW_VERSION -# Copies over *only* your manifests and build files -COPY ./Cargo.* ./ -COPY ./rust-toolchain.toml ./rust-toolchain.toml -COPY ./build.rs ./build.rs +# Configure the DB ARG as late as possible to not invalidate the cached layers above +# Enable MiMalloc to 
improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -97,6 +93,8 @@ RUN source /env-cargo && \ # To avoid copying unneeded files, use .dockerignore COPY . . +ARG VW_VERSION + # Builds again, this time it will be the actual source files being build RUN source /env-cargo && \ # Make sure that we actually build the project by updating the src/main.rs timestamp @@ -127,7 +125,7 @@ RUN source /env-cargo && \ # To uninstall: docker run --privileged --rm tonistiigi/binfmt --uninstall 'qemu-*' # # We need to add `--platform` here, because of a podman bug: https://github.com/containers/buildah/issues/4742 -FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.19 +FROM --platform=$TARGETPLATFORM docker.io/library/alpine:3.20 ENV ROCKET_PROFILE="release" \ ROCKET_ADDRESS=0.0.0.0 \ @@ -150,8 +148,7 @@ EXPOSE 3012 # and the binary from the "build" stage to the current stage WORKDIR / -COPY docker/healthcheck.sh /healthcheck.sh -COPY docker/start.sh /start.sh +COPY docker/healthcheck.sh docker/start.sh / COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/final/vaultwarden . diff --git a/docker/Dockerfile.debian b/docker/Dockerfile.debian index c2d9eee8..84ae6ff7 100644 --- a/docker/Dockerfile.debian +++ b/docker/Dockerfile.debian @@ -18,24 +18,24 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull docker.io/vaultwarden/web-vault:v2024.1.2b -# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.1.2b -# [docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08] +# $ docker pull docker.io/vaultwarden/web-vault:v2024.5.1b +# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2024.5.1b +# [docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08 -# [docker.io/vaultwarden/web-vault:v2024.1.2b] +# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375 +# [docker.io/vaultwarden/web-vault:v2024.5.1b] # -FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:798c0c893b6d16728878ff280b49da08863334d1f8dd88895580dc3dba622f08 as vault +FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:1a867b4b175e85fc8602314bd83bc263c76c49787031704f16a2915567725375 as vault ########################## Cross Compile Docker Helper Scripts ########################## ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts ## And these bash scripts do not have any significant difference if at all -FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:c9609ace652bbe51dd4ce90e0af9d48a4590f1214246da5bc70e46f6dd586edc AS xx +FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:0cd3f05c72d6c9b038eb135f91376ee1169ef3a330d34e418e65e2a5c2e9c0d4 AS xx ########################## BUILD IMAGE ########################## # hadolint ignore=DL3006 -FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.76.0-slim-bookworm as build 
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.79.0-slim-bookworm as build COPY --from=xx / / ARG TARGETARCH ARG TARGETVARIANT @@ -64,10 +64,7 @@ RUN apt-get update && \ "libc6-$(xx-info debian-arch)-cross" \ "libc6-dev-$(xx-info debian-arch)-cross" \ "linux-libc-dev-$(xx-info debian-arch)-cross" && \ - # Run xx-cargo early, since it sometimes seems to break when run at a later stage - echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo - -RUN xx-apt-get install -y \ + xx-apt-get install -y \ --no-install-recommends \ gcc \ libmariadb3 \ @@ -78,19 +75,29 @@ RUN xx-apt-get install -y \ # Force install arch dependend mariadb dev packages # Installing them the normal way breaks several other packages (again) apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ - dpkg --force-all -i ./libmariadb-dev*.deb + dpkg --force-all -i ./libmariadb-dev*.deb && \ + # Run xx-cargo early, since it sometimes seems to break when run at a later stage + echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo # Create CARGO_HOME folder and don't download rust docs -RUN mkdir -pv "${CARGO_HOME}" \ - && rustup set profile minimal +RUN mkdir -pv "${CARGO_HOME}" && \ + rustup set profile minimal # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app WORKDIR /app -# Environment variables for cargo across Debian and Alpine +# Environment variables for Cargo on Debian based builds +ARG ARCH_OPENSSL_LIB_DIR \ + ARCH_OPENSSL_INCLUDE_DIR + RUN source /env-cargo && \ if xx-info is-cross ; then \ + # Some special variables if needed to override some build paths + if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ + echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ + echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - 
_)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ + fi && \ # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. # Because of this we generate the needed environment variables here which we can load in the needed steps. echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ @@ -103,19 +110,16 @@ RUN source /env-cargo && \ # Output the current contents of the file cat /env-cargo -# Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql - RUN source /env-cargo && \ rustup target add "${CARGO_TARGET}" +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ + ARG CARGO_PROFILE=release -ARG VW_VERSION -# Copies over *only* your manifests and build files -COPY ./Cargo.* ./ -COPY ./rust-toolchain.toml ./rust-toolchain.toml -COPY ./build.rs ./build.rs +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql # Builds your dependencies and removes the # dummy project, except the target folder @@ -128,6 +132,8 @@ RUN source /env-cargo && \ # To avoid copying unneeded files, use .dockerignore COPY . . +ARG VW_VERSION + # Builds again, this time it will be the actual source files being build RUN source /env-cargo && \ # Make sure that we actually build the project by updating the src/main.rs timestamp @@ -185,8 +191,7 @@ EXPOSE 3012 # and the binary from the "build" stage to the current stage WORKDIR / -COPY docker/healthcheck.sh /healthcheck.sh -COPY docker/start.sh /start.sh +COPY docker/healthcheck.sh docker/start.sh / COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/final/vaultwarden . 
diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index e8f81469..d71b4ccc 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -82,10 +82,7 @@ RUN apt-get update && \ "libc6-$(xx-info debian-arch)-cross" \ "libc6-dev-$(xx-info debian-arch)-cross" \ "linux-libc-dev-$(xx-info debian-arch)-cross" && \ - # Run xx-cargo early, since it sometimes seems to break when run at a later stage - echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo - -RUN xx-apt-get install -y \ + xx-apt-get install -y \ --no-install-recommends \ gcc \ libmariadb3 \ @@ -96,21 +93,31 @@ RUN xx-apt-get install -y \ # Force install arch dependend mariadb dev packages # Installing them the normal way breaks several other packages (again) apt-get download "libmariadb-dev-compat:$(xx-info debian-arch)" "libmariadb-dev:$(xx-info debian-arch)" && \ - dpkg --force-all -i ./libmariadb-dev*.deb + dpkg --force-all -i ./libmariadb-dev*.deb && \ + # Run xx-cargo early, since it sometimes seems to break when run at a later stage + echo "export CARGO_TARGET=$(xx-cargo --print-target-triple)" >> /env-cargo {% endif %} # Create CARGO_HOME folder and don't download rust docs -RUN mkdir -pv "${CARGO_HOME}" \ - && rustup set profile minimal +RUN mkdir -pv "${CARGO_HOME}" && \ + rustup set profile minimal # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app WORKDIR /app {% if base == "debian" %} -# Environment variables for cargo across Debian and Alpine +# Environment variables for Cargo on Debian based builds +ARG ARCH_OPENSSL_LIB_DIR \ + ARCH_OPENSSL_INCLUDE_DIR + RUN source /env-cargo && \ if xx-info is-cross ; then \ + # Some special variables if needed to override some build paths + if [[ -n "${ARCH_OPENSSL_LIB_DIR}" && -n "${ARCH_OPENSSL_INCLUDE_DIR}" ]]; then \ + echo "export $(echo "${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_LIB_DIR=${ARCH_OPENSSL_LIB_DIR}" >> /env-cargo && \ + echo "export $(echo 
"${CARGO_TARGET}" | tr '[:lower:]' '[:upper:]' | tr - _)_OPENSSL_INCLUDE_DIR=${ARCH_OPENSSL_INCLUDE_DIR}" >> /env-cargo ; \ + fi && \ # We can't use xx-cargo since that uses clang, which doesn't work for our libraries. # Because of this we generate the needed environment variables here which we can load in the needed steps. echo "export CC_$(echo "${CARGO_TARGET}" | tr '[:upper:]' '[:lower:]' | tr - _)=/usr/bin/$(xx-info)-gcc" >> /env-cargo && \ @@ -123,30 +130,28 @@ RUN source /env-cargo && \ # Output the current contents of the file cat /env-cargo -# Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql {% elif base == "alpine" %} -# Shared variables across Debian and Alpine +# Environment variables for Cargo on Alpine based builds RUN echo "export CARGO_TARGET=${RUST_MUSL_CROSS_TARGET}" >> /env-cargo && \ - # To be able to build the armv6 image with mimalloc we need to tell the linker to also look for libatomic - if [[ "${TARGETARCH}${TARGETVARIANT}" == "armv6" ]] ; then echo "export RUSTFLAGS='-Clink-arg=-latomic'" >> /env-cargo ; fi && \ # Output the current contents of the file cat /env-cargo -# Enable MiMalloc to improve performance on Alpine builds -ARG DB=sqlite,mysql,postgresql,enable_mimalloc {% endif %} - RUN source /env-cargo && \ rustup target add "${CARGO_TARGET}" +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./rust-toolchain.toml ./build.rs ./ + ARG CARGO_PROFILE=release -ARG VW_VERSION -# Copies over *only* your manifests and build files -COPY ./Cargo.* ./ -COPY ./rust-toolchain.toml ./rust-toolchain.toml -COPY ./build.rs ./build.rs +# Configure the DB ARG as late as possible to not invalidate the cached layers above +{% if base == "debian" %} +ARG DB=sqlite,mysql,postgresql +{% elif base == "alpine" %} +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc +{% endif %} # Builds your dependencies and removes the # 
dummy project, except the target folder @@ -159,6 +164,8 @@ RUN source /env-cargo && \ # To avoid copying unneeded files, use .dockerignore COPY . . +ARG VW_VERSION + # Builds again, this time it will be the actual source files being build RUN source /env-cargo && \ # Make sure that we actually build the project by updating the src/main.rs timestamp @@ -228,8 +235,7 @@ EXPOSE 3012 # and the binary from the "build" stage to the current stage WORKDIR / -COPY docker/healthcheck.sh /healthcheck.sh -COPY docker/start.sh /start.sh +COPY docker/healthcheck.sh docker/start.sh / COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/final/vaultwarden . diff --git a/docker/README.md b/docker/README.md index 3c74043c..2e78f534 100644 --- a/docker/README.md +++ b/docker/README.md @@ -11,6 +11,11 @@ With just these two files we can build both Debian and Alpine images for the fol - armv7 (linux/arm/v7) - armv6 (linux/arm/v6) +Some unsupported platforms for Debian based images. These are not built and tested by default and are only provided to make it easier for users to build for these architectures. +- 386 (linux/386) +- ppc64le (linux/ppc64le) +- s390x (linux/s390x) + To build these containers you need to enable QEMU binfmt support to be able to run/emulate architectures which are different then your host.
This ensures the container build process can run binaries from other architectures.
diff --git a/docker/docker-bake.hcl b/docker/docker-bake.hcl index e7f6a94b..38e7ef97 100644 --- a/docker/docker-bake.hcl +++ b/docker/docker-bake.hcl @@ -125,6 +125,40 @@ target "debian-armv6" { tags = generate_tags("", "-armv6") } +// ==== Start of unsupported Debian architecture targets === +// These are provided just to help users build for these rare platforms +// They will not be built by default +target "debian-386" { + inherits = ["debian"] + platforms = ["linux/386"] + tags = generate_tags("", "-386") + args = { + ARCH_OPENSSL_LIB_DIR = "/usr/lib/i386-linux-gnu" + ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/i386-linux-gnu" + } +} + +target "debian-ppc64le" { + inherits = ["debian"] + platforms = ["linux/ppc64le"] + tags = generate_tags("", "-ppc64le") + args = { + ARCH_OPENSSL_LIB_DIR = "/usr/lib/powerpc64le-linux-gnu" + ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/powerpc64le-linux-gnu" + } +} + +target "debian-s390x" { + inherits = ["debian"] + platforms = ["linux/s390x"] + tags = generate_tags("", "-s390x") + args = { + ARCH_OPENSSL_LIB_DIR = "/usr/lib/s390x-linux-gnu" + ARCH_OPENSSL_INCLUDE_DIR = "/usr/include/s390x-linux-gnu" + } +} +// ==== End of unsupported Debian architecture targets === + // A Group to build all platforms individually for local testing group "debian-all" { targets = ["debian-amd64", "debian-arm64", "debian-armv7", "debian-armv6"] diff --git a/migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/down.sql b/migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/up.sql b/migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/up.sql new file mode 100644 index 00000000..f1bfe381 --- /dev/null +++ b/migrations/mysql/2024-02-14-135828_change_time_stamp_data_type/up.sql @@ -0,0 +1 @@ +ALTER TABLE twofactor MODIFY last_used BIGINT NOT NULL; diff --git 
a/migrations/postgresql/2024-02-14-135953_change_time_stamp_data_type/down.sql b/migrations/postgresql/2024-02-14-135953_change_time_stamp_data_type/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/postgresql/2024-02-14-135953_change_time_stamp_data_type/up.sql b/migrations/postgresql/2024-02-14-135953_change_time_stamp_data_type/up.sql new file mode 100644 index 00000000..efc6dc6e --- /dev/null +++ b/migrations/postgresql/2024-02-14-135953_change_time_stamp_data_type/up.sql @@ -0,0 +1,3 @@ +ALTER TABLE twofactor +ALTER COLUMN last_used TYPE BIGINT, +ALTER COLUMN last_used SET NOT NULL; diff --git a/migrations/sqlite/2024-02-14-140000_change_time_stamp_data_type/down.sql b/migrations/sqlite/2024-02-14-140000_change_time_stamp_data_type/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/sqlite/2024-02-14-140000_change_time_stamp_data_type/up.sql b/migrations/sqlite/2024-02-14-140000_change_time_stamp_data_type/up.sql new file mode 100644 index 00000000..187a614e --- /dev/null +++ b/migrations/sqlite/2024-02-14-140000_change_time_stamp_data_type/up.sql @@ -0,0 +1 @@ +-- Integer size in SQLite is already i64, so we don't need to do anything diff --git a/rust-toolchain.toml b/rust-toolchain.toml index d8e821e8..58c631f0 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.76.0" +channel = "1.79.0" components = [ "rustfmt", "clippy" ] profile = "minimal" diff --git a/src/api/admin.rs b/src/api/admin.rs index dfd3e28f..58a056b6 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -265,8 +265,8 @@ fn admin_page_login() -> ApiResult> { render_admin_login(None, None) } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct InviteData { email: String, } @@ -326,9 +326,9 @@ async fn get_users_json(_token: AdminToken, mut conn: DbConn) -> Json { let mut users_json = 
Vec::with_capacity(users.len()); for u in users { let mut usr = u.to_json(&mut conn).await; - usr["UserEnabled"] = json!(u.enabled); - usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); - usr["LastActive"] = match u.last_active(&mut conn).await { + usr["userEnabled"] = json!(u.enabled); + usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + usr["lastActive"] = match u.last_active(&mut conn).await { Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), None => json!(None::), }; @@ -364,8 +364,8 @@ async fn users_overview(_token: AdminToken, mut conn: DbConn) -> ApiResult JsonResult { if let Some(u) = User::find_by_mail(mail, &mut conn).await { let mut usr = u.to_json(&mut conn).await; - usr["UserEnabled"] = json!(u.enabled); - usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + usr["userEnabled"] = json!(u.enabled); + usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); Ok(Json(usr)) } else { err_code!("User doesn't exist", Status::NotFound.code); @@ -376,8 +376,8 @@ async fn get_user_by_mail_json(mail: &str, _token: AdminToken, mut conn: DbConn) async fn get_user_json(uuid: &str, _token: AdminToken, mut conn: DbConn) -> JsonResult { let u = get_user_or_404(uuid, &mut conn).await?; let mut usr = u.to_json(&mut conn).await; - usr["UserEnabled"] = json!(u.enabled); - usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + usr["userEnabled"] = json!(u.enabled); + usr["createdAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); Ok(Json(usr)) } @@ -475,7 +475,7 @@ async fn resend_user_invite(uuid: &str, _token: AdminToken, mut conn: DbConn) -> } } -#[derive(Deserialize, Debug)] +#[derive(Debug, Deserialize)] struct UserOrgTypeData { user_type: NumberOrString, user_uuid: String, @@ -510,7 +510,11 @@ async fn update_user_org_type(data: Json, token: AdminToken, mu match OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, 
&user_to_edit.org_uuid, true, &mut conn).await { Ok(_) => {} Err(OrgPolicyErr::TwoFactorMissing) => { - err!("You cannot modify this user to this type because it has no two-step login method activated"); + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&user_to_edit.user_uuid, &mut conn).await?; + } else { + err!("You cannot modify this user to this type because they have not setup 2FA"); + } } Err(OrgPolicyErr::SingleOrgEnforced) => { err!("You cannot modify this user to this type because it is a member of an organization which forbids it"); @@ -697,10 +701,7 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) let (latest_release, latest_commit, latest_web_build) = get_release_info(has_http_access, running_within_container).await; - let ip_header_name = match &ip_header.0 { - Some(h) => h, - _ => "", - }; + let ip_header_name = &ip_header.0.unwrap_or_default(); let diagnostics_json = json!({ "dns_resolved": dns_resolved, @@ -713,8 +714,8 @@ async fn diagnostics(_token: AdminToken, ip_header: IpHeader, mut conn: DbConn) "running_within_container": running_within_container, "container_base_image": if running_within_container { container_base_image() } else { "Not applicable" }, "has_http_access": has_http_access, - "ip_header_exists": &ip_header.0.is_some(), - "ip_header_match": ip_header_name == CONFIG.ip_header(), + "ip_header_exists": !ip_header_name.is_empty(), + "ip_header_match": ip_header_name.eq(&CONFIG.ip_header()), "ip_header_name": ip_header_name, "ip_header_config": &CONFIG.ip_header(), "uses_proxy": uses_proxy, diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index 91555a55..a747f3ec 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -5,8 +5,9 @@ use serde_json::Value; use crate::{ api::{ - core::log_user_event, register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, - JsonUpcase, Notify, PasswordOrOtpData, 
UpdateType, + core::{log_user_event, two_factor::email}, + register_push_device, unregister_push_device, AnonymousNotify, EmptyResult, JsonResult, Notify, + PasswordOrOtpData, UpdateType, }, auth::{decode_delete, decode_invite, decode_verify_email, ClientHeaders, Headers}, crypto, @@ -49,7 +50,6 @@ pub fn routes() -> Vec { api_key, rotate_api_key, get_known_device, - get_known_device_from_path, put_avatar, put_device_token, put_clear_device_token, @@ -62,29 +62,29 @@ pub fn routes() -> Vec { ] } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct RegisterData { - Email: String, - Kdf: Option, - KdfIterations: Option, - KdfMemory: Option, - KdfParallelism: Option, - Key: String, - Keys: Option, - MasterPasswordHash: String, - MasterPasswordHint: Option, - Name: Option, - Token: Option, + email: String, + kdf: Option, + kdf_iterations: Option, + kdf_memory: Option, + kdf_parallelism: Option, + key: String, + keys: Option, + master_password_hash: String, + master_password_hint: Option, + name: Option, + token: Option, #[allow(dead_code)] - OrganizationUserId: Option, + organization_user_id: Option, } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct KeysData { - EncryptedPrivateKey: String, - PublicKey: String, + encrypted_private_key: String, + public_key: String, } /// Trims whitespace from password hints, and converts blank password hints to `None`. 
@@ -104,19 +104,32 @@ fn enforce_password_hint_setting(password_hint: &Option) -> EmptyResult } Ok(()) } +async fn is_email_2fa_required(org_user_uuid: Option, conn: &mut DbConn) -> bool { + if !CONFIG._enable_email_2fa() { + return false; + } + if CONFIG.email_2fa_enforce_on_verified_invite() { + return true; + } + if org_user_uuid.is_some() { + return OrgPolicy::is_enabled_by_org(&org_user_uuid.unwrap(), OrgPolicyType::TwoFactorAuthentication, conn) + .await; + } + false +} #[post("/accounts/register", data = "")] -async fn register(data: JsonUpcase, conn: DbConn) -> JsonResult { +async fn register(data: Json, conn: DbConn) -> JsonResult { _register(data, conn).await } -pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> JsonResult { - let data: RegisterData = data.into_inner().data; - let email = data.Email.to_lowercase(); +pub async fn _register(data: Json, mut conn: DbConn) -> JsonResult { + let data: RegisterData = data.into_inner(); + let email = data.email.to_lowercase(); // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) // This also prevents issues with very long usernames causing to large JWT's. See #2419 - if let Some(ref name) = data.Name { + if let Some(ref name) = data.name { if name.len() > 50 { err!("The field Name must be a string with a maximum length of 50."); } @@ -124,7 +137,7 @@ pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> Json // Check against the password hint setting here so if it fails, the user // can retry without losing their invitation below. 
- let password_hint = clean_password_hint(&data.MasterPasswordHint); + let password_hint = clean_password_hint(&data.master_password_hint); enforce_password_hint_setting(&password_hint)?; let mut verified_by_invite = false; @@ -135,7 +148,7 @@ pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> Json err!("Registration not allowed or user already exists") } - if let Some(token) = data.Token { + if let Some(token) = data.token { let claims = decode_invite(&token)?; if claims.email == email { // Verify the email address when signing up via a valid invite token @@ -152,7 +165,8 @@ pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> Json } user } else if CONFIG.is_signup_allowed(&email) - || EmergencyAccess::find_invited_by_grantee_email(&email, &mut conn).await.is_some() + || (CONFIG.emergency_access_allowed() + && EmergencyAccess::find_invited_by_grantee_email(&email, &mut conn).await.is_some()) { user } else { @@ -174,28 +188,28 @@ pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> Json // Make sure we don't leave a lingering invitation. 
Invitation::take(&email, &mut conn).await; - if let Some(client_kdf_type) = data.Kdf { + if let Some(client_kdf_type) = data.kdf { user.client_kdf_type = client_kdf_type; } - if let Some(client_kdf_iter) = data.KdfIterations { + if let Some(client_kdf_iter) = data.kdf_iterations { user.client_kdf_iter = client_kdf_iter; } - user.client_kdf_memory = data.KdfMemory; - user.client_kdf_parallelism = data.KdfParallelism; + user.client_kdf_memory = data.kdf_memory; + user.client_kdf_parallelism = data.kdf_parallelism; - user.set_password(&data.MasterPasswordHash, Some(data.Key), true, None); + user.set_password(&data.master_password_hash, Some(data.key), true, None); user.password_hint = password_hint; // Add extra fields if present - if let Some(name) = data.Name { + if let Some(name) = data.name { user.name = name; } - if let Some(keys) = data.Keys { - user.private_key = Some(keys.EncryptedPrivateKey); - user.public_key = Some(keys.PublicKey); + if let Some(keys) = data.keys { + user.private_key = Some(keys.encrypted_private_key); + user.public_key = Some(keys.public_key); } if CONFIG.mail_enabled() { @@ -203,17 +217,28 @@ pub async fn _register(data: JsonUpcase, mut conn: DbConn) -> Json if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await { error!("Error sending welcome email: {:#?}", e); } - user.last_verifying_at = Some(user.created_at); } else if let Err(e) = mail::send_welcome(&user.email).await { error!("Error sending welcome email: {:#?}", e); } + + if verified_by_invite && is_email_2fa_required(data.organization_user_id, &mut conn).await { + let _ = email::activate_email_2fa(&user, &mut conn).await; + } } user.save(&mut conn).await?; + + // accept any open emergency access invitations + if !CONFIG.mail_enabled() && CONFIG.emergency_access_allowed() { + for mut emergency_invite in EmergencyAccess::find_all_invited_by_grantee_email(&user.email, &mut conn).await { + let _ = emergency_invite.accept_invite(&user.uuid, &user.email, &mut 
conn).await; + } + } + Ok(Json(json!({ - "Object": "register", - "CaptchaBypassToken": "", + "object": "register", + "captchaBypassToken": "", }))) } @@ -222,57 +247,57 @@ async fn profile(headers: Headers, mut conn: DbConn) -> Json { Json(headers.user.to_json(&mut conn).await) } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct ProfileData { - // Culture: String, // Ignored, always use en-US - // MasterPasswordHint: Option, // Ignored, has been moved to ChangePassData - Name: String, + // culture: String, // Ignored, always use en-US + // masterPasswordHint: Option, // Ignored, has been moved to ChangePassData + name: String, } #[put("/accounts/profile", data = "")] -async fn put_profile(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn put_profile(data: Json, headers: Headers, conn: DbConn) -> JsonResult { post_profile(data, headers, conn).await } #[post("/accounts/profile", data = "")] -async fn post_profile(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: ProfileData = data.into_inner().data; +async fn post_profile(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: ProfileData = data.into_inner(); // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) // This also prevents issues with very long usernames causing to large JWT's. 
See #2419 - if data.Name.len() > 50 { + if data.name.len() > 50 { err!("The field Name must be a string with a maximum length of 50."); } let mut user = headers.user; - user.name = data.Name; + user.name = data.name; user.save(&mut conn).await?; Ok(Json(user.to_json(&mut conn).await)) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AvatarData { - AvatarColor: Option, + avatar_color: Option, } #[put("/accounts/avatar", data = "")] -async fn put_avatar(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: AvatarData = data.into_inner().data; +async fn put_avatar(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: AvatarData = data.into_inner(); // It looks like it only supports the 6 hex color format. // If you try to add the short value it will not show that color. // Check and force 7 chars, including the #. - if let Some(color) = &data.AvatarColor { + if let Some(color) = &data.avatar_color { if color.len() != 7 { err!("The field AvatarColor must be a HTML/Hex color code with a length of 7 characters") } } let mut user = headers.user; - user.avatar_color = data.AvatarColor; + user.avatar_color = data.avatar_color; user.save(&mut conn).await?; Ok(Json(user.to_json(&mut conn).await)) @@ -287,62 +312,57 @@ async fn get_public_keys(uuid: &str, _headers: Headers, mut conn: DbConn) -> Jso }; Ok(Json(json!({ - "UserId": user.uuid, - "PublicKey": user.public_key, - "Object":"userKey" + "userId": user.uuid, + "publicKey": user.public_key, + "object":"userKey" }))) } #[post("/accounts/keys", data = "")] -async fn post_keys(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: KeysData = data.into_inner().data; +async fn post_keys(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: KeysData = data.into_inner(); let mut user = headers.user; - user.private_key = Some(data.EncryptedPrivateKey); - user.public_key = 
Some(data.PublicKey); + user.private_key = Some(data.encrypted_private_key); + user.public_key = Some(data.public_key); user.save(&mut conn).await?; Ok(Json(json!({ - "PrivateKey": user.private_key, - "PublicKey": user.public_key, - "Object":"keys" + "privateKey": user.private_key, + "publicKey": user.public_key, + "object":"keys" }))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ChangePassData { - MasterPasswordHash: String, - NewMasterPasswordHash: String, - MasterPasswordHint: Option, - Key: String, + master_password_hash: String, + new_master_password_hash: String, + master_password_hint: Option, + key: String, } #[post("/accounts/password", data = "")] -async fn post_password( - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, - nt: Notify<'_>, -) -> EmptyResult { - let data: ChangePassData = data.into_inner().data; +async fn post_password(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { + let data: ChangePassData = data.into_inner(); let mut user = headers.user; - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } - user.password_hint = clean_password_hint(&data.MasterPasswordHint); + user.password_hint = clean_password_hint(&data.master_password_hint); enforce_password_hint_setting(&user.password_hint)?; log_user_event(EventType::UserChangedPassword as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn) .await; user.set_password( - &data.NewMasterPasswordHash, - Some(data.Key), + &data.new_master_password_hash, + Some(data.key), true, Some(vec![String::from("post_rotatekey"), String::from("get_contacts"), String::from("get_public_keys")]), ); @@ -358,48 +378,48 @@ async fn post_password( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ChangeKdfData { - Kdf: i32, - KdfIterations: i32, - KdfMemory: Option, - 
KdfParallelism: Option, + kdf: i32, + kdf_iterations: i32, + kdf_memory: Option, + kdf_parallelism: Option, - MasterPasswordHash: String, - NewMasterPasswordHash: String, - Key: String, + master_password_hash: String, + new_master_password_hash: String, + key: String, } #[post("/accounts/kdf", data = "")] -async fn post_kdf(data: JsonUpcase, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { - let data: ChangeKdfData = data.into_inner().data; +async fn post_kdf(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { + let data: ChangeKdfData = data.into_inner(); let mut user = headers.user; - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } - if data.Kdf == UserKdfType::Pbkdf2 as i32 && data.KdfIterations < 100_000 { + if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 { err!("PBKDF2 KDF iterations must be at least 100000.") } - if data.Kdf == UserKdfType::Argon2id as i32 { - if data.KdfIterations < 1 { + if data.kdf == UserKdfType::Argon2id as i32 { + if data.kdf_iterations < 1 { err!("Argon2 KDF iterations must be at least 1.") } - if let Some(m) = data.KdfMemory { + if let Some(m) = data.kdf_memory { if !(15..=1024).contains(&m) { err!("Argon2 memory must be between 15 MB and 1024 MB.") } - user.client_kdf_memory = data.KdfMemory; + user.client_kdf_memory = data.kdf_memory; } else { err!("Argon2 memory parameter is required.") } - if let Some(p) = data.KdfParallelism { + if let Some(p) = data.kdf_parallelism { if !(1..=16).contains(&p) { err!("Argon2 parallelism must be between 1 and 16.") } - user.client_kdf_parallelism = data.KdfParallelism; + user.client_kdf_parallelism = data.kdf_parallelism; } else { err!("Argon2 parallelism parameter is required.") } @@ -407,9 +427,9 @@ async fn post_kdf(data: JsonUpcase, headers: Headers, mut conn: D user.client_kdf_memory = None; user.client_kdf_parallelism = 
None; } - user.client_kdf_iter = data.KdfIterations; - user.client_kdf_type = data.Kdf; - user.set_password(&data.NewMasterPasswordHash, Some(data.Key), true, None); + user.client_kdf_iter = data.kdf_iterations; + user.client_kdf_type = data.kdf; + user.set_password(&data.new_master_password_hash, Some(data.key), true, None); let save_result = user.save(&mut conn).await; nt.send_logout(&user, Some(headers.device.uuid)).await; @@ -418,29 +438,51 @@ async fn post_kdf(data: JsonUpcase, headers: Headers, mut conn: D } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct UpdateFolderData { - Id: String, - Name: String, + // There is a bug in 2024.3.x which adds a `null` item. + // To bypass this we allow a Option here, but skip it during the updates + // See: https://github.com/bitwarden/clients/issues/8453 + id: Option, + name: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct UpdateEmergencyAccessData { + id: String, + key_encrypted: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct UpdateResetPasswordData { + organization_id: String, + reset_password_key: String, } use super::ciphers::CipherData; +use super::sends::{update_send_from_data, SendData}; #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct KeyData { - Ciphers: Vec, - Folders: Vec, - Key: String, - PrivateKey: String, - MasterPasswordHash: String, + ciphers: Vec, + folders: Vec, + sends: Vec, + emergency_access_keys: Vec, + reset_password_keys: Vec, + key: String, + master_password_hash: String, + private_key: String, } #[post("/accounts/key", data = "")] -async fn post_rotatekey(data: JsonUpcase, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { - let data: KeyData = data.into_inner().data; +async fn post_rotatekey(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { + // TODO: See if we can wrap everything within a SQL 
Transaction. If something fails it should revert everything. + let data: KeyData = data.into_inner(); - if !headers.user.check_valid_password(&data.MasterPasswordHash) { + if !headers.user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } @@ -448,50 +490,93 @@ async fn post_rotatekey(data: JsonUpcase, headers: Headers, mut conn: D // Bitwarden does not process the import if there is one item invalid. // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it. // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. - Cipher::validate_notes(&data.Ciphers)?; + Cipher::validate_notes(&data.ciphers)?; let user_uuid = &headers.user.uuid; // Update folder data - for folder_data in data.Folders { - let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &mut conn).await { - Some(folder) => folder, - None => err!("Folder doesn't exist"), - }; + for folder_data in data.folders { + // Skip `null` folder id entries. + // See: https://github.com/bitwarden/clients/issues/8453 + if let Some(folder_id) = folder_data.id { + let mut saved_folder = match Folder::find_by_uuid(&folder_id, &mut conn).await { + Some(folder) => folder, + None => err!("Folder doesn't exist"), + }; + + if &saved_folder.user_uuid != user_uuid { + err!("The folder is not owned by the user") + } - if &saved_folder.user_uuid != user_uuid { - err!("The folder is not owned by the user") + saved_folder.name = folder_data.name; + saved_folder.save(&mut conn).await? } + } - saved_folder.name = folder_data.Name; - saved_folder.save(&mut conn).await? 
+ // Update emergency access data + for emergency_access_data in data.emergency_access_keys { + let mut saved_emergency_access = + match EmergencyAccess::find_by_uuid_and_grantor_uuid(&emergency_access_data.id, user_uuid, &mut conn).await + { + Some(emergency_access) => emergency_access, + None => err!("Emergency access doesn't exist or is not owned by the user"), + }; + + saved_emergency_access.key_encrypted = Some(emergency_access_data.key_encrypted); + saved_emergency_access.save(&mut conn).await? + } + + // Update reset password data + for reset_password_data in data.reset_password_keys { + let mut user_org = + match UserOrganization::find_by_user_and_org(user_uuid, &reset_password_data.organization_id, &mut conn) + .await + { + Some(reset_password) => reset_password, + None => err!("Reset password doesn't exist"), + }; + + user_org.reset_password_key = Some(reset_password_data.reset_password_key); + user_org.save(&mut conn).await? + } + + // Update send data + for send_data in data.sends { + let mut send = match Send::find_by_uuid(send_data.id.as_ref().unwrap(), &mut conn).await { + Some(send) => send, + None => err!("Send doesn't exist"), + }; + + update_send_from_data(&mut send, send_data, &headers, &mut conn, &nt, UpdateType::None).await?; } // Update cipher data use super::ciphers::update_cipher_from_data; - for cipher_data in data.Ciphers { - let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &mut conn).await { - Some(cipher) => cipher, - None => err!("Cipher doesn't exist"), - }; + for cipher_data in data.ciphers { + if cipher_data.organization_id.is_none() { + let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.id.as_ref().unwrap(), &mut conn).await { + Some(cipher) => cipher, + None => err!("Cipher doesn't exist"), + }; - if saved_cipher.user_uuid.as_ref().unwrap() != user_uuid { - err!("The cipher is not owned by the user") - } + if saved_cipher.user_uuid.as_ref().unwrap() != user_uuid { + err!("The cipher is 
not owned by the user") + } - // Prevent triggering cipher updates via WebSockets by settings UpdateType::None - // The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues. - // We force the users to logout after the user has been saved to try and prevent these issues. - update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &mut conn, &nt, UpdateType::None) - .await? + // Prevent triggering cipher updates via WebSockets by settings UpdateType::None + // The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues. + // We force the users to logout after the user has been saved to try and prevent these issues. + update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None) + .await? + } } // Update user data let mut user = headers.user; - user.akey = data.Key; - user.private_key = Some(data.PrivateKey); + user.akey = data.key; + user.private_key = Some(data.private_key); user.reset_security_stamp(); let save_result = user.save(&mut conn).await; @@ -505,13 +590,8 @@ async fn post_rotatekey(data: JsonUpcase, headers: Headers, mut conn: D } #[post("/accounts/security-stamp", data = "")] -async fn post_sstamp( - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, - nt: Notify<'_>, -) -> EmptyResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn post_sstamp(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> EmptyResult { + let data: PasswordOrOtpData = data.into_inner(); let mut user = headers.user; data.validate(&user, true, &mut conn).await?; @@ -526,82 +606,79 @@ async fn post_sstamp( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EmailTokenData { - MasterPasswordHash: String, - NewEmail: String, + master_password_hash: String, + new_email: String, } #[post("/accounts/email-token", data 
= "")] -async fn post_email_token(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { +async fn post_email_token(data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { if !CONFIG.email_change_allowed() { err!("Email change is not allowed."); } - let data: EmailTokenData = data.into_inner().data; + let data: EmailTokenData = data.into_inner(); let mut user = headers.user; - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } - if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() { + if User::find_by_mail(&data.new_email, &mut conn).await.is_some() { err!("Email already in use"); } - if !CONFIG.is_email_domain_allowed(&data.NewEmail) { + if !CONFIG.is_email_domain_allowed(&data.new_email) { err!("Email domain not allowed"); } let token = crypto::generate_email_token(6); if CONFIG.mail_enabled() { - if let Err(e) = mail::send_change_email(&data.NewEmail, &token).await { + if let Err(e) = mail::send_change_email(&data.new_email, &token).await { error!("Error sending change-email email: {:#?}", e); } + } else { + debug!("Email change request for user ({}) to email ({}) with token ({})", user.uuid, data.new_email, token); } - user.email_new = Some(data.NewEmail); + user.email_new = Some(data.new_email); user.email_new_token = Some(token); user.save(&mut conn).await } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ChangeEmailData { - MasterPasswordHash: String, - NewEmail: String, + master_password_hash: String, + new_email: String, - Key: String, - NewMasterPasswordHash: String, - Token: NumberOrString, + key: String, + new_master_password_hash: String, + token: NumberOrString, } #[post("/accounts/email", data = "")] -async fn post_email( - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, - nt: Notify<'_>, -) -> EmptyResult { +async fn post_email(data: Json, headers: Headers, mut 
conn: DbConn, nt: Notify<'_>) -> EmptyResult { if !CONFIG.email_change_allowed() { err!("Email change is not allowed."); } - let data: ChangeEmailData = data.into_inner().data; + let data: ChangeEmailData = data.into_inner(); let mut user = headers.user; - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } - if User::find_by_mail(&data.NewEmail, &mut conn).await.is_some() { + if User::find_by_mail(&data.new_email, &mut conn).await.is_some() { err!("Email already in use"); } match user.email_new { Some(ref val) => { - if val != &data.NewEmail { + if val != &data.new_email { err!("Email change mismatch"); } } @@ -612,7 +689,7 @@ async fn post_email( // Only check the token if we sent out an email... match user.email_new_token { Some(ref val) => { - if *val != data.Token.into_string() { + if *val != data.token.into_string() { err!("Token mismatch"); } } @@ -623,11 +700,11 @@ async fn post_email( user.verified_at = None; } - user.email = data.NewEmail; + user.email = data.new_email; user.email_new = None; user.email_new_token = None; - user.set_password(&data.NewMasterPasswordHash, Some(data.Key), true, None); + user.set_password(&data.new_master_password_hash, Some(data.key), true, None); let save_result = user.save(&mut conn).await; @@ -652,22 +729,22 @@ async fn post_verify_email(headers: Headers) -> EmptyResult { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct VerifyEmailTokenData { - UserId: String, - Token: String, + user_id: String, + token: String, } #[post("/accounts/verify-email-token", data = "")] -async fn post_verify_email_token(data: JsonUpcase, mut conn: DbConn) -> EmptyResult { - let data: VerifyEmailTokenData = data.into_inner().data; +async fn post_verify_email_token(data: Json, mut conn: DbConn) -> EmptyResult { + let data: VerifyEmailTokenData = data.into_inner(); - let mut user = match 
User::find_by_uuid(&data.UserId, &mut conn).await { + let mut user = match User::find_by_uuid(&data.user_id, &mut conn).await { Some(user) => user, None => err!("User doesn't exist"), }; - let claims = match decode_verify_email(&data.Token) { + let claims = match decode_verify_email(&data.token) { Ok(claims) => claims, Err(_) => err!("Invalid claim"), }; @@ -685,17 +762,17 @@ async fn post_verify_email_token(data: JsonUpcase, mut con } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct DeleteRecoverData { - Email: String, + email: String, } #[post("/accounts/delete-recover", data = "")] -async fn post_delete_recover(data: JsonUpcase, mut conn: DbConn) -> EmptyResult { - let data: DeleteRecoverData = data.into_inner().data; +async fn post_delete_recover(data: Json, mut conn: DbConn) -> EmptyResult { + let data: DeleteRecoverData = data.into_inner(); if CONFIG.mail_enabled() { - if let Some(user) = User::find_by_mail(&data.Email, &mut conn).await { + if let Some(user) = User::find_by_mail(&data.email, &mut conn).await { if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await { error!("Error sending delete account email: {:#?}", e); } @@ -711,22 +788,22 @@ async fn post_delete_recover(data: JsonUpcase, mut conn: DbCo } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct DeleteRecoverTokenData { - UserId: String, - Token: String, + user_id: String, + token: String, } #[post("/accounts/delete-recover-token", data = "")] -async fn post_delete_recover_token(data: JsonUpcase, mut conn: DbConn) -> EmptyResult { - let data: DeleteRecoverTokenData = data.into_inner().data; +async fn post_delete_recover_token(data: Json, mut conn: DbConn) -> EmptyResult { + let data: DeleteRecoverTokenData = data.into_inner(); - let user = match User::find_by_uuid(&data.UserId, &mut conn).await { + let user = match User::find_by_uuid(&data.user_id, &mut conn).await { Some(user) => user, None => 
err!("User doesn't exist"), }; - let claims = match decode_delete(&data.Token) { + let claims = match decode_delete(&data.token) { Ok(claims) => claims, Err(_) => err!("Invalid claim"), }; @@ -737,13 +814,13 @@ async fn post_delete_recover_token(data: JsonUpcase, mut } #[post("/accounts/delete", data = "")] -async fn post_delete_account(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_delete_account(data: Json, headers: Headers, conn: DbConn) -> EmptyResult { delete_account(data, headers, conn).await } #[delete("/accounts", data = "")] -async fn delete_account(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn delete_account(data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, true, &mut conn).await?; @@ -753,26 +830,26 @@ async fn delete_account(data: JsonUpcase, headers: Headers, m #[get("/accounts/revision-date")] fn revision_date(headers: Headers) -> JsonResult { - let revision_date = headers.user.updated_at.timestamp_millis(); + let revision_date = headers.user.updated_at.and_utc().timestamp_millis(); Ok(Json(json!(revision_date))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct PasswordHintData { - Email: String, + email: String, } #[post("/accounts/password-hint", data = "")] -async fn password_hint(data: JsonUpcase, mut conn: DbConn) -> EmptyResult { +async fn password_hint(data: Json, mut conn: DbConn) -> EmptyResult { if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() { err!("This server is not configured to provide password hints."); } const NO_HINT: &str = "Sorry, you have no password hint..."; - let data: PasswordHintData = data.into_inner().data; - let email = &data.Email; + let data: PasswordHintData = data.into_inner(); + let email = &data.email; match 
User::find_by_mail(email, &mut conn).await { None => { @@ -806,29 +883,29 @@ async fn password_hint(data: JsonUpcase, mut conn: DbConn) -> } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct PreloginData { - Email: String, + email: String, } #[post("/accounts/prelogin", data = "")] -async fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { +async fn prelogin(data: Json, conn: DbConn) -> Json { _prelogin(data, conn).await } -pub async fn _prelogin(data: JsonUpcase, mut conn: DbConn) -> Json { - let data: PreloginData = data.into_inner().data; +pub async fn _prelogin(data: Json, mut conn: DbConn) -> Json { + let data: PreloginData = data.into_inner(); - let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.Email, &mut conn).await { + let (kdf_type, kdf_iter, kdf_mem, kdf_para) = match User::find_by_mail(&data.email, &mut conn).await { Some(user) => (user.client_kdf_type, user.client_kdf_iter, user.client_kdf_memory, user.client_kdf_parallelism), None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT, None, None), }; let result = json!({ - "Kdf": kdf_type, - "KdfIterations": kdf_iter, - "KdfMemory": kdf_mem, - "KdfParallelism": kdf_para, + "kdf": kdf_type, + "kdfIterations": kdf_iter, + "kdfMemory": kdf_mem, + "kdfParallelism": kdf_para, }); Json(result) @@ -836,27 +913,27 @@ pub async fn _prelogin(data: JsonUpcase, mut conn: DbConn) -> Json // https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct SecretVerificationRequest { - MasterPasswordHash: String, + master_password_hash: String, } #[post("/accounts/verify-password", data = "")] -fn verify_password(data: JsonUpcase, headers: Headers) -> EmptyResult { - let data: SecretVerificationRequest = data.into_inner().data; +fn verify_password(data: Json, headers: Headers) -> EmptyResult { 
+ let data: SecretVerificationRequest = data.into_inner(); let user = headers.user; - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Invalid password") } Ok(()) } -async fn _api_key(data: JsonUpcase, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult { +async fn _api_key(data: Json, rotate: bool, headers: Headers, mut conn: DbConn) -> JsonResult { use crate::util::format_date; - let data: PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); let mut user = headers.user; data.validate(&user, true, &mut conn).await?; @@ -867,38 +944,31 @@ async fn _api_key(data: JsonUpcase, rotate: bool, headers: He } Ok(Json(json!({ - "ApiKey": user.api_key, - "RevisionDate": format_date(&user.updated_at), - "Object": "apiKey", + "apiKey": user.api_key, + "revisionDate": format_date(&user.updated_at), + "object": "apiKey", }))) } #[post("/accounts/api-key", data = "")] -async fn api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn api_key(data: Json, headers: Headers, conn: DbConn) -> JsonResult { _api_key(data, false, headers, conn).await } #[post("/accounts/rotate-api-key", data = "")] -async fn rotate_api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn rotate_api_key(data: Json, headers: Headers, conn: DbConn) -> JsonResult { _api_key(data, true, headers, conn).await } -// This variant is deprecated: https://github.com/bitwarden/server/pull/2682 -#[get("/devices/knowndevice//")] -async fn get_known_device_from_path(email: &str, uuid: &str, mut conn: DbConn) -> JsonResult { - // This endpoint doesn't have auth header +#[get("/devices/knowndevice")] +async fn get_known_device(device: KnownDevice, mut conn: DbConn) -> JsonResult { let mut result = false; - if let Some(user) = User::find_by_mail(email, &mut conn).await { - result = Device::find_by_uuid_and_user(uuid, &user.uuid, &mut 
conn).await.is_some(); + if let Some(user) = User::find_by_mail(&device.email, &mut conn).await { + result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &mut conn).await.is_some(); } Ok(Json(json!(result))) } -#[get("/devices/knowndevice")] -async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResult { - get_known_device_from_path(&device.email, &device.uuid, conn).await -} - struct KnownDevice { email: String, uuid: String, @@ -940,20 +1010,20 @@ impl<'r> FromRequest<'r> for KnownDevice { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct PushToken { - PushToken: String, + push_token: String, } #[post("/devices/identifier//token", data = "")] -async fn post_device_token(uuid: &str, data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_device_token(uuid: &str, data: Json, headers: Headers, conn: DbConn) -> EmptyResult { put_device_token(uuid, data, headers, conn).await } #[put("/devices/identifier//token", data = "")] -async fn put_device_token(uuid: &str, data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { - let data = data.into_inner().data; - let token = data.PushToken; +async fn put_device_token(uuid: &str, data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { + let data = data.into_inner(); + let token = data.push_token; let mut device = match Device::find_by_uuid_and_user(&headers.device.uuid, &headers.user.uuid, &mut conn).await { Some(device) => device, @@ -1008,12 +1078,12 @@ async fn post_clear_device_token(uuid: &str, conn: DbConn) -> EmptyResult { } #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AuthRequestRequest { - accessCode: String, - deviceIdentifier: String, + access_code: String, + device_identifier: String, email: String, - publicKey: String, + public_key: String, #[serde(alias = "type")] _type: i32, } @@ -1036,15 +1106,15 @@ async fn post_auth_request( let mut 
auth_request = AuthRequest::new( user.uuid.clone(), - data.deviceIdentifier.clone(), + data.device_identifier.clone(), headers.device_type, headers.ip.ip.to_string(), - data.accessCode, - data.publicKey, + data.access_code, + data.public_key, ); auth_request.save(&mut conn).await?; - nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.deviceIdentifier, &mut conn).await; + nt.send_auth_request(&user.uuid, &auth_request.uuid, &data.device_identifier, &mut conn).await; Ok(Json(json!({ "id": auth_request.uuid, @@ -1090,12 +1160,12 @@ async fn get_auth_request(uuid: &str, mut conn: DbConn) -> JsonResult { } #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AuthResponseRequest { - deviceIdentifier: String, + device_identifier: String, key: String, - masterPasswordHash: Option, - requestApproved: bool, + master_password_hash: Option, + request_approved: bool, } #[put("/auth-requests/", data = "")] @@ -1114,15 +1184,15 @@ async fn put_auth_request( } }; - auth_request.approved = Some(data.requestApproved); + auth_request.approved = Some(data.request_approved); auth_request.enc_key = Some(data.key); - auth_request.master_password_hash = data.masterPasswordHash; - auth_request.response_device_id = Some(data.deviceIdentifier.clone()); + auth_request.master_password_hash = data.master_password_hash; + auth_request.response_device_id = Some(data.device_identifier.clone()); auth_request.save(&mut conn).await?; if auth_request.approved.unwrap_or(false) { ant.send_auth_response(&auth_request.user_uuid, &auth_request.uuid).await; - nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.deviceIdentifier, &mut conn).await; + nt.send_auth_response(&auth_request.user_uuid, &auth_request.uuid, data.device_identifier, &mut conn).await; } let response_date_utc = auth_request.response_date.map(|response_date| response_date.and_utc()); diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 
b3dca3b6..c2c78b33 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -10,8 +10,9 @@ use rocket::{ }; use serde_json::Value; +use crate::util::NumberOrString; use crate::{ - api::{self, core::log_event, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordOrOtpData, UpdateType}, + api::{self, core::log_event, EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType}, auth::Headers, crypto, db::{models::*, DbConn, DbPool}, @@ -78,6 +79,8 @@ pub fn routes() -> Vec { delete_all, move_cipher_selected, move_cipher_selected_put, + put_collections2_update, + post_collections2_update, put_collections_update, post_collections_update, post_collections_admin, @@ -140,15 +143,15 @@ async fn sync(data: SyncData, headers: Headers, mut conn: DbConn) -> Json }; Json(json!({ - "Profile": user_json, - "Folders": folders_json, - "Collections": collections_json, - "Policies": policies_json, - "Ciphers": ciphers_json, - "Domains": domains_json, - "Sends": sends_json, + "profile": user_json, + "folders": folders_json, + "collections": collections_json, + "policies": policies_json, + "ciphers": ciphers_json, + "domains": domains_json, + "sends": sends_json, "unofficialServer": true, - "Object": "sync" + "object": "sync" })) } @@ -166,9 +169,9 @@ async fn get_ciphers(headers: Headers, mut conn: DbConn) -> Json { } Json(json!({ - "Data": ciphers_json, - "Object": "list", - "ContinuationToken": null + "data": ciphers_json, + "object": "list", + "continuationToken": null })) } @@ -197,17 +200,17 @@ async fn get_cipher_details(uuid: &str, headers: Headers, conn: DbConn) -> JsonR get_cipher(uuid, headers, conn).await } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct CipherData { // Id is optional as it is included only in bulk share - pub Id: Option, + pub id: Option, // Folder id is not included in import - FolderId: Option, + folder_id: Option, // TODO: Some of these might appear 
all the time, no need for Option - OrganizationId: Option, + pub organization_id: Option, - Key: Option, + key: Option, /* Login = 1, @@ -215,27 +218,27 @@ pub struct CipherData { Card = 3, Identity = 4 */ - pub Type: i32, - pub Name: String, - pub Notes: Option, - Fields: Option, + pub r#type: i32, + pub name: String, + pub notes: Option, + fields: Option, // Only one of these should exist, depending on type - Login: Option, - SecureNote: Option, - Card: Option, - Identity: Option, + login: Option, + secure_note: Option, + card: Option, + identity: Option, - Favorite: Option, - Reprompt: Option, + favorite: Option, + reprompt: Option, - PasswordHistory: Option, + password_history: Option, // These are used during key rotation // 'Attachments' is unused, contains map of {id: filename} - #[serde(rename = "Attachments")] - _Attachments: Option, - Attachments2: Option>, + #[allow(dead_code)] + attachments: Option, + attachments2: Option>, // The revision datetime (in ISO 8601 format) of the client's local copy // of the cipher. This is used to prevent a client from updating a cipher @@ -243,31 +246,26 @@ pub struct CipherData { // loss. It's not an error when no value is provided; this can happen // when using older client versions, or if the operation doesn't involve // updating an existing cipher. - LastKnownRevisionDate: Option, + last_known_revision_date: Option, } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct PartialCipherData { - FolderId: Option, - Favorite: bool, + folder_id: Option, + favorite: bool, } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct Attachments2Data { - FileName: String, - Key: String, + file_name: String, + key: String, } /// Called when an org admin clones an org cipher. 
#[post("/ciphers/admin", data = "")] -async fn post_ciphers_admin( - data: JsonUpcase, - headers: Headers, - conn: DbConn, - nt: Notify<'_>, -) -> JsonResult { +async fn post_ciphers_admin(data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { post_ciphers_create(data, headers, conn, nt).await } @@ -276,25 +274,25 @@ async fn post_ciphers_admin( /// `organizationId` is null. #[post("/ciphers/create", data = "")] async fn post_ciphers_create( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let mut data: ShareCipherData = data.into_inner().data; + let mut data: ShareCipherData = data.into_inner(); // Check if there are one more more collections selected when this cipher is part of an organization. // err if this is not the case before creating an empty cipher. - if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { + if data.cipher.organization_id.is_some() && data.collection_ids.is_empty() { err!("You must select at least one collection."); } // This check is usually only needed in update_cipher_from_data(), but we // need it here as well to avoid creating an empty cipher in the call to // cipher.save() below. - enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &mut conn).await?; + enforce_personal_ownership_policy(Some(&data.cipher), &headers, &mut conn).await?; - let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); + let mut cipher = Cipher::new(data.cipher.r#type, data.cipher.name.clone()); cipher.user_uuid = Some(headers.user.uuid.clone()); cipher.save(&mut conn).await?; @@ -304,24 +302,24 @@ async fn post_ciphers_create( // the current time, so the stale data check will end up failing down the // line. Since this function only creates new ciphers (whether by cloning // or otherwise), we can just ignore this field entirely. 
- data.Cipher.LastKnownRevisionDate = None; + data.cipher.last_known_revision_date = None; share_cipher_by_uuid(&cipher.uuid, data, &headers, &mut conn, &nt).await } /// Called when creating a new user-owned cipher. #[post("/ciphers", data = "")] -async fn post_ciphers(data: JsonUpcase, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { - let mut data: CipherData = data.into_inner().data; +async fn post_ciphers(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { + let mut data: CipherData = data.into_inner(); // The web/browser clients set this field to null as expected, but the // mobile clients seem to set the invalid value `0001-01-01T00:00:00`, // which results in a warning message being logged. This field isn't // needed when creating a new cipher, so just ignore it unconditionally. - data.LastKnownRevisionDate = None; + data.last_known_revision_date = None; - let mut cipher = Cipher::new(data.Type, data.Name.clone()); - update_cipher_from_data(&mut cipher, data, &headers, false, &mut conn, &nt, UpdateType::SyncCipherCreate).await?; + let mut cipher = Cipher::new(data.r#type, data.name.clone()); + update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherCreate).await?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) } @@ -338,7 +336,7 @@ async fn enforce_personal_ownership_policy( headers: &Headers, conn: &mut DbConn, ) -> EmptyResult { - if data.is_none() || data.unwrap().OrganizationId.is_none() { + if data.is_none() || data.unwrap().organization_id.is_none() { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, None, conn).await { @@ -352,7 +350,7 @@ pub async fn update_cipher_from_data( cipher: &mut Cipher, data: CipherData, headers: &Headers, - shared_to_collection: bool, + shared_to_collections: Option>, conn: &mut 
DbConn, nt: &Notify<'_>, ut: UpdateType, @@ -362,7 +360,7 @@ pub async fn update_cipher_from_data( // Check that the client isn't updating an existing cipher with stale data. // And only perform this check when not importing ciphers, else the date/time check will fail. if ut != UpdateType::None { - if let Some(dt) = data.LastKnownRevisionDate { + if let Some(dt) = data.last_known_revision_date { match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), @@ -374,24 +372,24 @@ pub async fn update_cipher_from_data( } } - if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.OrganizationId { + if cipher.organization_uuid.is_some() && cipher.organization_uuid != data.organization_id { err!("Organization mismatch. Please resync the client before updating the cipher") } - if let Some(note) = &data.Notes { + if let Some(note) = &data.notes { if note.len() > 10_000 { err!("The field Notes exceeds the maximum encrypted value length of 10000 characters.") } } // Check if this cipher is being transferred from a personal to an organization vault - let transfer_cipher = cipher.organization_uuid.is_none() && data.OrganizationId.is_some(); + let transfer_cipher = cipher.organization_uuid.is_none() && data.organization_id.is_some(); - if let Some(org_id) = data.OrganizationId { + if let Some(org_id) = data.organization_id { match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn).await { None => err!("You don't have permission to add item to organization"), Some(org_user) => { - if shared_to_collection + if shared_to_collections.is_some() || org_user.has_full_access() || cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { @@ -411,7 +409,7 @@ pub async fn update_cipher_from_data( cipher.user_uuid = Some(headers.user.uuid.clone()); } - if let Some(ref folder_id) = data.FolderId { + if let Some(ref folder_id) = data.folder_id { match 
Folder::find_by_uuid(folder_id, conn).await { Some(folder) => { if folder.user_uuid != headers.user.uuid { @@ -423,7 +421,7 @@ pub async fn update_cipher_from_data( } // Modify attachments name and keys when rotating - if let Some(attachments) = data.Attachments2 { + if let Some(attachments) = data.attachments2 { for (id, attachment) in attachments { let mut saved_att = match Attachment::find_by_id(&id, conn).await { Some(att) => att, @@ -444,8 +442,8 @@ pub async fn update_cipher_from_data( break; } - saved_att.akey = Some(attachment.Key); - saved_att.file_name = attachment.FileName; + saved_att.akey = Some(attachment.key); + saved_att.file_name = attachment.file_name; saved_att.save(conn).await?; } @@ -459,44 +457,44 @@ pub async fn update_cipher_from_data( fn _clean_cipher_data(mut json_data: Value) -> Value { if json_data.is_array() { json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| { - f.as_object_mut().unwrap().remove("Response"); + f.as_object_mut().unwrap().remove("response"); }); }; json_data } - let type_data_opt = match data.Type { - 1 => data.Login, - 2 => data.SecureNote, - 3 => data.Card, - 4 => data.Identity, + let type_data_opt = match data.r#type { + 1 => data.login, + 2 => data.secure_note, + 3 => data.card, + 4 => data.identity, _ => err!("Invalid type"), }; let type_data = match type_data_opt { Some(mut data) => { // Remove the 'Response' key from the base object. - data.as_object_mut().unwrap().remove("Response"); + data.as_object_mut().unwrap().remove("response"); // Remove the 'Response' key from every Uri. 
- if data["Uris"].is_array() { - data["Uris"] = _clean_cipher_data(data["Uris"].clone()); + if data["uris"].is_array() { + data["uris"] = _clean_cipher_data(data["uris"].clone()); } data } None => err!("Data missing"), }; - cipher.key = data.Key; - cipher.name = data.Name; - cipher.notes = data.Notes; - cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string()); + cipher.key = data.key; + cipher.name = data.name; + cipher.notes = data.notes; + cipher.fields = data.fields.map(|f| _clean_cipher_data(f).to_string()); cipher.data = type_data.to_string(); - cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); - cipher.reprompt = data.Reprompt; + cipher.password_history = data.password_history.map(|f| f.to_string()); + cipher.reprompt = data.reprompt; cipher.save(conn).await?; - cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn).await?; - cipher.set_favorite(data.Favorite, &headers.user.uuid, conn).await?; + cipher.move_to_folder(data.folder_id, &headers.user.uuid, conn).await?; + cipher.set_favorite(data.favorite, &headers.user.uuid, conn).await?; if ut != UpdateType::None { // Only log events for organizational ciphers @@ -518,69 +516,83 @@ pub async fn update_cipher_from_data( ) .await; } - nt.send_cipher_update(ut, cipher, &cipher.update_users_revision(conn).await, &headers.device.uuid, None, conn) - .await; + nt.send_cipher_update( + ut, + cipher, + &cipher.update_users_revision(conn).await, + &headers.device.uuid, + shared_to_collections, + conn, + ) + .await; } Ok(()) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ImportData { - Ciphers: Vec, - Folders: Vec, - FolderRelationships: Vec, + ciphers: Vec, + folders: Vec, + folder_relationships: Vec, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct RelationsData { // Cipher id - Key: usize, + key: usize, // Folder id - Value: usize, + value: usize, } #[post("/ciphers/import", data = "")] 
async fn post_ciphers_import( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { enforce_personal_ownership_policy(None, &headers, &mut conn).await?; - let data: ImportData = data.into_inner().data; + let data: ImportData = data.into_inner(); // Validate the import before continuing // Bitwarden does not process the import if there is one item invalid. // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it. // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. - Cipher::validate_notes(&data.Ciphers)?; + Cipher::validate_notes(&data.ciphers)?; // Read and create the folders - let mut folders: Vec<_> = Vec::new(); - for folder in data.Folders.into_iter() { - let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name); - new_folder.save(&mut conn).await?; + let existing_folders: Vec = + Folder::find_by_user(&headers.user.uuid, &mut conn).await.into_iter().map(|f| f.uuid).collect(); + let mut folders: Vec = Vec::with_capacity(data.folders.len()); + for folder in data.folders.into_iter() { + let folder_uuid = if folder.id.is_some() && existing_folders.contains(folder.id.as_ref().unwrap()) { + folder.id.unwrap() + } else { + let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.name); + new_folder.save(&mut conn).await?; + new_folder.uuid + }; - folders.push(new_folder); + folders.push(folder_uuid); } // Read the relations between folders and ciphers - let mut relations_map = HashMap::new(); + let mut relations_map = HashMap::with_capacity(data.folder_relationships.len()); - for relation in data.FolderRelationships { - relations_map.insert(relation.Key, relation.Value); + for relation in data.folder_relationships { + relations_map.insert(relation.key, relation.value); } // Read and create the ciphers - for (index, mut cipher_data) in data.Ciphers.into_iter().enumerate() { - let folder_uuid = 
relations_map.get(&index).map(|i| folders[*i].uuid.clone()); - cipher_data.FolderId = folder_uuid; + for (index, mut cipher_data) in data.ciphers.into_iter().enumerate() { + let folder_uuid = relations_map.get(&index).map(|i| folders[*i].clone()); + cipher_data.folder_id = folder_uuid; - let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &mut conn, &nt, UpdateType::None).await?; + let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone()); + update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await?; } let mut user = headers.user; @@ -594,7 +606,7 @@ async fn post_ciphers_import( #[put("/ciphers//admin", data = "")] async fn put_cipher_admin( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -605,7 +617,7 @@ async fn put_cipher_admin( #[post("/ciphers//admin", data = "")] async fn post_cipher_admin( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -614,25 +626,19 @@ async fn post_cipher_admin( } #[post("/ciphers/", data = "")] -async fn post_cipher( - uuid: &str, - data: JsonUpcase, - headers: Headers, - conn: DbConn, - nt: Notify<'_>, -) -> JsonResult { +async fn post_cipher(uuid: &str, data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { put_cipher(uuid, data, headers, conn, nt).await } #[put("/ciphers/", data = "")] async fn put_cipher( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let data: CipherData = data.into_inner().data; + let data: CipherData = data.into_inner(); let mut cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { Some(cipher) => cipher, @@ -648,18 +654,13 @@ async fn put_cipher( err!("Cipher is not write accessible") } - update_cipher_from_data(&mut cipher, data, &headers, false, &mut conn, &nt, 
UpdateType::SyncCipherUpdate).await?; + update_cipher_from_data(&mut cipher, data, &headers, None, &mut conn, &nt, UpdateType::SyncCipherUpdate).await?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) } #[post("/ciphers//partial", data = "")] -async fn post_cipher_partial( - uuid: &str, - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +async fn post_cipher_partial(uuid: &str, data: Json, headers: Headers, conn: DbConn) -> JsonResult { put_cipher_partial(uuid, data, headers, conn).await } @@ -667,18 +668,18 @@ async fn post_cipher_partial( #[put("/ciphers//partial", data = "")] async fn put_cipher_partial( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, ) -> JsonResult { - let data: PartialCipherData = data.into_inner().data; + let data: PartialCipherData = data.into_inner(); let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if let Some(ref folder_id) = data.FolderId { + if let Some(ref folder_id) = data.folder_id { match Folder::find_by_uuid(folder_id, &mut conn).await { Some(folder) => { if folder.user_uuid != headers.user.uuid { @@ -690,45 +691,127 @@ async fn put_cipher_partial( } // Move cipher - cipher.move_to_folder(data.FolderId.clone(), &headers.user.uuid, &mut conn).await?; + cipher.move_to_folder(data.folder_id.clone(), &headers.user.uuid, &mut conn).await?; // Update favorite - cipher.set_favorite(Some(data.Favorite), &headers.user.uuid, &mut conn).await?; + cipher.set_favorite(Some(data.favorite), &headers.user.uuid, &mut conn).await?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct CollectionsAdminData { - CollectionIds: Vec, + collection_ids: Vec, +} + +#[put("/ciphers//collections_v2", data = "")] +async fn 
put_collections2_update( + uuid: &str, + data: Json, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + post_collections2_update(uuid, data, headers, conn, nt).await +} + +#[post("/ciphers//collections_v2", data = "")] +async fn post_collections2_update( + uuid: &str, + data: Json, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + let cipher_details = post_collections_update(uuid, data, headers, conn, nt).await?; + Ok(Json(json!({ // AttachmentUploadDataResponseModel + "object": "optionalCipherDetails", + "unavailable": false, + "cipher": *cipher_details + }))) } #[put("/ciphers//collections", data = "")] async fn put_collections_update( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, -) -> EmptyResult { - post_collections_admin(uuid, data, headers, conn, nt).await +) -> JsonResult { + post_collections_update(uuid, data, headers, conn, nt).await } #[post("/ciphers//collections", data = "")] async fn post_collections_update( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, - conn: DbConn, + mut conn: DbConn, nt: Notify<'_>, -) -> EmptyResult { - post_collections_admin(uuid, data, headers, conn, nt).await +) -> JsonResult { + let data: CollectionsAdminData = data.into_inner(); + + let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { + Some(cipher) => cipher, + None => err!("Cipher doesn't exist"), + }; + + if !cipher.is_write_accessible_to_user(&headers.user.uuid, &mut conn).await { + err!("Cipher is not write accessible") + } + + let posted_collections = HashSet::::from_iter(data.collection_ids); + let current_collections = + HashSet::::from_iter(cipher.get_collections(headers.user.uuid.clone(), &mut conn).await); + + for collection in posted_collections.symmetric_difference(¤t_collections) { + match Collection::find_by_uuid(collection, &mut conn).await { + None => err!("Invalid collection ID provided"), + Some(collection) => { + if 
collection.is_writable_by_user(&headers.user.uuid, &mut conn).await { + if posted_collections.contains(&collection.uuid) { + // Add to collection + CollectionCipher::save(&cipher.uuid, &collection.uuid, &mut conn).await?; + } else { + // Remove from collection + CollectionCipher::delete(&cipher.uuid, &collection.uuid, &mut conn).await?; + } + } else { + err!("No rights to modify the collection") + } + } + } + } + + nt.send_cipher_update( + UpdateType::SyncCipherUpdate, + &cipher, + &cipher.update_users_revision(&mut conn).await, + &headers.device.uuid, + Some(Vec::from_iter(posted_collections)), + &mut conn, + ) + .await; + + log_event( + EventType::CipherUpdatedCollections as i32, + &cipher.uuid, + &cipher.organization_uuid.clone().unwrap(), + &headers.user.uuid, + headers.device.atype, + &headers.ip.ip, + &mut conn, + ) + .await; + + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await)) } #[put("/ciphers//collections-admin", data = "")] async fn put_collections_admin( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -739,12 +822,12 @@ async fn put_collections_admin( #[post("/ciphers//collections-admin", data = "")] async fn post_collections_admin( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let data: CollectionsAdminData = data.into_inner().data; + let data: CollectionsAdminData = data.into_inner(); let cipher = match Cipher::find_by_uuid(uuid, &mut conn).await { Some(cipher) => cipher, @@ -755,9 +838,9 @@ async fn post_collections_admin( err!("Cipher is not write accessible") } - let posted_collections: HashSet = data.CollectionIds.iter().cloned().collect(); - let current_collections: HashSet = - cipher.get_collections(headers.user.uuid.clone(), &mut conn).await.iter().cloned().collect(); + let posted_collections = HashSet::::from_iter(data.collection_ids); + let current_collections = + 
HashSet::::from_iter(cipher.get_admin_collections(headers.user.uuid.clone(), &mut conn).await); for collection in posted_collections.symmetric_difference(¤t_collections) { match Collection::find_by_uuid(collection, &mut conn).await { @@ -803,21 +886,23 @@ async fn post_collections_admin( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ShareCipherData { - Cipher: CipherData, - CollectionIds: Vec, + #[serde(alias = "Cipher")] + cipher: CipherData, + #[serde(alias = "CollectionIds")] + collection_ids: Vec, } #[post("/ciphers//share", data = "")] async fn post_cipher_share( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let data: ShareCipherData = data.into_inner().data; + let data: ShareCipherData = data.into_inner(); share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await } @@ -825,53 +910,53 @@ async fn post_cipher_share( #[put("/ciphers//share", data = "")] async fn put_cipher_share( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let data: ShareCipherData = data.into_inner().data; + let data: ShareCipherData = data.into_inner(); share_cipher_by_uuid(uuid, data, &headers, &mut conn, &nt).await } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ShareSelectedCipherData { - Ciphers: Vec, - CollectionIds: Vec, + ciphers: Vec, + collection_ids: Vec, } #[put("/ciphers/share", data = "")] async fn put_cipher_share_selected( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let mut data: ShareSelectedCipherData = data.into_inner().data; + let mut data: ShareSelectedCipherData = data.into_inner(); - if data.Ciphers.is_empty() { + if data.ciphers.is_empty() { err!("You must select at least one cipher.") } - if data.CollectionIds.is_empty() { + if data.collection_ids.is_empty() { 
err!("You must select at least one collection.") } - for cipher in data.Ciphers.iter() { - if cipher.Id.is_none() { + for cipher in data.ciphers.iter() { + if cipher.id.is_none() { err!("Request missing ids field") } } - while let Some(cipher) = data.Ciphers.pop() { + while let Some(cipher) = data.ciphers.pop() { let mut shared_cipher_data = ShareCipherData { - Cipher: cipher, - CollectionIds: data.CollectionIds.clone(), + cipher, + collection_ids: data.collection_ids.clone(), }; - match shared_cipher_data.Cipher.Id.take() { + match shared_cipher_data.cipher.id.take() { Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &mut conn, &nt).await?, None => err!("Request missing ids field"), }; @@ -898,16 +983,16 @@ async fn share_cipher_by_uuid( None => err!("Cipher doesn't exist"), }; - let mut shared_to_collection = false; + let mut shared_to_collections = vec![]; - if let Some(organization_uuid) = &data.Cipher.OrganizationId { - for uuid in &data.CollectionIds { + if let Some(organization_uuid) = &data.cipher.organization_id { + for uuid in &data.collection_ids { match Collection::find_by_uuid_and_org(uuid, organization_uuid, conn).await { None => err!("Invalid collection ID provided"), Some(collection) => { if collection.is_writable_by_user(&headers.user.uuid, conn).await { CollectionCipher::save(&cipher.uuid, &collection.uuid, conn).await?; - shared_to_collection = true; + shared_to_collections.push(collection.uuid); } else { err!("No rights to modify the collection") } @@ -917,13 +1002,13 @@ async fn share_cipher_by_uuid( }; // When LastKnownRevisionDate is None, it is a new cipher, so send CipherCreate. 
- let ut = if data.Cipher.LastKnownRevisionDate.is_some() { + let ut = if data.cipher.last_known_revision_date.is_some() { UpdateType::SyncCipherUpdate } else { UpdateType::SyncCipherCreate }; - update_cipher_from_data(&mut cipher, data.Cipher, headers, shared_to_collection, conn, nt, ut).await?; + update_cipher_from_data(&mut cipher, data.cipher, headers, Some(shared_to_collections), conn, nt, ut).await?; Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, conn).await)) } @@ -953,12 +1038,12 @@ async fn get_attachment(uuid: &str, attachment_id: &str, headers: Headers, mut c } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AttachmentRequestData { - Key: String, - FileName: String, - FileSize: i64, - AdminRequest: Option, // true when attaching from an org vault view + key: String, + file_name: String, + file_size: NumberOrString, + admin_request: Option, // true when attaching from an org vault view } enum FileUploadType { @@ -973,7 +1058,7 @@ enum FileUploadType { #[post("/ciphers//attachment/v2", data = "")] async fn post_attachment_v2( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, ) -> JsonResult { @@ -986,26 +1071,28 @@ async fn post_attachment_v2( err!("Cipher is not write accessible") } - let data: AttachmentRequestData = data.into_inner().data; - if data.FileSize < 0 { + let data: AttachmentRequestData = data.into_inner(); + let file_size = data.file_size.into_i64()?; + + if file_size < 0 { err!("Attachment size can't be negative") } let attachment_id = crypto::generate_attachment_id(); let attachment = - Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, data.FileSize, Some(data.Key)); + Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.file_name, file_size, Some(data.key)); attachment.save(&mut conn).await.expect("Error saving attachment"); let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, 
attachment_id); - let response_key = match data.AdminRequest { - Some(b) if b => "CipherMiniResponse", - _ => "CipherResponse", + let response_key = match data.admin_request { + Some(b) if b => "cipherMiniResponse", + _ => "cipherResponse", }; Ok(Json(json!({ // AttachmentUploadDataResponseModel - "Object": "attachment-fileUpload", - "AttachmentId": attachment_id, - "Url": url, - "FileUploadType": FileUploadType::Direct as i32, + "object": "attachment-fileUpload", + "attachmentId": attachment_id, + "url": url, + "fileUploadType": FileUploadType::Direct as i32, response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, CipherSyncType::User, &mut conn).await, }))) } @@ -1341,7 +1428,7 @@ async fn delete_cipher_admin(uuid: &str, headers: Headers, mut conn: DbConn, nt: #[delete("/ciphers", data = "")] async fn delete_cipher_selected( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1351,7 +1438,7 @@ async fn delete_cipher_selected( #[post("/ciphers/delete", data = "")] async fn delete_cipher_selected_post( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1361,7 +1448,7 @@ async fn delete_cipher_selected_post( #[put("/ciphers/delete", data = "")] async fn delete_cipher_selected_put( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1371,7 +1458,7 @@ async fn delete_cipher_selected_put( #[delete("/ciphers/admin", data = "")] async fn delete_cipher_selected_admin( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1381,7 +1468,7 @@ async fn delete_cipher_selected_admin( #[post("/ciphers/delete-admin", data = "")] async fn delete_cipher_selected_post_admin( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1391,7 +1478,7 @@ async fn delete_cipher_selected_post_admin( #[put("/ciphers/delete-admin", data = "")] async fn delete_cipher_selected_put_admin( - data: JsonUpcase, + data: Json, 
headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1411,7 +1498,7 @@ async fn restore_cipher_put_admin(uuid: &str, headers: Headers, mut conn: DbConn #[put("/ciphers/restore", data = "")] async fn restore_cipher_selected( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, @@ -1420,23 +1507,23 @@ async fn restore_cipher_selected( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct MoveCipherData { - FolderId: Option, - Ids: Vec, + folder_id: Option, + ids: Vec, } #[post("/ciphers/move", data = "")] async fn move_cipher_selected( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let data = data.into_inner().data; + let data = data.into_inner(); let user_uuid = headers.user.uuid; - if let Some(ref folder_id) = data.FolderId { + if let Some(ref folder_id) = data.folder_id { match Folder::find_by_uuid(folder_id, &mut conn).await { Some(folder) => { if folder.user_uuid != user_uuid { @@ -1447,7 +1534,7 @@ async fn move_cipher_selected( } } - for uuid in data.Ids { + for uuid in data.ids { let cipher = match Cipher::find_by_uuid(&uuid, &mut conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), @@ -1458,7 +1545,7 @@ async fn move_cipher_selected( } // Move cipher - cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &mut conn).await?; + cipher.move_to_folder(data.folder_id.clone(), &user_uuid, &mut conn).await?; nt.send_cipher_update( UpdateType::SyncCipherUpdate, @@ -1476,7 +1563,7 @@ async fn move_cipher_selected( #[put("/ciphers/move", data = "")] async fn move_cipher_selected_put( - data: JsonUpcase, + data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>, @@ -1493,12 +1580,12 @@ struct OrganizationId { #[post("/ciphers/purge?", data = "")] async fn delete_all( organization: Option, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let data: 
PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); let mut user = headers.user; data.validate(&user, true, &mut conn).await?; @@ -1605,25 +1692,23 @@ async fn _delete_cipher_by_uuid( Ok(()) } +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct CipherIdsData { + ids: Vec, +} + async fn _delete_multiple_ciphers( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, soft_delete: bool, nt: Notify<'_>, ) -> EmptyResult { - let data: Value = data.into_inner().data; - - let uuids = match data.get("Ids") { - Some(ids) => match ids.as_array() { - Some(ids) => ids.iter().filter_map(Value::as_str), - None => err!("Posted ids field is not an array"), - }, - None => err!("Request missing ids field"), - }; + let data = data.into_inner(); - for uuid in uuids { - if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &mut conn, soft_delete, &nt).await { + for uuid in data.ids { + if let error @ Err(_) = _delete_cipher_by_uuid(&uuid, &headers, &mut conn, soft_delete, &nt).await { return error; }; } @@ -1671,33 +1756,25 @@ async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &mut DbCon } async fn _restore_multiple_ciphers( - data: JsonUpcase, + data: Json, headers: &Headers, conn: &mut DbConn, nt: &Notify<'_>, ) -> JsonResult { - let data: Value = data.into_inner().data; - - let uuids = match data.get("Ids") { - Some(ids) => match ids.as_array() { - Some(ids) => ids.iter().filter_map(Value::as_str), - None => err!("Posted ids field is not an array"), - }, - None => err!("Request missing ids field"), - }; + let data = data.into_inner(); let mut ciphers: Vec = Vec::new(); - for uuid in uuids { - match _restore_cipher_by_uuid(uuid, headers, conn, nt).await { + for uuid in data.ids { + match _restore_cipher_by_uuid(&uuid, headers, conn, nt).await { Ok(json) => ciphers.push(json.into_inner()), err => return err, } } Ok(Json(json!({ - "Data": ciphers, - "Object": "list", - 
"ContinuationToken": null + "data": ciphers, + "object": "list", + "continuationToken": null }))) } diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index fb163849..1c29b774 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -1,11 +1,11 @@ -use chrono::{Duration, Utc}; +use chrono::{TimeDelta, Utc}; use rocket::{serde::json::Json, Route}; use serde_json::Value; use crate::{ api::{ core::{CipherSyncData, CipherSyncType}, - EmptyResult, JsonResult, JsonUpcase, + EmptyResult, JsonResult, }, auth::{decode_emergency_access_invite, Headers}, db::{models::*, DbConn, DbPool}, @@ -43,31 +43,33 @@ pub fn routes() -> Vec { async fn get_contacts(headers: Headers, mut conn: DbConn) -> Json { if !CONFIG.emergency_access_allowed() { return Json(json!({ - "Data": [{ - "Id": "", - "Status": 2, - "Type": 0, - "WaitTimeDays": 0, - "GranteeId": "", - "Email": "", - "Name": "NOTE: Emergency Access is disabled!", - "Object": "emergencyAccessGranteeDetails", + "data": [{ + "id": "", + "status": 2, + "type": 0, + "waitTimeDays": 0, + "granteeId": "", + "email": "", + "name": "NOTE: Emergency Access is disabled!", + "object": "emergencyAccessGranteeDetails", }], - "Object": "list", - "ContinuationToken": null + "object": "list", + "continuationToken": null })); } let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &mut conn).await; let mut emergency_access_list_json = Vec::with_capacity(emergency_access_list.len()); for ea in emergency_access_list { - emergency_access_list_json.push(ea.to_json_grantee_details(&mut conn).await); + if let Some(grantee) = ea.to_json_grantee_details(&mut conn).await { + emergency_access_list_json.push(grantee) + } } Json(json!({ - "Data": emergency_access_list_json, - "Object": "list", - "ContinuationToken": null + "data": emergency_access_list_json, + "object": "list", + "continuationToken": null })) } @@ -84,18 +86,20 @@ async fn get_grantees(headers: 
Headers, mut conn: DbConn) -> Json { } Json(json!({ - "Data": emergency_access_list_json, - "Object": "list", - "ContinuationToken": null + "data": emergency_access_list_json, + "object": "list", + "continuationToken": null })) } #[get("/emergency-access/")] -async fn get_emergency_access(emer_id: &str, mut conn: DbConn) -> JsonResult { +async fn get_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { check_emergency_access_enabled()?; - match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&mut conn).await)), + match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emergency_access) => Ok(Json( + emergency_access.to_json_grantee_details(&mut conn).await.expect("Grantee user should exist but does not!"), + )), None => err!("Emergency access not valid."), } } @@ -105,42 +109,49 @@ async fn get_emergency_access(emer_id: &str, mut conn: DbConn) -> JsonResult { // region put/post #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EmergencyAccessUpdateData { - Type: NumberOrString, - WaitTimeDays: i32, - KeyEncrypted: Option, + r#type: NumberOrString, + wait_time_days: i32, + key_encrypted: Option, } #[put("/emergency-access/", data = "")] -async fn put_emergency_access(emer_id: &str, data: JsonUpcase, conn: DbConn) -> JsonResult { - post_emergency_access(emer_id, data, conn).await +async fn put_emergency_access( + emer_id: &str, + data: Json, + headers: Headers, + conn: DbConn, +) -> JsonResult { + post_emergency_access(emer_id, data, headers, conn).await } #[post("/emergency-access/", data = "")] async fn post_emergency_access( emer_id: &str, - data: JsonUpcase, + data: Json, + headers: Headers, mut conn: DbConn, ) -> JsonResult { check_emergency_access_enabled()?; - let data: EmergencyAccessUpdateData = data.into_inner().data; + let data: 
EmergencyAccessUpdateData = data.into_inner(); - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emergency_access) => emergency_access, - None => err!("Emergency access not valid."), - }; + let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emergency_access) => emergency_access, + None => err!("Emergency access not valid."), + }; - let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) { + let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) { Some(new_type) => new_type as i32, None => err!("Invalid emergency access type."), }; emergency_access.atype = new_type; - emergency_access.wait_time_days = data.WaitTimeDays; - if data.KeyEncrypted.is_some() { - emergency_access.key_encrypted = data.KeyEncrypted; + emergency_access.wait_time_days = data.wait_time_days; + if data.key_encrypted.is_some() { + emergency_access.key_encrypted = data.key_encrypted; } emergency_access.save(&mut conn).await?; @@ -155,17 +166,21 @@ async fn post_emergency_access( async fn delete_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> EmptyResult { check_emergency_access_enabled()?; - let grantor_user = headers.user; - - let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => { - if emer.grantor_uuid != grantor_user.uuid && emer.grantee_uuid != Some(grantor_user.uuid) { - err!("Emergency access not valid.") - } - emer + let emergency_access = match ( + EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await, + EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &headers.user.uuid, &mut conn).await, + ) { + (Some(grantor_emer), None) => { + info!("Grantor deleted emergency access {emer_id}"); + grantor_emer } - None => err!("Emergency access not valid."), + (None, Some(grantee_emer)) => { + info!("Grantee 
deleted emergency access {emer_id}"); + grantee_emer + } + _ => err!("Emergency access not valid."), }; + emergency_access.delete(&mut conn).await?; Ok(()) } @@ -180,24 +195,24 @@ async fn post_delete_emergency_access(emer_id: &str, headers: Headers, conn: DbC // region invite #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EmergencyAccessInviteData { - Email: String, - Type: NumberOrString, - WaitTimeDays: i32, + email: String, + r#type: NumberOrString, + wait_time_days: i32, } #[post("/emergency-access/invite", data = "")] -async fn send_invite(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { +async fn send_invite(data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { check_emergency_access_enabled()?; - let data: EmergencyAccessInviteData = data.into_inner().data; - let email = data.Email.to_lowercase(); - let wait_time_days = data.WaitTimeDays; + let data: EmergencyAccessInviteData = data.into_inner(); + let email = data.email.to_lowercase(); + let wait_time_days = data.wait_time_days; let emergency_access_status = EmergencyAccessStatus::Invited as i32; - let new_type = match EmergencyAccessType::from_str(&data.Type.into_string()) { + let new_type = match EmergencyAccessType::from_str(&data.r#type.into_string()) { Some(new_type) => new_type as i32, None => err!("Invalid emergency access type."), }; @@ -209,7 +224,7 @@ async fn send_invite(data: JsonUpcase, headers: Heade err!("You can not set yourself as an emergency contact.") } - let grantee_user = match User::find_by_mail(&email, &mut conn).await { + let (grantee_user, new_user) = match User::find_by_mail(&email, &mut conn).await { None => { if !CONFIG.invitations_allowed() { err!(format!("Grantee user does not exist: {}", &email)) @@ -226,9 +241,10 @@ async fn send_invite(data: JsonUpcase, headers: Heade let mut user = User::new(email.clone()); user.save(&mut conn).await?; - user + (user, true) } - Some(user) => user, + Some(user) 
if user.password_hash.is_empty() => (user, true), + Some(user) => (user, false), }; if EmergencyAccess::find_by_grantor_uuid_and_grantee_uuid_or_email( @@ -256,15 +272,9 @@ async fn send_invite(data: JsonUpcase, headers: Heade &grantor_user.email, ) .await?; - } else { - // Automatically mark user as accepted if no email invites - match User::find_by_mail(&email, &mut conn).await { - Some(user) => match accept_invite_process(&user.uuid, &mut new_emergency_access, &email, &mut conn).await { - Ok(v) => v, - Err(e) => err!(e.to_string()), - }, - None => err!("Grantee user not found."), - } + } else if !new_user { + // if mail is not enabled immediately accept the invitation for existing users + new_emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?; } Ok(()) @@ -274,14 +284,11 @@ async fn send_invite(data: JsonUpcase, headers: Heade async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> EmptyResult { check_emergency_access_enabled()?; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; - - if emergency_access.grantor_uuid != headers.user.uuid { - err!("Emergency access not valid."); - } + let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if emergency_access.status != EmergencyAccessStatus::Invited as i32 { err!("The grantee user is already accepted or confirmed to the organization"); @@ -308,34 +315,29 @@ async fn resend_invite(emer_id: &str, headers: Headers, mut conn: DbConn) -> Emp &grantor_user.email, ) .await?; - } else { - if Invitation::find_by_mail(&email, &mut conn).await.is_none() { - let invitation = Invitation::new(&email); - invitation.save(&mut conn).await?; - } - - // Automatically mark user as accepted if no email invites - match 
accept_invite_process(&grantee_user.uuid, &mut emergency_access, &email, &mut conn).await { - Ok(v) => v, - Err(e) => err!(e.to_string()), - } + } else if !grantee_user.password_hash.is_empty() { + // accept the invitation for existing user + emergency_access.accept_invite(&grantee_user.uuid, &email, &mut conn).await?; + } else if CONFIG.invitations_allowed() && Invitation::find_by_mail(&email, &mut conn).await.is_none() { + let invitation = Invitation::new(&email); + invitation.save(&mut conn).await?; } Ok(()) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AcceptData { - Token: String, + token: String, } #[post("/emergency-access//accept", data = "")] -async fn accept_invite(emer_id: &str, data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { +async fn accept_invite(emer_id: &str, data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { check_emergency_access_enabled()?; - let data: AcceptData = data.into_inner().data; - let token = &data.Token; + let data: AcceptData = data.into_inner(); + let token = &data.token; let claims = decode_emergency_access_invite(token)?; // This can happen if the user who received the invite used a different email to signup. @@ -352,10 +354,13 @@ async fn accept_invite(emer_id: &str, data: JsonUpcase, headers: Hea None => err!("Invited user not found"), }; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + // We need to search for the uuid in combination with the email, since we do not yet store the uuid of the grantee in the database. + // The uuid of the grantee gets stored once accepted. 
+ let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_email(emer_id, &headers.user.email, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; // get grantor user to send Accepted email let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &mut conn).await { @@ -367,10 +372,7 @@ async fn accept_invite(emer_id: &str, data: JsonUpcase, headers: Hea && grantor_user.name == claims.grantor_name && grantor_user.email == claims.grantor_email { - match accept_invite_process(&grantee_user.uuid, &mut emergency_access, &grantee_user.email, &mut conn).await { - Ok(v) => v, - Err(e) => err!(e.to_string()), - } + emergency_access.accept_invite(&grantee_user.uuid, &grantee_user.email, &mut conn).await?; if CONFIG.mail_enabled() { mail::send_emergency_access_invite_accepted(&grantor_user.email, &grantee_user.email).await?; @@ -382,49 +384,30 @@ async fn accept_invite(emer_id: &str, data: JsonUpcase, headers: Hea } } -async fn accept_invite_process( - grantee_uuid: &str, - emergency_access: &mut EmergencyAccess, - grantee_email: &str, - conn: &mut DbConn, -) -> EmptyResult { - if emergency_access.email.is_none() || emergency_access.email.as_ref().unwrap() != grantee_email { - err!("User email does not match invite."); - } - - if emergency_access.status == EmergencyAccessStatus::Accepted as i32 { - err!("Emergency contact already accepted."); - } - - emergency_access.status = EmergencyAccessStatus::Accepted as i32; - emergency_access.grantee_uuid = Some(String::from(grantee_uuid)); - emergency_access.email = None; - emergency_access.save(conn).await -} - #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ConfirmData { - Key: String, + key: String, } #[post("/emergency-access//confirm", data = "")] async fn confirm_emergency_access( emer_id: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, ) -> JsonResult { 
check_emergency_access_enabled()?; let confirming_user = headers.user; - let data: ConfirmData = data.into_inner().data; - let key = data.Key; + let data: ConfirmData = data.into_inner(); + let key = data.key; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &confirming_user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if emergency_access.status != EmergencyAccessStatus::Accepted as i32 || emergency_access.grantor_uuid != confirming_user.uuid @@ -467,14 +450,13 @@ async fn initiate_emergency_access(emer_id: &str, headers: Headers, mut conn: Db check_emergency_access_enabled()?; let initiating_user = headers.user; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &initiating_user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; - if emergency_access.status != EmergencyAccessStatus::Confirmed as i32 - || emergency_access.grantee_uuid != Some(initiating_user.uuid) - { + if emergency_access.status != EmergencyAccessStatus::Confirmed as i32 { err!("Emergency access not valid.") } @@ -506,14 +488,13 @@ async fn initiate_emergency_access(emer_id: &str, headers: Headers, mut conn: Db async fn approve_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { check_emergency_access_enabled()?; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let mut emergency_access = + match 
EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; - if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32 - || emergency_access.grantor_uuid != headers.user.uuid - { + if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32 { err!("Emergency access not valid.") } @@ -544,23 +525,18 @@ async fn approve_emergency_access(emer_id: &str, headers: Headers, mut conn: DbC async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { check_emergency_access_enabled()?; - let mut emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let mut emergency_access = + match EmergencyAccess::find_by_uuid_and_grantor_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; - if (emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32 - && emergency_access.status != EmergencyAccessStatus::RecoveryApproved as i32) - || emergency_access.grantor_uuid != headers.user.uuid + if emergency_access.status != EmergencyAccessStatus::RecoveryInitiated as i32 + && emergency_access.status != EmergencyAccessStatus::RecoveryApproved as i32 { err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&headers.user.uuid, &mut conn).await { - Some(user) => user, - None => err!("Grantor user not found."), - }; - if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { let grantee_user = match User::find_by_uuid(grantee_uuid, &mut conn).await { Some(user) => user, @@ -571,7 +547,7 @@ async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbCo emergency_access.save(&mut conn).await?; if CONFIG.mail_enabled() { - 
mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name).await?; + mail::send_emergency_access_recovery_rejected(&grantee_user.email, &headers.user.name).await?; } Ok(Json(emergency_access.to_json())) } else { @@ -587,10 +563,11 @@ async fn reject_emergency_access(emer_id: &str, headers: Headers, mut conn: DbCo async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { check_emergency_access_enabled()?; - let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &headers.user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if !is_valid_request(&emergency_access, &headers.user.uuid, EmergencyAccessType::View) { err!("Emergency access not valid.") @@ -614,9 +591,9 @@ async fn view_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn } Ok(Json(json!({ - "Ciphers": ciphers_json, - "KeyEncrypted": &emergency_access.key_encrypted, - "Object": "emergencyAccessView", + "ciphers": ciphers_json, + "keyEncrypted": &emergency_access.key_encrypted, + "object": "emergencyAccessView", }))) } @@ -625,10 +602,11 @@ async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: Db check_emergency_access_enabled()?; let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { err!("Emergency access not valid.") @@ 
-640,42 +618,43 @@ async fn takeover_emergency_access(emer_id: &str, headers: Headers, mut conn: Db }; let result = json!({ - "Kdf": grantor_user.client_kdf_type, - "KdfIterations": grantor_user.client_kdf_iter, - "KdfMemory": grantor_user.client_kdf_memory, - "KdfParallelism": grantor_user.client_kdf_parallelism, - "KeyEncrypted": &emergency_access.key_encrypted, - "Object": "emergencyAccessTakeover", + "kdf": grantor_user.client_kdf_type, + "kdfIterations": grantor_user.client_kdf_iter, + "kdfMemory": grantor_user.client_kdf_memory, + "kdfParallelism": grantor_user.client_kdf_parallelism, + "keyEncrypted": &emergency_access.key_encrypted, + "object": "emergencyAccessTakeover", }); Ok(Json(result)) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EmergencyAccessPasswordData { - NewMasterPasswordHash: String, - Key: String, + new_master_password_hash: String, + key: String, } #[post("/emergency-access//password", data = "")] async fn password_emergency_access( emer_id: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, ) -> EmptyResult { check_emergency_access_enabled()?; - let data: EmergencyAccessPasswordData = data.into_inner().data; - let new_master_password_hash = &data.NewMasterPasswordHash; + let data: EmergencyAccessPasswordData = data.into_inner(); + let new_master_password_hash = &data.new_master_password_hash; //let key = &data.Key; let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { err!("Emergency access not valid.") @@ -687,7 +666,7 @@ async 
fn password_emergency_access( }; // change grantor_user password - grantor_user.set_password(new_master_password_hash, Some(data.Key), true, None); + grantor_user.set_password(new_master_password_hash, Some(data.key), true, None); grantor_user.save(&mut conn).await?; // Disable TwoFactor providers since they will otherwise block logins @@ -707,10 +686,11 @@ async fn password_emergency_access( #[get("/emergency-access//policies")] async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: DbConn) -> JsonResult { let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(emer_id, &mut conn).await { - Some(emer) => emer, - None => err!("Emergency access not valid."), - }; + let emergency_access = + match EmergencyAccess::find_by_uuid_and_grantee_uuid(emer_id, &requesting_user.uuid, &mut conn).await { + Some(emer) => emer, + None => err!("Emergency access not valid."), + }; if !is_valid_request(&emergency_access, &requesting_user.uuid, EmergencyAccessType::Takeover) { err!("Emergency access not valid.") @@ -725,9 +705,9 @@ async fn policies_emergency_access(emer_id: &str, headers: Headers, mut conn: Db let policies_json: Vec = policies.await.iter().map(OrgPolicy::to_json).collect(); Ok(Json(json!({ - "Data": policies_json, - "Object": "list", - "ContinuationToken": null + "data": policies_json, + "object": "list", + "continuationToken": null }))) } @@ -766,7 +746,7 @@ pub async fn emergency_request_timeout_job(pool: DbPool) { for mut emer in emergency_access_list { // The find_all_recoveries_initiated already checks if the recovery_initiated_at is not null (None) let recovery_allowed_at = - emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days)); + emer.recovery_initiated_at.unwrap() + TimeDelta::try_days(i64::from(emer.wait_time_days)).unwrap(); if recovery_allowed_at.le(&now) { // Only update the access status // Updating the whole record could cause issues when the 
emergency_notification_reminder_job is also active @@ -822,10 +802,10 @@ pub async fn emergency_notification_reminder_job(pool: DbPool) { // The find_all_recoveries_initiated already checks if the recovery_initiated_at is not null (None) // Calculate the day before the recovery will become active let final_recovery_reminder_at = - emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days - 1)); + emer.recovery_initiated_at.unwrap() + TimeDelta::try_days(i64::from(emer.wait_time_days - 1)).unwrap(); // Calculate if a day has passed since the previous notification, else no notification has been sent before let next_recovery_reminder_at = if let Some(last_notification_at) = emer.last_notification_at { - last_notification_at + Duration::days(1) + last_notification_at + TimeDelta::try_days(1).unwrap() } else { now }; diff --git a/src/api/core/events.rs b/src/api/core/events.rs index d7aaeb4a..484094f5 100644 --- a/src/api/core/events.rs +++ b/src/api/core/events.rs @@ -5,7 +5,7 @@ use rocket::{form::FromForm, serde::json::Json, Route}; use serde_json::Value; use crate::{ - api::{EmptyResult, JsonResult, JsonUpcaseVec}, + api::{EmptyResult, JsonResult}, auth::{AdminHeaders, Headers}, db::{ models::{Cipher, Event, UserOrganization}, @@ -22,7 +22,6 @@ pub fn routes() -> Vec { } #[derive(FromForm)] -#[allow(non_snake_case)] struct EventRange { start: String, end: String, @@ -53,9 +52,9 @@ async fn get_org_events(org_id: &str, data: EventRange, _headers: AdminHeaders, }; Ok(Json(json!({ - "Data": events_json, - "Object": "list", - "ContinuationToken": get_continuation_token(&events_json), + "data": events_json, + "object": "list", + "continuationToken": get_continuation_token(&events_json), }))) } @@ -85,9 +84,9 @@ async fn get_cipher_events(cipher_id: &str, data: EventRange, headers: Headers, }; Ok(Json(json!({ - "Data": events_json, - "Object": "list", - "ContinuationToken": get_continuation_token(&events_json), + "data": events_json, + "object": 
"list", + "continuationToken": get_continuation_token(&events_json), }))) } @@ -119,9 +118,9 @@ async fn get_user_events( }; Ok(Json(json!({ - "Data": events_json, - "Object": "list", - "ContinuationToken": get_continuation_token(&events_json), + "data": events_json, + "object": "list", + "continuationToken": get_continuation_token(&events_json), }))) } @@ -145,33 +144,33 @@ pub fn main_routes() -> Vec { routes![post_events_collect,] } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct EventCollection { // Mandatory - Type: i32, - Date: String, + r#type: i32, + date: String, // Optional - CipherId: Option, - OrganizationId: Option, + cipher_id: Option, + organization_id: Option, } // Upstream: // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Events/Controllers/CollectController.cs // https://github.com/bitwarden/server/blob/8a22c0479e987e756ce7412c48a732f9002f0a2d/src/Core/Services/Implementations/EventService.cs #[post("/collect", format = "application/json", data = "")] -async fn post_events_collect(data: JsonUpcaseVec, headers: Headers, mut conn: DbConn) -> EmptyResult { +async fn post_events_collect(data: Json>, headers: Headers, mut conn: DbConn) -> EmptyResult { if !CONFIG.org_events_enabled() { return Ok(()); } - for event in data.iter().map(|d| &d.data) { - let event_date = parse_date(&event.Date); - match event.Type { + for event in data.iter() { + let event_date = parse_date(&event.date); + match event.r#type { 1000..=1099 => { _log_user_event( - event.Type, + event.r#type, &headers.user.uuid, headers.device.atype, Some(event_date), @@ -181,9 +180,9 @@ async fn post_events_collect(data: JsonUpcaseVec, headers: Head .await; } 1600..=1699 => { - if let Some(org_uuid) = &event.OrganizationId { + if let Some(org_uuid) = &event.organization_id { _log_event( - event.Type, + event.r#type, org_uuid, org_uuid, &headers.user.uuid, @@ -196,11 
+195,11 @@ async fn post_events_collect(data: JsonUpcaseVec, headers: Head } } _ => { - if let Some(cipher_uuid) = &event.CipherId { + if let Some(cipher_uuid) = &event.cipher_id { if let Some(cipher) = Cipher::find_by_uuid(cipher_uuid, &mut conn).await { if let Some(org_uuid) = cipher.organization_uuid { _log_event( - event.Type, + event.r#type, cipher_uuid, &org_uuid, &headers.user.uuid, @@ -289,7 +288,7 @@ async fn _log_event( let mut event = Event::new(event_type, event_date); match event_type { // 1000..=1099 Are user events, they need to be logged via log_user_event() - // Collection Events + // Cipher Events 1100..=1199 => { event.cipher_uuid = Some(String::from(source_uuid)); } diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs index 3af1285c..9766d7a1 100644 --- a/src/api/core/folders.rs +++ b/src/api/core/folders.rs @@ -2,7 +2,7 @@ use rocket::serde::json::Json; use serde_json::Value; use crate::{ - api::{EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, + api::{EmptyResult, JsonResult, Notify, UpdateType}, auth::Headers, db::{models::*, DbConn}, }; @@ -17,9 +17,9 @@ async fn get_folders(headers: Headers, mut conn: DbConn) -> Json { let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); Json(json!({ - "Data": folders_json, - "Object": "list", - "ContinuationToken": null, + "data": folders_json, + "object": "list", + "continuationToken": null, })) } @@ -38,16 +38,17 @@ async fn get_folder(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResul } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct FolderData { - pub Name: String, + pub name: String, + pub id: Option, } #[post("/folders", data = "")] -async fn post_folders(data: JsonUpcase, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { - let data: FolderData = data.into_inner().data; +async fn post_folders(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { + let data: 
FolderData = data.into_inner(); - let mut folder = Folder::new(headers.user.uuid, data.Name); + let mut folder = Folder::new(headers.user.uuid, data.name); folder.save(&mut conn).await?; nt.send_folder_update(UpdateType::SyncFolderCreate, &folder, &headers.device.uuid, &mut conn).await; @@ -56,25 +57,19 @@ async fn post_folders(data: JsonUpcase, headers: Headers, mut conn: } #[post("/folders/", data = "")] -async fn post_folder( - uuid: &str, - data: JsonUpcase, - headers: Headers, - conn: DbConn, - nt: Notify<'_>, -) -> JsonResult { +async fn post_folder(uuid: &str, data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { put_folder(uuid, data, headers, conn, nt).await } #[put("/folders/", data = "")] async fn put_folder( uuid: &str, - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let data: FolderData = data.into_inner().data; + let data: FolderData = data.into_inner(); let mut folder = match Folder::find_by_uuid(uuid, &mut conn).await { Some(folder) => folder, @@ -85,7 +80,7 @@ async fn put_folder( err!("Folder belongs to another user") } - folder.name = data.Name; + folder.name = data.name; folder.save(&mut conn).await?; nt.send_folder_update(UpdateType::SyncFolderUpdate, &folder, &headers.device.uuid, &mut conn).await; diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 7712ea82..9da0e886 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -49,19 +49,19 @@ pub fn events_routes() -> Vec { use rocket::{serde::json::Json, serde::json::Value, Catcher, Route}; use crate::{ - api::{JsonResult, JsonUpcase, Notify, UpdateType}, + api::{JsonResult, Notify, UpdateType}, auth::Headers, db::DbConn, error::Error, util::{get_reqwest_client, parse_experimental_client_feature_flags}, }; -#[derive(Serialize, Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] struct GlobalDomain { - Type: i32, - Domains: Vec, - 
Excluded: bool, + r#type: i32, + domains: Vec, + excluded: bool, } const GLOBAL_DOMAINS: &str = include_str!("../../static/global_domains.json"); @@ -81,38 +81,38 @@ fn _get_eq_domains(headers: Headers, no_excluded: bool) -> Json { let mut globals: Vec = from_str(GLOBAL_DOMAINS).unwrap(); for global in &mut globals { - global.Excluded = excluded_globals.contains(&global.Type); + global.excluded = excluded_globals.contains(&global.r#type); } if no_excluded { - globals.retain(|g| !g.Excluded); + globals.retain(|g| !g.excluded); } Json(json!({ - "EquivalentDomains": equivalent_domains, - "GlobalEquivalentDomains": globals, - "Object": "domains", + "equivalentDomains": equivalent_domains, + "globalEquivalentDomains": globals, + "object": "domains", })) } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct EquivDomainData { - ExcludedGlobalEquivalentDomains: Option>, - EquivalentDomains: Option>>, + excluded_global_equivalent_domains: Option>, + equivalent_domains: Option>>, } #[post("/settings/domains", data = "")] async fn post_eq_domains( - data: JsonUpcase, + data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>, ) -> JsonResult { - let data: EquivDomainData = data.into_inner().data; + let data: EquivDomainData = data.into_inner(); - let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default(); - let equivalent_domains = data.EquivalentDomains.unwrap_or_default(); + let excluded_globals = data.excluded_global_equivalent_domains.unwrap_or_default(); + let equivalent_domains = data.equivalent_domains.unwrap_or_default(); let mut user = headers.user; use serde_json::to_string; @@ -128,12 +128,7 @@ async fn post_eq_domains( } #[put("/settings/domains", data = "")] -async fn put_eq_domains( - data: JsonUpcase, - headers: Headers, - conn: DbConn, - nt: Notify<'_>, -) -> JsonResult { +async fn put_eq_domains(data: Json, headers: Headers, conn: DbConn, nt: Notify<'_>) 
-> JsonResult { post_eq_domains(data, headers, conn, nt).await } @@ -157,15 +152,15 @@ async fn hibp_breach(username: &str) -> JsonResult { Ok(Json(value)) } else { Ok(Json(json!([{ - "Name": "HaveIBeenPwned", - "Title": "Manual HIBP Check", - "Domain": "haveibeenpwned.com", - "BreachDate": "2019-08-18T00:00:00Z", - "AddedDate": "2019-08-18T00:00:00Z", - "Description": format!("Go to: https://haveibeenpwned.com/account/{username} for a manual check.

HaveIBeenPwned API key not set!
Go to https://haveibeenpwned.com/API/Key to purchase an API key from HaveIBeenPwned.

"), - "LogoPath": "vw_static/hibp.png", - "PwnCount": 0, - "DataClasses": [ + "name": "HaveIBeenPwned", + "title": "Manual HIBP Check", + "domain": "haveibeenpwned.com", + "breachDate": "2019-08-18T00:00:00Z", + "addedDate": "2019-08-18T00:00:00Z", + "description": format!("Go to: https://haveibeenpwned.com/account/{username} for a manual check.

HaveIBeenPwned API key not set!
Go to https://haveibeenpwned.com/API/Key to purchase an API key from HaveIBeenPwned.

"), + "logoPath": "vw_static/hibp.png", + "pwnCount": 0, + "dataClasses": [ "Error - No API key set!" ] }]))) @@ -191,14 +186,17 @@ fn version() -> Json<&'static str> { #[get("/config")] fn config() -> Json { let domain = crate::CONFIG.domain(); - let feature_states = parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags()); + let mut feature_states = + parse_experimental_client_feature_flags(&crate::CONFIG.experimental_client_feature_flags()); + // Force the new key rotation feature + feature_states.insert("key-rotation-improvements".to_string(), true); Json(json!({ // Note: The clients use this version to handle backwards compatibility concerns // This means they expect a version that closely matches the Bitwarden server version // We should make sure that we keep this updated when we support the new server features // Version history: // - Individual cipher key encryption: 2023.9.1 - "version": "2023.9.1", + "version": "2024.2.0", "gitHash": option_env!("GIT_REV"), "server": { "name": "Vaultwarden", diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index f3d39849..204dd56f 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -6,7 +6,7 @@ use serde_json::Value; use crate::{ api::{ core::{log_event, two_factor, CipherSyncData, CipherSyncType}, - EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, JsonVec, Notify, PasswordOrOtpData, UpdateType, + EmptyResult, JsonResult, Notify, PasswordOrOtpData, UpdateType, }, auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders}, db::{models::*, DbConn}, @@ -100,56 +100,56 @@ pub fn routes() -> Vec { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgData { - BillingEmail: String, - CollectionName: String, - Key: String, - Name: String, - Keys: Option, - #[serde(rename = "PlanType")] - _PlanType: NumberOrString, // Ignored, always use the same plan + 
billing_email: String, + collection_name: String, + key: String, + name: String, + keys: Option, + #[allow(dead_code)] + plan_type: NumberOrString, // Ignored, always use the same plan } #[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrganizationUpdateData { - BillingEmail: String, - Name: String, + billing_email: String, + name: String, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct NewCollectionData { - Name: String, - Groups: Vec, - Users: Vec, - ExternalId: Option, + name: String, + groups: Vec, + users: Vec, + external_id: Option, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct NewCollectionObjectData { - HidePasswords: bool, - Id: String, - ReadOnly: bool, + hide_passwords: bool, + id: String, + read_only: bool, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgKeyData { - EncryptedPrivateKey: String, - PublicKey: String, + encrypted_private_key: String, + public_key: String, } #[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgBulkIds { - Ids: Vec, + ids: Vec, } #[post("/organizations", data = "")] -async fn create_organization(headers: Headers, data: JsonUpcase, mut conn: DbConn) -> JsonResult { +async fn create_organization(headers: Headers, data: Json, mut conn: DbConn) -> JsonResult { if !CONFIG.is_org_creation_allowed(&headers.user.email) { err!("User not allowed to create organizations") } @@ -159,19 +159,19 @@ async fn create_organization(headers: Headers, data: JsonUpcase, mut co ) } - let data: OrgData = data.into_inner().data; - let (private_key, public_key) = if data.Keys.is_some() { - let keys: OrgKeyData = data.Keys.unwrap(); - (Some(keys.EncryptedPrivateKey), Some(keys.PublicKey)) + let data: OrgData = data.into_inner(); + let (private_key, public_key) = if data.keys.is_some() { + let keys: OrgKeyData = 
data.keys.unwrap(); + (Some(keys.encrypted_private_key), Some(keys.public_key)) } else { (None, None) }; - let org = Organization::new(data.Name, data.BillingEmail, private_key, public_key); + let org = Organization::new(data.name, data.billing_email, private_key, public_key); let mut user_org = UserOrganization::new(headers.user.uuid, org.uuid.clone()); - let collection = Collection::new(org.uuid.clone(), data.CollectionName, None); + let collection = Collection::new(org.uuid.clone(), data.collection_name, None); - user_org.akey = data.Key; + user_org.akey = data.key; user_org.access_all = true; user_org.atype = UserOrgType::Owner as i32; user_org.status = UserOrgStatus::Confirmed as i32; @@ -186,11 +186,11 @@ async fn create_organization(headers: Headers, data: JsonUpcase, mut co #[delete("/organizations/", data = "")] async fn delete_organization( org_id: &str, - data: JsonUpcase, + data: Json, headers: OwnerHeaders, mut conn: DbConn, ) -> EmptyResult { - let data: PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); data.validate(&headers.user, true, &mut conn).await?; @@ -203,7 +203,7 @@ async fn delete_organization( #[post("/organizations//delete", data = "")] async fn post_delete_organization( org_id: &str, - data: JsonUpcase, + data: Json, headers: OwnerHeaders, conn: DbConn, ) -> EmptyResult { @@ -249,7 +249,7 @@ async fn get_organization(org_id: &str, _headers: OwnerHeaders, mut conn: DbConn async fn put_organization( org_id: &str, headers: OwnerHeaders, - data: JsonUpcase, + data: Json, conn: DbConn, ) -> JsonResult { post_organization(org_id, headers, data, conn).await @@ -259,18 +259,18 @@ async fn put_organization( async fn post_organization( org_id: &str, headers: OwnerHeaders, - data: JsonUpcase, + data: Json, mut conn: DbConn, ) -> JsonResult { - let data: OrganizationUpdateData = data.into_inner().data; + let data: OrganizationUpdateData = data.into_inner(); let mut org = match 
Organization::find_by_uuid(org_id, &mut conn).await { Some(organization) => organization, None => err!("Can't find organization details"), }; - org.name = data.Name; - org.billing_email = data.BillingEmail; + org.name = data.name; + org.billing_email = data.billing_email; org.save(&mut conn).await?; @@ -292,22 +292,22 @@ async fn post_organization( #[get("/collections")] async fn get_user_collections(headers: Headers, mut conn: DbConn) -> Json { Json(json!({ - "Data": + "data": Collection::find_by_user_uuid(headers.user.uuid, &mut conn).await .iter() .map(Collection::to_json) .collect::(), - "Object": "list", - "ContinuationToken": null, + "object": "list", + "continuationToken": null, })) } #[get("/organizations//collections")] async fn get_org_collections(org_id: &str, _headers: ManagerHeadersLoose, mut conn: DbConn) -> Json { Json(json!({ - "Data": _get_org_collections(org_id, &mut conn).await, - "Object": "list", - "ContinuationToken": null, + "data": _get_org_collections(org_id, &mut conn).await, + "object": "list", + "continuationToken": null, })) } @@ -320,9 +320,29 @@ async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose, None => err!("User is not part of organization"), }; + // get all collection memberships for the current organization let coll_users = CollectionUser::find_by_organization(org_id, &mut conn).await; + // check if current user has full access to the organization (either directly or via any group) + let has_full_access_to_org = user_org.access_all + || (CONFIG.org_groups_enabled() + && GroupUser::has_full_access_by_member(org_id, &user_org.uuid, &mut conn).await); + for col in Collection::find_by_organization(org_id, &mut conn).await { + // check whether the current user has access to the given collection + let assigned = has_full_access_to_org + || CollectionUser::has_access_to_collection_by_user(&col.uuid, &user_org.user_uuid, &mut conn).await + || (CONFIG.org_groups_enabled() + && 
GroupUser::has_access_to_collection_by_member(&col.uuid, &user_org.uuid, &mut conn).await); + + // get the users assigned directly to the given collection + let users: Vec = coll_users + .iter() + .filter(|collection_user| collection_user.collection_uuid == col.uuid) + .map(|collection_user| SelectionReadOnly::to_collection_user_details_read_only(collection_user).to_json()) + .collect(); + + // get the group details for the given collection let groups: Vec = if CONFIG.org_groups_enabled() { CollectionGroup::find_by_collection(&col.uuid, &mut conn) .await @@ -332,41 +352,21 @@ async fn get_org_collections_details(org_id: &str, headers: ManagerHeadersLoose, }) .collect() } else { - // The Bitwarden clients seem to call this API regardless of whether groups are enabled, - // so just act as if there are no groups. Vec::with_capacity(0) }; - let mut assigned = false; - let users: Vec = coll_users - .iter() - .filter(|collection_user| collection_user.collection_uuid == col.uuid) - .map(|collection_user| { - // Remember `user_uuid` is swapped here with the `user_org.uuid` with a join during the `CollectionUser::find_by_organization` call. - // We check here if the current user is assigned to this collection or not. 
- if collection_user.user_uuid == user_org.uuid { - assigned = true; - } - SelectionReadOnly::to_collection_user_details_read_only(collection_user).to_json() - }) - .collect(); - - if user_org.access_all { - assigned = true; - } - let mut json_object = col.to_json(); - json_object["Assigned"] = json!(assigned); - json_object["Users"] = json!(users); - json_object["Groups"] = json!(groups); - json_object["Object"] = json!("collectionAccessDetails"); + json_object["assigned"] = json!(assigned); + json_object["users"] = json!(users); + json_object["groups"] = json!(groups); + json_object["object"] = json!("collectionAccessDetails"); data.push(json_object) } Ok(Json(json!({ - "Data": data, - "Object": "list", - "ContinuationToken": null, + "data": data, + "object": "list", + "continuationToken": null, }))) } @@ -378,17 +378,17 @@ async fn _get_org_collections(org_id: &str, conn: &mut DbConn) -> Value { async fn post_organization_collections( org_id: &str, headers: ManagerHeadersLoose, - data: JsonUpcase, + data: Json, mut conn: DbConn, ) -> JsonResult { - let data: NewCollectionData = data.into_inner().data; + let data: NewCollectionData = data.into_inner(); let org = match Organization::find_by_uuid(org_id, &mut conn).await { Some(organization) => organization, None => err!("Can't find organization details"), }; - let collection = Collection::new(org.uuid, data.Name, data.ExternalId); + let collection = Collection::new(org.uuid, data.name, data.external_id); collection.save(&mut conn).await?; log_event( @@ -402,14 +402,14 @@ async fn post_organization_collections( ) .await; - for group in data.Groups { - CollectionGroup::new(collection.uuid.clone(), group.Id, group.ReadOnly, group.HidePasswords) + for group in data.groups { + CollectionGroup::new(collection.uuid.clone(), group.id, group.read_only, group.hide_passwords) .save(&mut conn) .await?; } - for user in data.Users { - let org_user = match UserOrganization::find_by_uuid(&user.Id, &mut conn).await { + for user in 
data.users { + let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await { Some(u) => u, None => err!("User is not part of organization"), }; @@ -418,7 +418,7 @@ async fn post_organization_collections( continue; } - CollectionUser::save(&org_user.user_uuid, &collection.uuid, user.ReadOnly, user.HidePasswords, &mut conn) + CollectionUser::save(&org_user.user_uuid, &collection.uuid, user.read_only, user.hide_passwords, &mut conn) .await?; } @@ -434,7 +434,7 @@ async fn put_organization_collection_update( org_id: &str, col_id: &str, headers: ManagerHeaders, - data: JsonUpcase, + data: Json, conn: DbConn, ) -> JsonResult { post_organization_collection_update(org_id, col_id, headers, data, conn).await @@ -445,10 +445,10 @@ async fn post_organization_collection_update( org_id: &str, col_id: &str, headers: ManagerHeaders, - data: JsonUpcase, + data: Json, mut conn: DbConn, ) -> JsonResult { - let data: NewCollectionData = data.into_inner().data; + let data: NewCollectionData = data.into_inner(); let org = match Organization::find_by_uuid(org_id, &mut conn).await { Some(organization) => organization, @@ -464,8 +464,8 @@ async fn post_organization_collection_update( err!("Collection is not owned by organization"); } - collection.name = data.Name; - collection.external_id = match data.ExternalId { + collection.name = data.name; + collection.external_id = match data.external_id { Some(external_id) if !external_id.trim().is_empty() => Some(external_id), _ => None, }; @@ -485,16 +485,16 @@ async fn post_organization_collection_update( CollectionGroup::delete_all_by_collection(col_id, &mut conn).await?; - for group in data.Groups { - CollectionGroup::new(String::from(col_id), group.Id, group.ReadOnly, group.HidePasswords) + for group in data.groups { + CollectionGroup::new(String::from(col_id), group.id, group.read_only, group.hide_passwords) .save(&mut conn) .await?; } CollectionUser::delete_all_by_collection(col_id, &mut conn).await?; - for user in 
data.Users { - let org_user = match UserOrganization::find_by_uuid(&user.Id, &mut conn).await { + for user in data.users { + let org_user = match UserOrganization::find_by_uuid(&user.id, &mut conn).await { Some(u) => u, None => err!("User is not part of organization"), }; @@ -503,7 +503,7 @@ async fn post_organization_collection_update( continue; } - CollectionUser::save(&org_user.user_uuid, col_id, user.ReadOnly, user.HidePasswords, &mut conn).await?; + CollectionUser::save(&org_user.user_uuid, col_id, user.read_only, user.hide_passwords, &mut conn).await?; } Ok(Json(collection.to_json())) @@ -589,10 +589,12 @@ async fn delete_organization_collection( } #[derive(Deserialize, Debug)] -#[allow(non_snake_case, dead_code)] +#[serde(rename_all = "camelCase")] struct DeleteCollectionData { - Id: String, - OrgId: String, + #[allow(dead_code)] + id: String, + #[allow(dead_code)] + org_id: String, } #[post("/organizations//collections//delete", data = "<_data>")] @@ -600,28 +602,28 @@ async fn post_organization_collection_delete( org_id: &str, col_id: &str, headers: ManagerHeaders, - _data: JsonUpcase, + _data: Json, mut conn: DbConn, ) -> EmptyResult { _delete_organization_collection(org_id, col_id, &headers, &mut conn).await } #[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct BulkCollectionIds { - Ids: Vec, + ids: Vec, } #[delete("/organizations//collections", data = "")] async fn bulk_delete_organization_collections( org_id: &str, headers: ManagerHeadersLoose, - data: JsonUpcase, + data: Json, mut conn: DbConn, ) -> EmptyResult { - let data: BulkCollectionIds = data.into_inner().data; + let data: BulkCollectionIds = data.into_inner(); - let collections = data.Ids; + let collections = data.ids; let headers = ManagerHeaders::from_loose(headers, &collections, &mut conn).await?; @@ -664,30 +666,22 @@ async fn get_org_collection_detail( Vec::with_capacity(0) }; - let mut assigned = false; let users: Vec = 
CollectionUser::find_by_collection_swap_user_uuid_with_org_user_uuid(&collection.uuid, &mut conn) .await .iter() .map(|collection_user| { - // Remember `user_uuid` is swapped here with the `user_org.uuid` with a join during the `find_by_collection_swap_user_uuid_with_org_user_uuid` call. - // We check here if the current user is assigned to this collection or not. - if collection_user.user_uuid == user_org.uuid { - assigned = true; - } SelectionReadOnly::to_collection_user_details_read_only(collection_user).to_json() }) .collect(); - if user_org.access_all { - assigned = true; - } + let assigned = Collection::can_access_collection(&user_org, &collection.uuid, &mut conn).await; let mut json_object = collection.to_json(); - json_object["Assigned"] = json!(assigned); - json_object["Users"] = json!(users); - json_object["Groups"] = json!(groups); - json_object["Object"] = json!("collectionAccessDetails"); + json_object["assigned"] = json!(assigned); + json_object["users"] = json!(users); + json_object["groups"] = json!(groups); + json_object["object"] = json!("collectionAccessDetails"); Ok(Json(json_object)) } @@ -719,7 +713,7 @@ async fn get_collection_users(org_id: &str, coll_id: &str, _headers: ManagerHead async fn put_collection_users( org_id: &str, coll_id: &str, - data: JsonUpcaseVec, + data: Json>, _headers: ManagerHeaders, mut conn: DbConn, ) -> EmptyResult { @@ -732,8 +726,8 @@ async fn put_collection_users( CollectionUser::delete_all_by_collection(coll_id, &mut conn).await?; // And then add all the received ones (except if the user has access_all) - for d in data.iter().map(|d| &d.data) { - let user = match UserOrganization::find_by_uuid(&d.Id, &mut conn).await { + for d in data.iter() { + let user = match UserOrganization::find_by_uuid(&d.id, &mut conn).await { Some(u) => u, None => err!("User is not part of organization"), }; @@ -742,7 +736,7 @@ async fn put_collection_users( continue; } - CollectionUser::save(&user.user_uuid, coll_id, d.ReadOnly, 
d.HidePasswords, &mut conn).await?; + CollectionUser::save(&user.user_uuid, coll_id, d.read_only, d.hide_passwords, &mut conn).await?; } Ok(()) @@ -757,9 +751,9 @@ struct OrgIdData { #[get("/ciphers/organization-details?")] async fn get_org_details(data: OrgIdData, headers: Headers, mut conn: DbConn) -> Json { Json(json!({ - "Data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await, - "Object": "list", - "ContinuationToken": null, + "data": _get_org_details(&data.organization_id, &headers.host, &headers.user.uuid, &mut conn).await, + "object": "list", + "continuationToken": null, })) } @@ -803,20 +797,15 @@ async fn get_org_users( } Json(json!({ - "Data": users_json, - "Object": "list", - "ContinuationToken": null, + "data": users_json, + "object": "list", + "continuationToken": null, })) } #[post("/organizations//keys", data = "")] -async fn post_org_keys( - org_id: &str, - data: JsonUpcase, - _headers: AdminHeaders, - mut conn: DbConn, -) -> JsonResult { - let data: OrgKeyData = data.into_inner().data; +async fn post_org_keys(org_id: &str, data: Json, _headers: AdminHeaders, mut conn: DbConn) -> JsonResult { + let data: OrgKeyData = data.into_inner(); let mut org = match Organization::find_by_uuid(org_id, &mut conn).await { Some(organization) => { @@ -828,46 +817,41 @@ async fn post_org_keys( None => err!("Can't find organization details"), }; - org.private_key = Some(data.EncryptedPrivateKey); - org.public_key = Some(data.PublicKey); + org.private_key = Some(data.encrypted_private_key); + org.public_key = Some(data.public_key); org.save(&mut conn).await?; Ok(Json(json!({ - "Object": "organizationKeys", - "PublicKey": org.public_key, - "PrivateKey": org.private_key, + "object": "organizationKeys", + "publicKey": org.public_key, + "privateKey": org.private_key, }))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct CollectionData { - Id: String, - ReadOnly: bool, - HidePasswords: 
bool, + id: String, + read_only: bool, + hide_passwords: bool, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct InviteData { - Emails: Vec, - Groups: Vec, - Type: NumberOrString, - Collections: Option>, - AccessAll: Option, + emails: Vec, + groups: Vec, + r#type: NumberOrString, + collections: Option>, + access_all: Option, } #[post("/organizations//users/invite", data = "")] -async fn send_invite( - org_id: &str, - data: JsonUpcase, - headers: AdminHeaders, - mut conn: DbConn, -) -> EmptyResult { - let data: InviteData = data.into_inner().data; +async fn send_invite(org_id: &str, data: Json, headers: AdminHeaders, mut conn: DbConn) -> EmptyResult { + let data: InviteData = data.into_inner(); - let new_type = match UserOrgType::from_str(&data.Type.into_string()) { + let new_type = match UserOrgType::from_str(&data.r#type.into_string()) { Some(new_type) => new_type as i32, None => err!("Invalid type"), }; @@ -876,7 +860,7 @@ async fn send_invite( err!("Only Owners can invite Managers, Admins or Owners") } - for email in data.Emails.iter() { + for email in data.emails.iter() { let email = email.to_lowercase(); let mut user_org_status = UserOrgStatus::Invited as i32; let user = match User::find_by_mail(&email, &mut conn).await { @@ -912,19 +896,25 @@ async fn send_invite( }; let mut new_user = UserOrganization::new(user.uuid.clone(), String::from(org_id)); - let access_all = data.AccessAll.unwrap_or(false); + let access_all = data.access_all.unwrap_or(false); new_user.access_all = access_all; new_user.atype = new_type; new_user.status = user_org_status; // If no accessAll, add the collections received if !access_all { - for col in data.Collections.iter().flatten() { - match Collection::find_by_uuid_and_org(&col.Id, org_id, &mut conn).await { + for col in data.collections.iter().flatten() { + match Collection::find_by_uuid_and_org(&col.id, org_id, &mut conn).await { None => err!("Collection not found in Organization"), 
Some(collection) => { - CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &mut conn) - .await?; + CollectionUser::save( + &user.uuid, + &collection.uuid, + col.read_only, + col.hide_passwords, + &mut conn, + ) + .await?; } } } @@ -932,7 +922,7 @@ async fn send_invite( new_user.save(&mut conn).await?; - for group in data.Groups.iter() { + for group in data.groups.iter() { let mut group_entry = GroupUser::new(String::from(group), user.uuid.clone()); group_entry.save(&mut conn).await?; } @@ -972,14 +962,14 @@ async fn send_invite( #[post("/organizations//users/reinvite", data = "")] async fn bulk_reinvite_user( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> Json { - let data: OrgBulkIds = data.into_inner().data; + let data: OrgBulkIds = data.into_inner(); let mut bulk_response = Vec::new(); - for org_user_id in data.Ids { + for org_user_id in data.ids { let err_msg = match _reinvite_user(org_id, &org_user_id, &headers.user.email, &mut conn).await { Ok(_) => String::new(), Err(e) => format!("{e:?}"), @@ -987,17 +977,17 @@ async fn bulk_reinvite_user( bulk_response.push(json!( { - "Object": "OrganizationBulkConfirmResponseModel", - "Id": org_user_id, - "Error": err_msg + "object": "OrganizationBulkConfirmResponseModel", + "id": org_user_id, + "error": err_msg } )) } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -1053,25 +1043,20 @@ async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, co } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct AcceptData { - Token: String, - ResetPasswordKey: Option, + token: String, + reset_password_key: Option, } #[post("/organizations//users/<_org_user_id>/accept", data = "")] -async fn accept_invite( - org_id: &str, - _org_user_id: &str, - data: JsonUpcase, - mut conn: 
DbConn, -) -> EmptyResult { +async fn accept_invite(org_id: &str, _org_user_id: &str, data: Json, mut conn: DbConn) -> EmptyResult { // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead - let data: AcceptData = data.into_inner().data; - let claims = decode_invite(&data.Token)?; + let data: AcceptData = data.into_inner(); + let claims = decode_invite(&data.token)?; match User::find_by_mail(&claims.email, &mut conn).await { - Some(_) => { + Some(user) => { Invitation::take(&claims.email, &mut conn).await; if let (Some(user_org), Some(org)) = (&claims.user_org_id, &claims.org_id) { @@ -1085,7 +1070,7 @@ async fn accept_invite( } let master_password_required = OrgPolicy::org_is_reset_password_auto_enroll(org, &mut conn).await; - if data.ResetPasswordKey.is_none() && master_password_required { + if data.reset_password_key.is_none() && master_password_required { err!("Reset password key is required, but not provided."); } @@ -1095,7 +1080,11 @@ async fn accept_invite( match OrgPolicy::is_user_allowed(&user_org.user_uuid, org_id, false, &mut conn).await { Ok(_) => {} Err(OrgPolicyErr::TwoFactorMissing) => { - err!("You cannot join this organization until you enable two-step login on your user account"); + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::activate_email_2fa(&user, &mut conn).await?; + } else { + err!("You cannot join this organization until you enable two-step login on your user account"); + } } Err(OrgPolicyErr::SingleOrgEnforced) => { err!("You cannot join this organization because you are a member of an organization which forbids it"); @@ -1106,7 +1095,7 @@ async fn accept_invite( user_org.status = UserOrgStatus::Accepted as i32; if master_password_required { - user_org.reset_password_key = data.ResetPasswordKey; + user_org.reset_password_key = data.reset_password_key; } user_org.save(&mut conn).await?; @@ -1135,32 +1124,45 @@ async fn accept_invite( Ok(()) } +#[derive(Deserialize)] 
+#[serde(rename_all = "camelCase")] +struct ConfirmData { + id: Option, + key: Option, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +struct BulkConfirmData { + keys: Option>, +} + #[post("/organizations//users/confirm", data = "")] async fn bulk_confirm_invite( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, nt: Notify<'_>, ) -> Json { - let data = data.into_inner().data; + let data = data.into_inner(); let mut bulk_response = Vec::new(); - match data["Keys"].as_array() { + match data.keys { Some(keys) => { for invite in keys { - let org_user_id = invite["Id"].as_str().unwrap_or_default(); - let user_key = invite["Key"].as_str().unwrap_or_default(); - let err_msg = match _confirm_invite(org_id, org_user_id, user_key, &headers, &mut conn, &nt).await { + let org_user_id = invite.id.unwrap_or_default(); + let user_key = invite.key.unwrap_or_default(); + let err_msg = match _confirm_invite(org_id, &org_user_id, &user_key, &headers, &mut conn, &nt).await { Ok(_) => String::new(), Err(e) => format!("{e:?}"), }; bulk_response.push(json!( { - "Object": "OrganizationBulkConfirmResponseModel", - "Id": org_user_id, - "Error": err_msg + "object": "OrganizationBulkConfirmResponseModel", + "id": org_user_id, + "error": err_msg } )); } @@ -1169,9 +1171,9 @@ async fn bulk_confirm_invite( } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -1179,14 +1181,14 @@ async fn bulk_confirm_invite( async fn confirm_invite( org_id: &str, org_user_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let data = data.into_inner().data; - let user_key = data["Key"].as_str().unwrap_or_default(); - _confirm_invite(org_id, org_user_id, user_key, &headers, &mut conn, &nt).await + let data = data.into_inner(); + let user_key = data.key.unwrap_or_default(); 
+ _confirm_invite(org_id, org_user_id, &user_key, &headers, &mut conn, &nt).await } async fn _confirm_invite( @@ -1220,10 +1222,14 @@ async fn _confirm_invite( match OrgPolicy::is_user_allowed(&user_to_confirm.user_uuid, org_id, true, conn).await { Ok(_) => {} Err(OrgPolicyErr::TwoFactorMissing) => { - err!("You cannot confirm this user because it has no two-step login method activated"); + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&user_to_confirm.user_uuid, conn).await?; + } else { + err!("You cannot confirm this user because they have not setup 2FA"); + } } Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot confirm this user because it is a member of an organization which forbids it"); + err!("You cannot confirm this user because they are a member of an organization which forbids it"); } } } @@ -1285,19 +1291,19 @@ async fn get_user( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EditUserData { - Type: NumberOrString, - Collections: Option>, - Groups: Option>, - AccessAll: bool, + r#type: NumberOrString, + collections: Option>, + groups: Option>, + access_all: bool, } #[put("/organizations//users/", data = "", rank = 1)] async fn put_organization_user( org_id: &str, org_user_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, conn: DbConn, ) -> EmptyResult { @@ -1308,13 +1314,13 @@ async fn put_organization_user( async fn edit_user( org_id: &str, org_user_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> EmptyResult { - let data: EditUserData = data.into_inner().data; + let data: EditUserData = data.into_inner(); - let new_type = match UserOrgType::from_str(&data.Type.into_string()) { + let new_type = match UserOrgType::from_str(&data.r#type.into_string()) { Some(new_type) => new_type, None => err!("Invalid type"), }; @@ -1351,15 +1357,19 @@ async fn edit_user( match 
OrgPolicy::is_user_allowed(&user_to_edit.user_uuid, org_id, true, &mut conn).await { Ok(_) => {} Err(OrgPolicyErr::TwoFactorMissing) => { - err!("You cannot modify this user to this type because it has no two-step login method activated"); + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&user_to_edit.user_uuid, &mut conn).await?; + } else { + err!("You cannot modify this user to this type because they have not setup 2FA"); + } } Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot modify this user to this type because it is a member of an organization which forbids it"); + err!("You cannot modify this user to this type because they are a member of an organization which forbids it"); } } } - user_to_edit.access_all = data.AccessAll; + user_to_edit.access_all = data.access_all; user_to_edit.atype = new_type as i32; // Delete all the odd collections @@ -1368,16 +1378,16 @@ async fn edit_user( } // If no accessAll, add the collections received - if !data.AccessAll { - for col in data.Collections.iter().flatten() { - match Collection::find_by_uuid_and_org(&col.Id, org_id, &mut conn).await { + if !data.access_all { + for col in data.collections.iter().flatten() { + match Collection::find_by_uuid_and_org(&col.id, org_id, &mut conn).await { None => err!("Collection not found in Organization"), Some(collection) => { CollectionUser::save( &user_to_edit.user_uuid, &collection.uuid, - col.ReadOnly, - col.HidePasswords, + col.read_only, + col.hide_passwords, &mut conn, ) .await?; @@ -1388,7 +1398,7 @@ async fn edit_user( GroupUser::delete_all_by_user(&user_to_edit.uuid, &mut conn).await?; - for group in data.Groups.iter().flatten() { + for group in data.groups.iter().flatten() { let mut group_entry = GroupUser::new(String::from(group), user_to_edit.uuid.clone()); group_entry.save(&mut conn).await?; } @@ -1410,15 +1420,15 @@ async fn edit_user( #[delete("/organizations//users", data = "")] async fn bulk_delete_user( org_id: &str, 
- data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, nt: Notify<'_>, ) -> Json { - let data: OrgBulkIds = data.into_inner().data; + let data: OrgBulkIds = data.into_inner(); let mut bulk_response = Vec::new(); - for org_user_id in data.Ids { + for org_user_id in data.ids { let err_msg = match _delete_user(org_id, &org_user_id, &headers, &mut conn, &nt).await { Ok(_) => String::new(), Err(e) => format!("{e:?}"), @@ -1426,17 +1436,17 @@ async fn bulk_delete_user( bulk_response.push(json!( { - "Object": "OrganizationBulkConfirmResponseModel", - "Id": org_user_id, - "Error": err_msg + "object": "OrganizationBulkConfirmResponseModel", + "id": org_user_id, + "error": err_msg } )) } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -1506,25 +1516,25 @@ async fn _delete_user( #[post("/organizations//users/public-keys", data = "")] async fn bulk_public_keys( org_id: &str, - data: JsonUpcase, + data: Json, _headers: AdminHeaders, mut conn: DbConn, ) -> Json { - let data: OrgBulkIds = data.into_inner().data; + let data: OrgBulkIds = data.into_inner(); let mut bulk_response = Vec::new(); // Check all received UserOrg UUID's and find the matching User to retrieve the public-key. // If the user does not exists, just ignore it, and do not return any information regarding that UserOrg UUID. // The web-vault will then ignore that user for the following steps. 
- for user_org_id in data.Ids { + for user_org_id in data.ids { match UserOrganization::find_by_uuid_and_org(&user_org_id, org_id, &mut conn).await { Some(user_org) => match User::find_by_uuid(&user_org.user_uuid, &mut conn).await { Some(user) => bulk_response.push(json!( { - "Object": "organizationUserPublicKeyResponseModel", - "Id": user_org_id, - "UserId": user.uuid, - "Key": user.public_key + "object": "organizationUserPublicKeyResponseModel", + "id": user_org_id, + "userId": user.uuid, + "key": user.public_key } )), None => debug!("User doesn't exist"), @@ -1534,9 +1544,9 @@ async fn bulk_public_keys( } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -1544,42 +1554,42 @@ use super::ciphers::update_cipher_from_data; use super::ciphers::CipherData; #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ImportData { - Ciphers: Vec, - Collections: Vec, - CollectionRelationships: Vec, + ciphers: Vec, + collections: Vec, + collection_relationships: Vec, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct RelationsData { // Cipher index - Key: usize, + key: usize, // Collection index - Value: usize, + value: usize, } #[post("/ciphers/import-organization?", data = "")] async fn post_org_import( query: OrgIdData, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { - let data: ImportData = data.into_inner().data; + let data: ImportData = data.into_inner(); let org_id = query.organization_id; // Validate the import before continuing // Bitwarden does not process the import if there is one item invalid. // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it. // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks. 
- Cipher::validate_notes(&data.Ciphers)?; + Cipher::validate_notes(&data.ciphers)?; let mut collections = Vec::new(); - for coll in data.Collections { - let collection = Collection::new(org_id.clone(), coll.Name, coll.ExternalId); + for coll in data.collections { + let collection = Collection::new(org_id.clone(), coll.name, coll.external_id); if collection.save(&mut conn).await.is_err() { collections.push(Err(Error::new("Failed to create Collection", "Failed to create Collection"))); } else { @@ -1589,16 +1599,16 @@ async fn post_org_import( // Read the relations between collections and ciphers let mut relations = Vec::new(); - for relation in data.CollectionRelationships { - relations.push((relation.Key, relation.Value)); + for relation in data.collection_relationships { + relations.push((relation.key, relation.value)); } let headers: Headers = headers.into(); let mut ciphers = Vec::new(); - for cipher_data in data.Ciphers { - let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &mut conn, &nt, UpdateType::None).await.ok(); + for cipher_data in data.ciphers { + let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone()); + update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok(); ciphers.push(cipher); } @@ -1624,9 +1634,9 @@ async fn list_policies(org_id: &str, _headers: AdminHeaders, mut conn: DbConn) - let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); Json(json!({ - "Data": policies_json, - "Object": "list", - "ContinuationToken": null + "data": policies_json, + "object": "list", + "continuationToken": null })) } @@ -1648,9 +1658,9 @@ async fn list_policies_token(org_id: &str, token: &str, mut conn: DbConn) -> Jso let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); Ok(Json(json!({ - "Data": policies_json, - "Object": "list", - "ContinuationToken": null + 
"data": policies_json, + "object": "list", + "continuationToken": null }))) } @@ -1775,27 +1785,27 @@ fn get_organization_tax(org_id: &str, _headers: Headers) -> Json { fn get_plans() -> Json { // Respond with a minimal json just enough to allow the creation of an new organization. Json(json!({ - "Object": "list", - "Data": [{ - "Object": "plan", - "Type": 0, - "Product": 0, - "Name": "Free", - "NameLocalizationKey": "planNameFree", - "BitwardenProduct": 0, - "MaxUsers": 0, - "DescriptionLocalizationKey": "planDescFree" + "object": "list", + "data": [{ + "object": "plan", + "type": 0, + "product": 0, + "name": "Free", + "nameLocalizationKey": "planNameFree", + "bitwardenProduct": 0, + "maxUsers": 0, + "descriptionLocalizationKey": "planDescFree" },{ - "Object": "plan", - "Type": 0, - "Product": 1, - "Name": "Free", - "NameLocalizationKey": "planNameFree", - "BitwardenProduct": 1, - "MaxUsers": 0, - "DescriptionLocalizationKey": "planDescFree" + "object": "plan", + "type": 0, + "product": 1, + "name": "Free", + "nameLocalizationKey": "planNameFree", + "bitwardenProduct": 1, + "maxUsers": 0, + "descriptionLocalizationKey": "planDescFree" }], - "ContinuationToken": null + "continuationToken": null })) } @@ -1812,41 +1822,44 @@ fn get_plans_tax_rates(_headers: Headers) -> Json { fn _empty_data_json() -> Value { json!({ - "Object": "list", - "Data": [], - "ContinuationToken": null + "object": "list", + "data": [], + "continuationToken": null }) } #[derive(Deserialize, Debug)] -#[allow(non_snake_case, dead_code)] +#[serde(rename_all = "camelCase")] struct OrgImportGroupData { - Name: String, // "GroupName" - ExternalId: String, // "cn=GroupName,ou=Groups,dc=example,dc=com" - Users: Vec, // ["uid=user,ou=People,dc=example,dc=com"] + #[allow(dead_code)] + name: String, // "GroupName" + #[allow(dead_code)] + external_id: String, // "cn=GroupName,ou=Groups,dc=example,dc=com" + #[allow(dead_code)] + users: Vec, // ["uid=user,ou=People,dc=example,dc=com"] } 
#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgImportUserData { - Email: String, // "user@maildomain.net" + email: String, // "user@maildomain.net" #[allow(dead_code)] - ExternalId: String, // "uid=user,ou=People,dc=example,dc=com" - Deleted: bool, + external_id: String, // "uid=user,ou=People,dc=example,dc=com" + deleted: bool, } #[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgImportData { #[allow(dead_code)] - Groups: Vec, - OverwriteExisting: bool, - Users: Vec, + groups: Vec, + overwrite_existing: bool, + users: Vec, } #[post("/organizations//import", data = "")] -async fn import(org_id: &str, data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { - let data = data.into_inner().data; +async fn import(org_id: &str, data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { + let data = data.into_inner(); // TODO: Currently we aren't storing the externalId's anywhere, so we also don't have a way // to differentiate between auto-imported users and manually added ones. 
@@ -1860,10 +1873,10 @@ async fn import(org_id: &str, data: JsonUpcase, headers: Headers, None => err!("User not part of organization"), }; - for user_data in &data.Users { - if user_data.Deleted { + for user_data in &data.users { + if user_data.deleted { // If user is marked for deletion and it exists, delete it - if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.Email, org_id, &mut conn).await { + if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.email, org_id, &mut conn).await { log_event( EventType::OrganizationUserRemoved as i32, &user_org.uuid, @@ -1879,8 +1892,8 @@ async fn import(org_id: &str, data: JsonUpcase, headers: Headers, } // If user is not part of the organization, but it exists - } else if UserOrganization::find_by_email_and_org(&user_data.Email, org_id, &mut conn).await.is_none() { - if let Some(user) = User::find_by_mail(&user_data.Email, &mut conn).await { + } else if UserOrganization::find_by_email_and_org(&user_data.email, org_id, &mut conn).await.is_none() { + if let Some(user) = User::find_by_mail(&user_data.email, &mut conn).await { let user_org_status = if CONFIG.mail_enabled() { UserOrgStatus::Invited as i32 } else { @@ -1912,7 +1925,7 @@ async fn import(org_id: &str, data: JsonUpcase, headers: Headers, }; mail::send_invite( - &user_data.Email, + &user_data.email, &user.uuid, Some(String::from(org_id)), Some(new_org_user.uuid), @@ -1926,10 +1939,10 @@ async fn import(org_id: &str, data: JsonUpcase, headers: Headers, } // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) - if data.OverwriteExisting { + if data.overwrite_existing { for user_org in UserOrganization::find_by_org_and_type(org_id, UserOrgType::User, &mut conn).await { if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &mut conn).await.map(|u| u.email) { - if !data.Users.iter().any(|u| u.Email == user_email) { + 
if !data.users.iter().any(|u| u.email == user_email) { log_event( EventType::OrganizationUserRemoved as i32, &user_org.uuid, @@ -1965,7 +1978,7 @@ async fn deactivate_organization_user( #[put("/organizations//users/deactivate", data = "")] async fn bulk_deactivate_organization_user( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, conn: DbConn, ) -> Json { @@ -1982,30 +1995,35 @@ async fn revoke_organization_user( _revoke_organization_user(org_id, org_user_id, &headers, &mut conn).await } +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +struct OrgBulkRevokeData { + ids: Option>, +} + #[put("/organizations//users/revoke", data = "")] async fn bulk_revoke_organization_user( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> Json { - let data = data.into_inner().data; + let data = data.into_inner(); let mut bulk_response = Vec::new(); - match data["Ids"].as_array() { + match data.ids { Some(org_users) => { for org_user_id in org_users { - let org_user_id = org_user_id.as_str().unwrap_or_default(); - let err_msg = match _revoke_organization_user(org_id, org_user_id, &headers, &mut conn).await { + let err_msg = match _revoke_organization_user(org_id, &org_user_id, &headers, &mut conn).await { Ok(_) => String::new(), Err(e) => format!("{e:?}"), }; bulk_response.push(json!( { - "Object": "OrganizationUserBulkResponseModel", - "Id": org_user_id, - "Error": err_msg + "object": "OrganizationUserBulkResponseModel", + "id": org_user_id, + "error": err_msg } )); } @@ -2014,9 +2032,9 @@ async fn bulk_revoke_organization_user( } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -2075,7 +2093,7 @@ async fn activate_organization_user( #[put("/organizations//users/activate", data = "")] async fn bulk_activate_organization_user( org_id: &str, - data: JsonUpcase, + data: Json, headers: 
AdminHeaders, conn: DbConn, ) -> Json { @@ -2095,38 +2113,32 @@ async fn restore_organization_user( #[put("/organizations//users/restore", data = "")] async fn bulk_restore_organization_user( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> Json { - let data = data.into_inner().data; + let data = data.into_inner(); let mut bulk_response = Vec::new(); - match data["Ids"].as_array() { - Some(org_users) => { - for org_user_id in org_users { - let org_user_id = org_user_id.as_str().unwrap_or_default(); - let err_msg = match _restore_organization_user(org_id, org_user_id, &headers, &mut conn).await { - Ok(_) => String::new(), - Err(e) => format!("{e:?}"), - }; + for org_user_id in data.ids { + let err_msg = match _restore_organization_user(org_id, &org_user_id, &headers, &mut conn).await { + Ok(_) => String::new(), + Err(e) => format!("{e:?}"), + }; - bulk_response.push(json!( - { - "Object": "OrganizationUserBulkResponseModel", - "Id": org_user_id, - "Error": err_msg - } - )); + bulk_response.push(json!( + { + "object": "OrganizationUserBulkResponseModel", + "id": org_user_id, + "error": err_msg } - } - None => error!("No users to restore"), + )); } Json(json!({ - "Data": bulk_response, - "Object": "list", - "ContinuationToken": null + "data": bulk_response, + "object": "list", + "continuationToken": null })) } @@ -2151,10 +2163,14 @@ async fn _restore_organization_user( match OrgPolicy::is_user_allowed(&user_org.user_uuid, org_id, false, conn).await { Ok(_) => {} Err(OrgPolicyErr::TwoFactorMissing) => { - err!("You cannot restore this user because it has no two-step login method activated"); + if CONFIG.email_2fa_auto_fallback() { + two_factor::email::find_and_activate_email_2fa(&user_org.user_uuid, conn).await?; + } else { + err!("You cannot restore this user because they have not setup 2FA"); + } } Err(OrgPolicyErr::SingleOrgEnforced) => { - err!("You cannot restore this user because it is a member of an organization 
which forbids it"); + err!("You cannot restore this user because they are a member of an organization which forbids it"); } } } @@ -2196,35 +2212,35 @@ async fn get_groups(org_id: &str, _headers: ManagerHeadersLoose, mut conn: DbCon }; Ok(Json(json!({ - "Data": groups, - "Object": "list", - "ContinuationToken": null, + "data": groups, + "object": "list", + "continuationToken": null, }))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct GroupRequest { - Name: String, - AccessAll: Option, - ExternalId: Option, - Collections: Vec, - Users: Vec, + name: String, + access_all: Option, + external_id: Option, + collections: Vec, + users: Vec, } impl GroupRequest { pub fn to_group(&self, organizations_uuid: &str) -> Group { Group::new( String::from(organizations_uuid), - self.Name.clone(), - self.AccessAll.unwrap_or(false), - self.ExternalId.clone(), + self.name.clone(), + self.access_all.unwrap_or(false), + self.external_id.clone(), ) } pub fn update_group(&self, mut group: Group) -> Group { - group.name = self.Name.clone(); - group.access_all = self.AccessAll.unwrap_or(false); + group.name.clone_from(&self.name); + group.access_all = self.access_all.unwrap_or(false); // Group Updates do not support changing the external_id // These input fields are in a disabled state, and can only be updated/added via ldap_import @@ -2233,31 +2249,31 @@ impl GroupRequest { } #[derive(Deserialize, Serialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct SelectionReadOnly { - Id: String, - ReadOnly: bool, - HidePasswords: bool, + id: String, + read_only: bool, + hide_passwords: bool, } impl SelectionReadOnly { pub fn to_collection_group(&self, groups_uuid: String) -> CollectionGroup { - CollectionGroup::new(self.Id.clone(), groups_uuid, self.ReadOnly, self.HidePasswords) + CollectionGroup::new(self.id.clone(), groups_uuid, self.read_only, self.hide_passwords) } pub fn 
to_collection_group_details_read_only(collection_group: &CollectionGroup) -> SelectionReadOnly { SelectionReadOnly { - Id: collection_group.groups_uuid.clone(), - ReadOnly: collection_group.read_only, - HidePasswords: collection_group.hide_passwords, + id: collection_group.groups_uuid.clone(), + read_only: collection_group.read_only, + hide_passwords: collection_group.hide_passwords, } } pub fn to_collection_user_details_read_only(collection_user: &CollectionUser) -> SelectionReadOnly { SelectionReadOnly { - Id: collection_user.user_uuid.clone(), - ReadOnly: collection_user.read_only, - HidePasswords: collection_user.hide_passwords, + id: collection_user.user_uuid.clone(), + read_only: collection_user.read_only, + hide_passwords: collection_user.hide_passwords, } } @@ -2270,7 +2286,7 @@ impl SelectionReadOnly { async fn post_group( org_id: &str, group_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, conn: DbConn, ) -> JsonResult { @@ -2278,17 +2294,12 @@ async fn post_group( } #[post("/organizations//groups", data = "")] -async fn post_groups( - org_id: &str, - headers: AdminHeaders, - data: JsonUpcase, - mut conn: DbConn, -) -> JsonResult { +async fn post_groups(org_id: &str, headers: AdminHeaders, data: Json, mut conn: DbConn) -> JsonResult { if !CONFIG.org_groups_enabled() { err!("Group support is disabled"); } - let group_request = data.into_inner().data; + let group_request = data.into_inner(); let group = group_request.to_group(org_id); log_event( @@ -2302,14 +2313,14 @@ async fn post_groups( ) .await; - add_update_group(group, group_request.Collections, group_request.Users, org_id, &headers, &mut conn).await + add_update_group(group, group_request.collections, group_request.users, org_id, &headers, &mut conn).await } #[put("/organizations//groups/", data = "")] async fn put_group( org_id: &str, group_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> JsonResult { @@ -2322,7 +2333,7 @@ async fn 
put_group( None => err!("Group not found"), }; - let group_request = data.into_inner().data; + let group_request = data.into_inner(); let updated_group = group_request.update_group(group); CollectionGroup::delete_all_by_group(group_id, &mut conn).await?; @@ -2339,7 +2350,7 @@ async fn put_group( ) .await; - add_update_group(updated_group, group_request.Collections, group_request.Users, org_id, &headers, &mut conn).await + add_update_group(updated_group, group_request.collections, group_request.users, org_id, &headers, &mut conn).await } async fn add_update_group( @@ -2374,11 +2385,11 @@ async fn add_update_group( } Ok(Json(json!({ - "Id": group.uuid, - "OrganizationId": group.organizations_uuid, - "Name": group.name, - "AccessAll": group.access_all, - "ExternalId": group.external_id + "id": group.uuid, + "organizationId": group.organizations_uuid, + "name": group.name, + "accessAll": group.access_all, + "externalId": group.external_id }))) } @@ -2433,7 +2444,7 @@ async fn _delete_group(org_id: &str, group_id: &str, headers: &AdminHeaders, con #[delete("/organizations//groups", data = "")] async fn bulk_delete_groups( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> EmptyResult { @@ -2441,9 +2452,9 @@ async fn bulk_delete_groups( err!("Group support is disabled"); } - let data: OrgBulkIds = data.into_inner().data; + let data: OrgBulkIds = data.into_inner(); - for group_id in data.Ids { + for group_id in data.ids { _delete_group(org_id, &group_id, &headers, &mut conn).await? 
} Ok(()) @@ -2488,7 +2499,7 @@ async fn put_group_users( org_id: &str, group_id: &str, headers: AdminHeaders, - data: JsonVec, + data: Json>, mut conn: DbConn, ) -> EmptyResult { if !CONFIG.org_groups_enabled() { @@ -2540,16 +2551,16 @@ async fn get_user_groups(_org_id: &str, user_id: &str, _headers: AdminHeaders, m } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrganizationUserUpdateGroupsRequest { - GroupIds: Vec, + group_ids: Vec, } #[post("/organizations//users//groups", data = "")] async fn post_user_groups( org_id: &str, org_user_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, conn: DbConn, ) -> EmptyResult { @@ -2560,7 +2571,7 @@ async fn post_user_groups( async fn put_user_groups( org_id: &str, org_user_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, mut conn: DbConn, ) -> EmptyResult { @@ -2579,8 +2590,8 @@ async fn put_user_groups( GroupUser::delete_all_by_user(org_user_id, &mut conn).await?; - let assigned_group_ids = data.into_inner().data; - for assigned_group_id in assigned_group_ids.GroupIds { + let assigned_group_ids = data.into_inner(); + for assigned_group_id in assigned_group_ids.group_ids { let mut group_user = GroupUser::new(assigned_group_id.clone(), String::from(org_user_id)); group_user.save(&mut conn).await?; } @@ -2655,18 +2666,18 @@ async fn delete_group_user( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrganizationUserResetPasswordEnrollmentRequest { - ResetPasswordKey: Option, - MasterPasswordHash: Option, - Otp: Option, + reset_password_key: Option, + master_password_hash: Option, + otp: Option, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrganizationUserResetPasswordRequest { - NewMasterPasswordHash: String, - Key: String, + new_master_password_hash: String, + key: String, } #[get("/organizations//keys")] @@ -2677,9 +2688,9 @@ async fn 
get_organization_keys(org_id: &str, mut conn: DbConn) -> JsonResult { }; Ok(Json(json!({ - "Object": "organizationKeys", - "PublicKey": org.public_key, - "PrivateKey": org.private_key, + "object": "organizationKeys", + "publicKey": org.public_key, + "privateKey": org.private_key, }))) } @@ -2688,7 +2699,7 @@ async fn put_reset_password( org_id: &str, org_user_id: &str, headers: AdminHeaders, - data: JsonUpcase, + data: Json, mut conn: DbConn, nt: Notify<'_>, ) -> EmptyResult { @@ -2722,10 +2733,10 @@ async fn put_reset_password( err!(format!("Error sending user reset password email: {e:#?}")); } - let reset_request = data.into_inner().data; + let reset_request = data.into_inner(); let mut user = user; - user.set_password(reset_request.NewMasterPasswordHash.as_str(), Some(reset_request.Key), true, None); + user.set_password(reset_request.new_master_password_hash.as_str(), Some(reset_request.key), true, None); user.save(&mut conn).await?; nt.send_logout(&user, None).await; @@ -2770,13 +2781,13 @@ async fn get_reset_password_details( // https://github.com/bitwarden/server/blob/3b50ccb9f804efaacdc46bed5b60e5b28eddefcf/src/Api/Models/Response/Organizations/OrganizationUserResponseModel.cs#L111 Ok(Json(json!({ - "Object": "organizationUserResetPasswordDetails", - "Kdf":user.client_kdf_type, - "KdfIterations":user.client_kdf_iter, - "KdfMemory":user.client_kdf_memory, - "KdfParallelism":user.client_kdf_parallelism, - "ResetPasswordKey":org_user.reset_password_key, - "EncryptedPrivateKey":org.private_key, + "object": "organizationUserResetPasswordDetails", + "kdf":user.client_kdf_type, + "kdfIterations":user.client_kdf_iter, + "kdfMemory":user.client_kdf_memory, + "kdfParallelism":user.client_kdf_parallelism, + "resetPasswordKey":org_user.reset_password_key, + "encryptedPrivateKey":org.private_key, }))) } @@ -2824,7 +2835,7 @@ async fn put_reset_password_enrollment( org_id: &str, org_user_id: &str, headers: Headers, - data: JsonUpcase, + data: Json, mut conn: DbConn, ) -> 
EmptyResult { let mut org_user = match UserOrganization::find_by_user_and_org(&headers.user.uuid, org_id, &mut conn).await { @@ -2834,23 +2845,24 @@ async fn put_reset_password_enrollment( check_reset_password_applicable(org_id, &mut conn).await?; - let reset_request = data.into_inner().data; + let reset_request = data.into_inner(); - if reset_request.ResetPasswordKey.is_none() && OrgPolicy::org_is_reset_password_auto_enroll(org_id, &mut conn).await + if reset_request.reset_password_key.is_none() + && OrgPolicy::org_is_reset_password_auto_enroll(org_id, &mut conn).await { err!("Reset password can't be withdrawed due to an enterprise policy"); } - if reset_request.ResetPasswordKey.is_some() { + if reset_request.reset_password_key.is_some() { PasswordOrOtpData { - MasterPasswordHash: reset_request.MasterPasswordHash, - Otp: reset_request.Otp, + master_password_hash: reset_request.master_password_hash, + otp: reset_request.otp, } .validate(&headers.user, true, &mut conn) .await?; } - org_user.reset_password_key = reset_request.ResetPasswordKey; + org_user.reset_password_key = reset_request.reset_password_key; org_user.save(&mut conn).await?; let log_id = if org_user.reset_password_key.is_some() { @@ -2914,12 +2926,12 @@ async fn get_org_export(org_id: &str, headers: AdminHeaders, mut conn: DbConn) - async fn _api_key( org_id: &str, - data: JsonUpcase, + data: Json, rotate: bool, headers: AdminHeaders, mut conn: DbConn, ) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; // Validate the admin users password/otp @@ -2943,21 +2955,21 @@ async fn _api_key( }; Ok(Json(json!({ - "ApiKey": org_api_key.api_key, - "RevisionDate": crate::util::format_date(&org_api_key.revision_date), - "Object": "apiKey", + "apiKey": org_api_key.api_key, + "revisionDate": crate::util::format_date(&org_api_key.revision_date), + "object": "apiKey", }))) } #[post("/organizations//api-key", data = "")] 
-async fn api_key(org_id: &str, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> JsonResult { +async fn api_key(org_id: &str, data: Json, headers: AdminHeaders, conn: DbConn) -> JsonResult { _api_key(org_id, data, false, headers, conn).await } #[post("/organizations//rotate-api-key", data = "")] async fn rotate_api_key( org_id: &str, - data: JsonUpcase, + data: Json, headers: AdminHeaders, conn: DbConn, ) -> JsonResult { diff --git a/src/api/core/public.rs b/src/api/core/public.rs index 74f79a3e..0cdcbb63 100644 --- a/src/api/core/public.rs +++ b/src/api/core/public.rs @@ -1,13 +1,14 @@ use chrono::Utc; use rocket::{ request::{self, FromRequest, Outcome}, + serde::json::Json, Request, Route, }; use std::collections::HashSet; use crate::{ - api::{EmptyResult, JsonUpcase}, + api::EmptyResult, auth, db::{models::*, DbConn}, mail, CONFIG, @@ -18,43 +19,43 @@ pub fn routes() -> Vec { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgImportGroupData { - Name: String, - ExternalId: String, - MemberExternalIds: Vec, + name: String, + external_id: String, + member_external_ids: Vec, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgImportUserData { - Email: String, - ExternalId: String, - Deleted: bool, + email: String, + external_id: String, + deleted: bool, } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct OrgImportData { - Groups: Vec, - Members: Vec, - OverwriteExisting: bool, - // LargeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more then 2000 users or groups without the flag set. + groups: Vec, + members: Vec, + overwrite_existing: bool, + // largeImport: bool, // For now this will not be used, upstream uses this to prevent syncs of more then 2000 users or groups without the flag set. 
} #[post("/public/organization/import", data = "")] -async fn ldap_import(data: JsonUpcase, token: PublicToken, mut conn: DbConn) -> EmptyResult { +async fn ldap_import(data: Json, token: PublicToken, mut conn: DbConn) -> EmptyResult { // Most of the logic for this function can be found here // https://github.com/bitwarden/server/blob/fd892b2ff4547648a276734fb2b14a8abae2c6f5/src/Core/Services/Implementations/OrganizationService.cs#L1797 let org_id = token.0; - let data = data.into_inner().data; + let data = data.into_inner(); - for user_data in &data.Members { - if user_data.Deleted { + for user_data in &data.members { + if user_data.deleted { // If user is marked for deletion and it exists, revoke it if let Some(mut user_org) = - UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await + UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await { // Only revoke a user if it is not the last confirmed owner let revoked = if user_org.atype == UserOrgType::Owner @@ -72,27 +73,27 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co user_org.revoke() }; - let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone())); + let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone())); if revoked || ext_modified { user_org.save(&mut conn).await?; } } // If user is part of the organization, restore it } else if let Some(mut user_org) = - UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &mut conn).await + UserOrganization::find_by_email_and_org(&user_data.email, &org_id, &mut conn).await { let restored = user_org.restore(); - let ext_modified = user_org.set_external_id(Some(user_data.ExternalId.clone())); + let ext_modified = user_org.set_external_id(Some(user_data.external_id.clone())); if restored || ext_modified { user_org.save(&mut conn).await?; } } else { // If user is not part of the organization - let user = match 
User::find_by_mail(&user_data.Email, &mut conn).await { + let user = match User::find_by_mail(&user_data.email, &mut conn).await { Some(user) => user, // exists in vaultwarden None => { // User does not exist yet - let mut new_user = User::new(user_data.Email.clone()); + let mut new_user = User::new(user_data.email.clone()); new_user.save(&mut conn).await?; if !CONFIG.mail_enabled() { @@ -109,7 +110,7 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co }; let mut new_org_user = UserOrganization::new(user.uuid.clone(), org_id.clone()); - new_org_user.set_external_id(Some(user_data.ExternalId.clone())); + new_org_user.set_external_id(Some(user_data.external_id.clone())); new_org_user.access_all = false; new_org_user.atype = UserOrgType::User as i32; new_org_user.status = user_org_status; @@ -123,7 +124,7 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co }; mail::send_invite( - &user_data.Email, + &user_data.email, &user.uuid, Some(org_id.clone()), Some(new_org_user.uuid), @@ -136,12 +137,17 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co } if CONFIG.org_groups_enabled() { - for group_data in &data.Groups { - let group_uuid = match Group::find_by_external_id(&group_data.ExternalId, &mut conn).await { + for group_data in &data.groups { + let group_uuid = match Group::find_by_external_id_and_org(&group_data.external_id, &org_id, &mut conn).await + { Some(group) => group.uuid, None => { - let mut group = - Group::new(org_id.clone(), group_data.Name.clone(), false, Some(group_data.ExternalId.clone())); + let mut group = Group::new( + org_id.clone(), + group_data.name.clone(), + false, + Some(group_data.external_id.clone()), + ); group.save(&mut conn).await?; group.uuid } @@ -149,7 +155,7 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co GroupUser::delete_all_by_group(&group_uuid, &mut conn).await?; - for ext_id in &group_data.MemberExternalIds { + for ext_id in &group_data.member_external_ids { if 
let Some(user_org) = UserOrganization::find_by_external_id_and_org(ext_id, &org_id, &mut conn).await { let mut group_user = GroupUser::new(group_uuid.clone(), user_org.uuid.clone()); @@ -162,9 +168,9 @@ async fn ldap_import(data: JsonUpcase, token: PublicToken, mut co } // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) - if data.OverwriteExisting { + if data.overwrite_existing { // Generate a HashSet to quickly verify if a member is listed or not. - let sync_members: HashSet = data.Members.into_iter().map(|m| m.ExternalId).collect(); + let sync_members: HashSet = data.members.into_iter().map(|m| m.external_id).collect(); for user_org in UserOrganization::find_by_org(&org_id, &mut conn).await { if let Some(ref user_external_id) = user_org.external_id { if !sync_members.contains(user_external_id) { @@ -209,19 +215,15 @@ impl<'r> FromRequest<'r> for PublicToken { Err(_) => err_handler!("Invalid claim"), }; // Check if time is between claims.nbf and claims.exp - let time_now = Utc::now().naive_utc().timestamp(); + let time_now = Utc::now().timestamp(); if time_now < claims.nbf { err_handler!("Token issued in the future"); } if time_now > claims.exp { err_handler!("Token expired"); } - // Check if claims.iss is host|claims.scope[0] - let host = match auth::Host::from_request(request).await { - Outcome::Success(host) => host, - _ => err_handler!("Error getting Host"), - }; - let complete_host = format!("{}|{}", host.host, claims.scope[0]); + // Check if claims.iss is domain|claims.scope[0] + let complete_host = format!("{}|{}", CONFIG.domain_origin(), claims.scope[0]); if complete_host != claims.iss { err_handler!("Token not issued by this server"); } diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index 1bc6d00f..27aea95a 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -1,6 +1,6 @@ use std::path::Path; -use chrono::{DateTime, Duration, 
Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use num_traits::ToPrimitive; use rocket::form::Form; use rocket::fs::NamedFile; @@ -9,7 +9,7 @@ use rocket::serde::json::Json; use serde_json::Value; use crate::{ - api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, + api::{ApiResult, EmptyResult, JsonResult, Notify, UpdateType}, auth::{ClientIp, Headers, Host}, db::{models::*, DbConn, DbPool}, util::{NumberOrString, SafeString}, @@ -48,23 +48,26 @@ pub async fn purge_sends(pool: DbPool) { } #[derive(Deserialize)] -#[allow(non_snake_case)] -struct SendData { - Type: i32, - Key: String, - Password: Option, - MaxAccessCount: Option, - ExpirationDate: Option>, - DeletionDate: DateTime, - Disabled: bool, - HideEmail: Option, +#[serde(rename_all = "camelCase")] +pub struct SendData { + r#type: i32, + key: String, + password: Option, + max_access_count: Option, + expiration_date: Option>, + deletion_date: DateTime, + disabled: bool, + hide_email: Option, // Data field - Name: String, - Notes: Option, - Text: Option, - File: Option, - FileLength: Option, + name: String, + notes: Option, + text: Option, + file: Option, + file_length: Option, + + // Used for key rotations + pub id: Option, } /// Enforces the `Disable Send` policy. 
A non-owner/admin user belonging to @@ -93,7 +96,7 @@ async fn enforce_disable_send_policy(headers: &Headers, conn: &mut DbConn) -> Em /// Ref: https://bitwarden.com/help/article/policies/#send-options async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &mut DbConn) -> EmptyResult { let user_uuid = &headers.user.uuid; - let hide_email = data.HideEmail.unwrap_or(false); + let hide_email = data.hide_email.unwrap_or(false); if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await { err!( "Due to an Enterprise Policy, you are not allowed to hide your email address \ @@ -104,40 +107,40 @@ async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, c } fn create_send(data: SendData, user_uuid: String) -> ApiResult { - let data_val = if data.Type == SendType::Text as i32 { - data.Text - } else if data.Type == SendType::File as i32 { - data.File + let data_val = if data.r#type == SendType::Text as i32 { + data.text + } else if data.r#type == SendType::File as i32 { + data.file } else { err!("Invalid Send type") }; let data_str = if let Some(mut d) = data_val { - d.as_object_mut().and_then(|o| o.remove("Response")); + d.as_object_mut().and_then(|o| o.remove("response")); serde_json::to_string(&d)? } else { err!("Send data not provided"); }; - if data.DeletionDate > Utc::now() + Duration::days(31) { + if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() { err!( "You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again." 
); } - let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc()); + let mut send = Send::new(data.r#type, data.name, data_str, data.key, data.deletion_date.naive_utc()); send.user_uuid = Some(user_uuid); - send.notes = data.Notes; - send.max_access_count = match data.MaxAccessCount { + send.notes = data.notes; + send.max_access_count = match data.max_access_count { Some(m) => Some(m.into_i32()?), _ => None, }; - send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); - send.disabled = data.Disabled; - send.hide_email = data.HideEmail; - send.atype = data.Type; + send.expiration_date = data.expiration_date.map(|d| d.naive_utc()); + send.disabled = data.disabled; + send.hide_email = data.hide_email; + send.atype = data.r#type; - send.set_password(data.Password.as_deref()); + send.set_password(data.password.as_deref()); Ok(send) } @@ -148,9 +151,9 @@ async fn get_sends(headers: Headers, mut conn: DbConn) -> Json { let sends_json: Vec = sends.await.iter().map(|s| s.to_json()).collect(); Json(json!({ - "Data": sends_json, - "Object": "list", - "ContinuationToken": null + "data": sends_json, + "object": "list", + "continuationToken": null })) } @@ -169,13 +172,13 @@ async fn get_send(uuid: &str, headers: Headers, mut conn: DbConn) -> JsonResult } #[post("/sends", data = "")] -async fn post_send(data: JsonUpcase, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { +async fn post_send(data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { enforce_disable_send_policy(&headers, &mut conn).await?; - let data: SendData = data.into_inner().data; + let data: SendData = data.into_inner(); enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; - if data.Type == SendType::File as i32 { + if data.r#type == SendType::File as i32 { err!("File sends should use /api/sends/file") } @@ -195,7 +198,7 @@ async fn post_send(data: JsonUpcase, headers: Headers, mut conn: DbCon 
#[derive(FromForm)] struct UploadData<'f> { - model: Json>, + model: Json, data: TempFile<'f>, } @@ -215,7 +218,7 @@ async fn post_send_file(data: Form>, headers: Headers, mut conn: model, mut data, } = data.into_inner(); - let model = model.into_inner().data; + let model = model.into_inner(); let Some(size) = data.len().to_i64() else { err!("Invalid send size"); @@ -263,9 +266,9 @@ async fn post_send_file(data: Form>, headers: Headers, mut conn: let mut data_value: Value = serde_json::from_str(&send.data)?; if let Some(o) = data_value.as_object_mut() { - o.insert(String::from("Id"), Value::String(file_id)); - o.insert(String::from("Size"), Value::Number(size.into())); - o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size))); + o.insert(String::from("id"), Value::String(file_id)); + o.insert(String::from("size"), Value::Number(size.into())); + o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(size))); } send.data = serde_json::to_string(&data_value)?; @@ -285,18 +288,18 @@ async fn post_send_file(data: Form>, headers: Headers, mut conn: // Upstream: https://github.com/bitwarden/server/blob/d0c793c95181dfb1b447eb450f85ba0bfd7ef643/src/Api/Controllers/SendsController.cs#L190 #[post("/sends/file/v2", data = "")] -async fn post_send_file_v2(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { +async fn post_send_file_v2(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { enforce_disable_send_policy(&headers, &mut conn).await?; - let data = data.into_inner().data; + let data = data.into_inner(); - if data.Type != SendType::File as i32 { + if data.r#type != SendType::File as i32 { err!("Send content is not a file"); } enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; - let file_length = match &data.FileLength { + let file_length = match &data.file_length { Some(m) => m.into_i64()?, _ => err!("Invalid send length"), }; @@ -331,9 +334,9 @@ async fn 
post_send_file_v2(data: JsonUpcase, headers: Headers, mut con let mut data_value: Value = serde_json::from_str(&send.data)?; if let Some(o) = data_value.as_object_mut() { - o.insert(String::from("Id"), Value::String(file_id.clone())); - o.insert(String::from("Size"), Value::Number(file_length.into())); - o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(file_length))); + o.insert(String::from("id"), Value::String(file_id.clone())); + o.insert(String::from("size"), Value::Number(file_length.into())); + o.insert(String::from("sizeName"), Value::String(crate::util::get_display_size(file_length))); } send.data = serde_json::to_string(&data_value)?; send.save(&mut conn).await?; @@ -392,15 +395,15 @@ async fn post_send_file_v2_data( } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct SendAccessData { - pub Password: Option, + pub password: Option, } #[post("/sends/access/", data = "")] async fn post_access( access_id: &str, - data: JsonUpcase, + data: Json, mut conn: DbConn, ip: ClientIp, nt: Notify<'_>, @@ -431,7 +434,7 @@ async fn post_access( } if send.password_hash.is_some() { - match data.into_inner().data.Password { + match data.into_inner().password { Some(ref p) if send.check_password(p) => { /* Nothing to do here */ } Some(_) => err!("Invalid password", format!("IP: {}.", ip.ip)), None => err_code!("Password not provided", format!("IP: {}.", ip.ip), 401), @@ -461,7 +464,7 @@ async fn post_access( async fn post_access_file( send_id: &str, file_id: &str, - data: JsonUpcase, + data: Json, host: Host, mut conn: DbConn, nt: Notify<'_>, @@ -492,7 +495,7 @@ async fn post_access_file( } if send.password_hash.is_some() { - match data.into_inner().data.Password { + match data.into_inner().password { Some(ref p) if send.check_password(p) => { /* Nothing to do here */ } Some(_) => err!("Invalid password."), None => err_code!("Password not provided", 401), @@ -515,9 +518,9 @@ async fn 
post_access_file( let token_claims = crate::auth::generate_send_claims(send_id, file_id); let token = crate::auth::encode_jwt(&token_claims); Ok(Json(json!({ - "Object": "send-fileDownload", - "Id": file_id, - "Url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token) + "object": "send-fileDownload", + "id": file_id, + "url": format!("{}/api/sends/{}/{}?t={}", &host.host, send_id, file_id, token) }))) } @@ -532,16 +535,10 @@ async fn download_send(send_id: SafeString, file_id: SafeString, t: &str) -> Opt } #[put("/sends/", data = "")] -async fn put_send( - id: &str, - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, - nt: Notify<'_>, -) -> JsonResult { +async fn put_send(id: &str, data: Json, headers: Headers, mut conn: DbConn, nt: Notify<'_>) -> JsonResult { enforce_disable_send_policy(&headers, &mut conn).await?; - let data: SendData = data.into_inner().data; + let data: SendData = data.into_inner(); enforce_disable_hide_email_policy(&data, &headers, &mut conn).await?; let mut send = match Send::find_by_uuid(id, &mut conn).await { @@ -549,19 +546,38 @@ async fn put_send( None => err!("Send not found"), }; + update_send_from_data(&mut send, data, &headers, &mut conn, &nt, UpdateType::SyncSendUpdate).await?; + + Ok(Json(send.to_json())) +} + +pub async fn update_send_from_data( + send: &mut Send, + data: SendData, + headers: &Headers, + conn: &mut DbConn, + nt: &Notify<'_>, + ut: UpdateType, +) -> EmptyResult { if send.user_uuid.as_ref() != Some(&headers.user.uuid) { err!("Send is not owned by user") } - if send.atype != data.Type { + if send.atype != data.r#type { err!("Sends can't change type") } + if data.deletion_date > Utc::now() + TimeDelta::try_days(31).unwrap() { + err!( + "You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again." 
+ ); + } + // When updating a file Send, we receive nulls in the File field, as it's immutable, // so we only need to update the data field in the Text case - if data.Type == SendType::Text as i32 { - let data_str = if let Some(mut d) = data.Text { - d.as_object_mut().and_then(|d| d.remove("Response")); + if data.r#type == SendType::Text as i32 { + let data_str = if let Some(mut d) = data.text { + d.as_object_mut().and_then(|d| d.remove("response")); serde_json::to_string(&d)? } else { err!("Send data not provided"); @@ -569,39 +585,28 @@ async fn put_send( send.data = data_str; } - if data.DeletionDate > Utc::now() + Duration::days(31) { - err!( - "You cannot have a Send with a deletion date that far into the future. Adjust the Deletion Date to a value less than 31 days from now and try again." - ); - } - send.name = data.Name; - send.akey = data.Key; - send.deletion_date = data.DeletionDate.naive_utc(); - send.notes = data.Notes; - send.max_access_count = match data.MaxAccessCount { + send.name = data.name; + send.akey = data.key; + send.deletion_date = data.deletion_date.naive_utc(); + send.notes = data.notes; + send.max_access_count = match data.max_access_count { Some(m) => Some(m.into_i32()?), _ => None, }; - send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); - send.hide_email = data.HideEmail; - send.disabled = data.Disabled; + send.expiration_date = data.expiration_date.map(|d| d.naive_utc()); + send.hide_email = data.hide_email; + send.disabled = data.disabled; // Only change the value if it's present - if let Some(password) = data.Password { + if let Some(password) = data.password { send.set_password(Some(&password)); } - send.save(&mut conn).await?; - nt.send_send_update( - UpdateType::SyncSendUpdate, - &send, - &send.update_users_revision(&mut conn).await, - &headers.device.uuid, - &mut conn, - ) - .await; - - Ok(Json(send.to_json())) + send.save(conn).await?; + if ut != UpdateType::None { + nt.send_send_update(ut, send, 
&send.update_users_revision(conn).await, &headers.device.uuid, conn).await; + } + Ok(()) } #[delete("/sends/")] diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index e6e283e9..9d4bd480 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -3,10 +3,7 @@ use rocket::serde::json::Json; use rocket::Route; use crate::{ - api::{ - core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, - PasswordOrOtpData, - }, + api::{core::log_user_event, core::two_factor::_generate_recover_code, EmptyResult, JsonResult, PasswordOrOtpData}, auth::{ClientIp, Headers}, crypto, db::{ @@ -23,8 +20,8 @@ pub fn routes() -> Vec { } #[post("/two-factor/get-authenticator", data = "")] -async fn generate_authenticator(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn generate_authenticator(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -38,36 +35,32 @@ async fn generate_authenticator(data: JsonUpcase, headers: He }; Ok(Json(json!({ - "Enabled": enabled, - "Key": key, - "Object": "twoFactorAuthenticator" + "enabled": enabled, + "key": key, + "object": "twoFactorAuthenticator" }))) } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct EnableAuthenticatorData { - Key: String, - Token: NumberOrString, - MasterPasswordHash: Option, - Otp: Option, + key: String, + token: NumberOrString, + master_password_hash: Option, + otp: Option, } #[post("/two-factor/authenticator", data = "")] -async fn activate_authenticator( - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, -) -> JsonResult { - let data: EnableAuthenticatorData = data.into_inner().data; - 
let key = data.Key; - let token = data.Token.into_string(); +async fn activate_authenticator(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: EnableAuthenticatorData = data.into_inner(); + let key = data.key; + let token = data.token.into_string(); let mut user = headers.user; PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash, - Otp: data.Otp, + master_password_hash: data.master_password_hash, + otp: data.otp, } .validate(&user, true, &mut conn) .await?; @@ -90,18 +83,14 @@ async fn activate_authenticator( log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; Ok(Json(json!({ - "Enabled": true, - "Key": key, - "Object": "twoFactorAuthenticator" + "enabled": true, + "key": key, + "object": "twoFactorAuthenticator" }))) } #[put("/two-factor/authenticator", data = "")] -async fn activate_authenticator_put( - data: JsonUpcase, - headers: Headers, - conn: DbConn, -) -> JsonResult { +async fn activate_authenticator_put(data: Json, headers: Headers, conn: DbConn) -> JsonResult { activate_authenticator(data, headers, conn).await } @@ -156,8 +145,8 @@ pub async fn validate_totp_code( let time = (current_timestamp + step * 30i64) as u64; let generated = totp_custom::(30, 6, &decoded_secret, time); - // Check the the given code equals the generated and if the time_step is larger then the one last used. - if generated == totp_code && time_step > i64::from(twofactor.last_used) { + // Check the given code equals the generated and if the time_step is larger then the one last used. + if generated == totp_code && time_step > twofactor.last_used { // If the step does not equals 0 the time is drifted either server or client side. if step != 0 { warn!("TOTP Time drift detected. The step offset is {}", step); @@ -165,10 +154,10 @@ pub async fn validate_totp_code( // Save the last used time step so only totp time steps higher then this one are allowed. 
// This will also save a newly created twofactor if the code is correct. - twofactor.last_used = time_step as i32; + twofactor.last_used = time_step; twofactor.save(conn).await?; return Ok(()); - } else if generated == totp_code && time_step <= i64::from(twofactor.last_used) { + } else if generated == totp_code && time_step <= twofactor.last_used { warn!("This TOTP or a TOTP code within {} steps back or forward has already been used!", steps); err!( format!("Invalid TOTP code! Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip), diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index ea5589fb..c5bfa9e5 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -5,7 +5,7 @@ use rocket::Route; use crate::{ api::{ - core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, + core::log_user_event, core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, PasswordOrOtpData, }, auth::Headers, @@ -92,8 +92,8 @@ impl DuoStatus { const DISABLED_MESSAGE_DEFAULT: &str = ""; #[post("/two-factor/get-duo", data = "")] -async fn get_duo(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn get_duo(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -109,16 +109,16 @@ async fn get_duo(data: JsonUpcase, headers: Headers, mut conn let json = if let Some(data) = data { json!({ - "Enabled": enabled, - "Host": data.host, - "SecretKey": data.sk, - "IntegrationKey": data.ik, - "Object": "twoFactorDuo" + "enabled": enabled, + "host": data.host, + "secretKey": data.sk, + "integrationKey": data.ik, + "object": "twoFactorDuo" }) } else { json!({ - "Enabled": enabled, - "Object": "twoFactorDuo" + "enabled": enabled, + "object": 
"twoFactorDuo" }) }; @@ -126,21 +126,21 @@ async fn get_duo(data: JsonUpcase, headers: Headers, mut conn } #[derive(Deserialize)] -#[allow(non_snake_case, dead_code)] +#[serde(rename_all = "camelCase")] struct EnableDuoData { - Host: String, - SecretKey: String, - IntegrationKey: String, - MasterPasswordHash: Option, - Otp: Option, + host: String, + secret_key: String, + integration_key: String, + master_password_hash: Option, + otp: Option, } impl From for DuoData { fn from(d: EnableDuoData) -> Self { Self { - host: d.Host, - ik: d.IntegrationKey, - sk: d.SecretKey, + host: d.host, + ik: d.integration_key, + sk: d.secret_key, } } } @@ -151,17 +151,17 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool { st.is_empty() || s == DISABLED_MESSAGE_DEFAULT } - !empty_or_default(&data.Host) && !empty_or_default(&data.SecretKey) && !empty_or_default(&data.IntegrationKey) + !empty_or_default(&data.host) && !empty_or_default(&data.secret_key) && !empty_or_default(&data.integration_key) } #[post("/two-factor/duo", data = "")] -async fn activate_duo(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: EnableDuoData = data.into_inner().data; +async fn activate_duo(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: EnableDuoData = data.into_inner(); let mut user = headers.user; PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash.clone(), - Otp: data.Otp.clone(), + master_password_hash: data.master_password_hash.clone(), + otp: data.otp.clone(), } .validate(&user, true, &mut conn) .await?; @@ -184,16 +184,16 @@ async fn activate_duo(data: JsonUpcase, headers: Headers, mut con log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; Ok(Json(json!({ - "Enabled": true, - "Host": data.host, - "SecretKey": data.sk, - "IntegrationKey": data.ik, - "Object": "twoFactorDuo" + "enabled": true, + "host": data.host, + "secretKey": data.sk, + 
"integrationKey": data.ik, + "object": "twoFactorDuo" }))) } #[put("/two-factor/duo", data = "")] -async fn activate_duo_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_duo_put(data: Json, headers: Headers, conn: DbConn) -> JsonResult { activate_duo(data, headers, conn).await } diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index e1ee847f..a4a69240 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -1,16 +1,16 @@ -use chrono::{Duration, NaiveDateTime, Utc}; +use chrono::{DateTime, TimeDelta, Utc}; use rocket::serde::json::Json; use rocket::Route; use crate::{ api::{ core::{log_user_event, two_factor::_generate_recover_code}, - EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, + EmptyResult, JsonResult, PasswordOrOtpData, }, auth::Headers, crypto, db::{ - models::{EventType, TwoFactor, TwoFactorType}, + models::{EventType, TwoFactor, TwoFactorType, User}, DbConn, }, error::{Error, MapResult}, @@ -22,28 +22,28 @@ pub fn routes() -> Vec { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct SendEmailLoginData { - Email: String, - MasterPasswordHash: String, + email: String, + master_password_hash: String, } /// User is trying to login and wants to use email 2FA. /// Does not require Bearer token #[post("/two-factor/send-email-login", data = "")] // JsonResult -async fn send_email_login(data: JsonUpcase, mut conn: DbConn) -> EmptyResult { - let data: SendEmailLoginData = data.into_inner().data; +async fn send_email_login(data: Json, mut conn: DbConn) -> EmptyResult { + let data: SendEmailLoginData = data.into_inner(); use crate::db::models::User; // Get the user - let user = match User::find_by_mail(&data.Email, &mut conn).await { + let user = match User::find_by_mail(&data.email, &mut conn).await { Some(user) => user, None => err!("Username or password is incorrect. 
Try again."), }; // Check password - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Username or password is incorrect. Try again.") } @@ -76,8 +76,8 @@ pub async fn send_token(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { /// When user clicks on Manage email 2FA show the user the related information #[post("/two-factor/get-email", data = "")] -async fn get_email(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn get_email(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -92,30 +92,30 @@ async fn get_email(data: JsonUpcase, headers: Headers, mut co }; Ok(Json(json!({ - "Email": mfa_email, - "Enabled": enabled, - "Object": "twoFactorEmail" + "email": mfa_email, + "enabled": enabled, + "object": "twoFactorEmail" }))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct SendEmailData { /// Email where 2FA codes will be sent to, can be different than user email account. - Email: String, - MasterPasswordHash: Option, - Otp: Option, + email: String, + master_password_hash: Option, + otp: Option, } /// Send a verification email to the specified email address to check whether it exists/belongs to user. 
#[post("/two-factor/send-email", data = "")] -async fn send_email(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { - let data: SendEmailData = data.into_inner().data; +async fn send_email(data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { + let data: SendEmailData = data.into_inner(); let user = headers.user; PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash, - Otp: data.Otp, + master_password_hash: data.master_password_hash, + otp: data.otp, } .validate(&user, false, &mut conn) .await?; @@ -131,7 +131,7 @@ async fn send_email(data: JsonUpcase, headers: Headers, mut conn: } let generated_token = crypto::generate_email_token(CONFIG.email_token_size()); - let twofactor_data = EmailTokenData::new(data.Email, generated_token); + let twofactor_data = EmailTokenData::new(data.email, generated_token); // Uses EmailVerificationChallenge as type to show that it's not verified yet. let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); @@ -143,24 +143,24 @@ async fn send_email(data: JsonUpcase, headers: Headers, mut conn: } #[derive(Deserialize, Serialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EmailData { - Email: String, - Token: String, - MasterPasswordHash: Option, - Otp: Option, + email: String, + token: String, + master_password_hash: Option, + otp: Option, } /// Verify email belongs to user and can be used for 2FA email codes. 
#[put("/two-factor/email", data = "")] -async fn email(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: EmailData = data.into_inner().data; +async fn email(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: EmailData = data.into_inner(); let mut user = headers.user; // This is the last step in the verification process, delete the otp directly afterwards PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash, - Otp: data.Otp, + master_password_hash: data.master_password_hash, + otp: data.otp, } .validate(&user, true, &mut conn) .await?; @@ -176,7 +176,7 @@ async fn email(data: JsonUpcase, headers: Headers, mut conn: DbConn) _ => err!("No token available"), }; - if !crypto::ct_eq(issued_token, data.Token) { + if !crypto::ct_eq(issued_token, data.token) { err!("Token is invalid") } @@ -190,9 +190,9 @@ async fn email(data: JsonUpcase, headers: Headers, mut conn: DbConn) log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; Ok(Json(json!({ - "Email": email_data.email, - "Enabled": "true", - "Object": "twoFactorEmail" + "email": email_data.email, + "enabled": "true", + "object": "twoFactorEmail" }))) } @@ -232,9 +232,9 @@ pub async fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, c twofactor.data = email_data.to_json(); twofactor.save(conn).await?; - let date = NaiveDateTime::from_timestamp_opt(email_data.token_sent, 0).expect("Email token timestamp invalid."); + let date = DateTime::from_timestamp(email_data.token_sent, 0).expect("Email token timestamp invalid.").naive_utc(); let max_time = CONFIG.email_expiration_time() as i64; - if date + Duration::seconds(max_time) < Utc::now().naive_utc() { + if date + TimeDelta::try_seconds(max_time).unwrap() < Utc::now().naive_utc() { err!( "Token has expired", ErrorEvent { @@ -265,14 +265,14 @@ impl EmailTokenData { EmailTokenData { email, last_token: Some(token), - token_sent: 
Utc::now().naive_utc().timestamp(), + token_sent: Utc::now().timestamp(), attempts: 0, } } pub fn set_token(&mut self, token: String) { self.last_token = Some(token); - self.token_sent = Utc::now().naive_utc().timestamp(); + self.token_sent = Utc::now().timestamp(); } pub fn reset_token(&mut self) { @@ -297,6 +297,15 @@ impl EmailTokenData { } } +pub async fn activate_email_2fa(user: &User, conn: &mut DbConn) -> EmptyResult { + if user.verified_at.is_none() { + err!("Auto-enabling of email 2FA failed because the users email address has not been verified!"); + } + let twofactor_data = EmailTokenData::new(user.email.clone(), String::new()); + let twofactor = TwoFactor::new(user.uuid.clone(), TwoFactorType::Email, twofactor_data.to_json()); + twofactor.save(conn).await +} + /// Takes an email address and obscures it by replacing it with asterisks except two characters. pub fn obscure_email(email: &str) -> String { let split: Vec<&str> = email.rsplitn(2, '@').collect(); @@ -318,6 +327,14 @@ pub fn obscure_email(email: &str) -> String { format!("{}@{}", new_name, &domain) } +pub async fn find_and_activate_email_2fa(user_uuid: &str, conn: &mut DbConn) -> EmptyResult { + if let Some(user) = User::find_by_uuid(user_uuid, conn).await { + activate_email_2fa(&user, conn).await + } else { + err!("User not found!"); + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/api/core/two_factor/mod.rs b/src/api/core/two_factor/mod.rs index a40c23e6..2fbcfb3b 100644 --- a/src/api/core/two_factor/mod.rs +++ b/src/api/core/two_factor/mod.rs @@ -1,4 +1,4 @@ -use chrono::{Duration, Utc}; +use chrono::{TimeDelta, Utc}; use data_encoding::BASE32; use rocket::serde::json::Json; use rocket::Route; @@ -7,7 +7,7 @@ use serde_json::Value; use crate::{ api::{ core::{log_event, log_user_event}, - EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, + EmptyResult, JsonResult, PasswordOrOtpData, }, auth::{ClientHeaders, Headers}, crypto, @@ -50,52 +50,52 @@ async fn 
get_twofactor(headers: Headers, mut conn: DbConn) -> Json { let twofactors_json: Vec = twofactors.iter().map(TwoFactor::to_json_provider).collect(); Json(json!({ - "Data": twofactors_json, - "Object": "list", - "ContinuationToken": null, + "data": twofactors_json, + "object": "list", + "continuationToken": null, })) } #[post("/two-factor/get-recover", data = "")] -async fn get_recover(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn get_recover(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, true, &mut conn).await?; Ok(Json(json!({ - "Code": user.totp_recover, - "Object": "twoFactorRecover" + "code": user.totp_recover, + "object": "twoFactorRecover" }))) } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct RecoverTwoFactor { - MasterPasswordHash: String, - Email: String, - RecoveryCode: String, + master_password_hash: String, + email: String, + recovery_code: String, } #[post("/two-factor/recover", data = "")] -async fn recover(data: JsonUpcase, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult { - let data: RecoverTwoFactor = data.into_inner().data; +async fn recover(data: Json, client_headers: ClientHeaders, mut conn: DbConn) -> JsonResult { + let data: RecoverTwoFactor = data.into_inner(); use crate::db::models::User; // Get the user - let mut user = match User::find_by_mail(&data.Email, &mut conn).await { + let mut user = match User::find_by_mail(&data.email, &mut conn).await { Some(user) => user, None => err!("Username or password is incorrect. Try again."), }; // Check password - if !user.check_valid_password(&data.MasterPasswordHash) { + if !user.check_valid_password(&data.master_password_hash) { err!("Username or password is incorrect. 
Try again.") } // Check if recovery code is correct - if !user.check_valid_recovery_code(&data.RecoveryCode) { + if !user.check_valid_recovery_code(&data.recovery_code) { err!("Recovery code is incorrect. Try again.") } @@ -127,27 +127,27 @@ async fn _generate_recover_code(user: &mut User, conn: &mut DbConn) { } #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct DisableTwoFactorData { - MasterPasswordHash: Option, - Otp: Option, - Type: NumberOrString, + master_password_hash: Option, + otp: Option, + r#type: NumberOrString, } #[post("/two-factor/disable", data = "")] -async fn disable_twofactor(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: DisableTwoFactorData = data.into_inner().data; +async fn disable_twofactor(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: DisableTwoFactorData = data.into_inner(); let user = headers.user; // Delete directly after a valid token has been provided PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash, - Otp: data.Otp, + master_password_hash: data.master_password_hash, + otp: data.otp, } .validate(&user, true, &mut conn) .await?; - let type_ = data.Type.into_i32()?; + let type_ = data.r#type.into_i32()?; if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &mut conn).await { twofactor.delete(&mut conn).await?; @@ -160,14 +160,14 @@ async fn disable_twofactor(data: JsonUpcase, headers: Head } Ok(Json(json!({ - "Enabled": false, - "Type": type_, - "Object": "twoFactorProvider" + "enabled": false, + "type": type_, + "object": "twoFactorProvider" }))) } #[put("/two-factor/disable", data = "")] -async fn disable_twofactor_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn disable_twofactor_put(data: Json, headers: Headers, conn: DbConn) -> JsonResult { disable_twofactor(data, headers, conn).await } @@ -259,7 +259,7 @@ pub async fn send_incomplete_2fa_notifications(pool: 
DbPool) { }; let now = Utc::now().naive_utc(); - let time_limit = Duration::minutes(CONFIG.incomplete_2fa_time_limit()); + let time_limit = TimeDelta::try_minutes(CONFIG.incomplete_2fa_time_limit()).unwrap(); let time_before = now - time_limit; let incomplete_logins = TwoFactorIncomplete::find_logins_before(&time_before, &mut conn).await; for login in incomplete_logins { diff --git a/src/api/core/two_factor/protected_actions.rs b/src/api/core/two_factor/protected_actions.rs index 09c7ede0..8bfc59c1 100644 --- a/src/api/core/two_factor/protected_actions.rs +++ b/src/api/core/two_factor/protected_actions.rs @@ -1,8 +1,8 @@ -use chrono::{Duration, NaiveDateTime, Utc}; -use rocket::Route; +use chrono::{DateTime, TimeDelta, Utc}; +use rocket::{serde::json::Json, Route}; use crate::{ - api::{EmptyResult, JsonUpcase}, + api::EmptyResult, auth::Headers, crypto, db::{ @@ -18,7 +18,7 @@ pub fn routes() -> Vec { } /// Data stored in the TwoFactor table in the db -#[derive(Serialize, Deserialize, Debug)] +#[derive(Debug, Serialize, Deserialize)] pub struct ProtectedActionData { /// Token issued to validate the protected action pub token: String, @@ -32,7 +32,7 @@ impl ProtectedActionData { pub fn new(token: String) -> Self { Self { token, - token_sent: Utc::now().naive_utc().timestamp(), + token_sent: Utc::now().timestamp(), attempts: 0, } } @@ -82,23 +82,24 @@ async fn request_otp(headers: Headers, mut conn: DbConn) -> EmptyResult { } #[derive(Deserialize, Serialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct ProtectedActionVerify { - OTP: String, + #[serde(rename = "OTP", alias = "otp")] + otp: String, } #[post("/accounts/verify-otp", data = "")] -async fn verify_otp(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> EmptyResult { +async fn verify_otp(data: Json, headers: Headers, mut conn: DbConn) -> EmptyResult { if !CONFIG.mail_enabled() { err!("Email is disabled for this server. 
Either enable email or login using your master password instead of login via device."); } let user = headers.user; - let data: ProtectedActionVerify = data.into_inner().data; + let data: ProtectedActionVerify = data.into_inner(); // Delete the token after one validation attempt // This endpoint only gets called for the vault export, and doesn't need a second attempt - validate_protected_action_otp(&data.OTP, &user.uuid, true, &mut conn).await + validate_protected_action_otp(&data.otp, &user.uuid, true, &mut conn).await } pub async fn validate_protected_action_otp( @@ -122,9 +123,9 @@ pub async fn validate_protected_action_otp( // Check if the token has expired (Using the email 2fa expiration time) let date = - NaiveDateTime::from_timestamp_opt(pa_data.token_sent, 0).expect("Protected Action token timestamp invalid."); + DateTime::from_timestamp(pa_data.token_sent, 0).expect("Protected Action token timestamp invalid.").naive_utc(); let max_time = CONFIG.email_expiration_time() as i64; - if date + Duration::seconds(max_time) < Utc::now().naive_utc() { + if date + TimeDelta::try_seconds(max_time).unwrap() < Utc::now().naive_utc() { pa.delete(conn).await?; err!("Token has expired") } diff --git a/src/api/core/two_factor/webauthn.rs b/src/api/core/two_factor/webauthn.rs index 14ba8514..52ca70c4 100644 --- a/src/api/core/two_factor/webauthn.rs +++ b/src/api/core/two_factor/webauthn.rs @@ -7,7 +7,7 @@ use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState, use crate::{ api::{ core::{log_user_event, two_factor::_generate_recover_code}, - EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, + EmptyResult, JsonResult, PasswordOrOtpData, }, auth::Headers, db::{ @@ -96,20 +96,20 @@ pub struct WebauthnRegistration { impl WebauthnRegistration { fn to_json(&self) -> Value { json!({ - "Id": self.id, - "Name": self.name, + "id": self.id, + "name": self.name, "migrated": self.migrated, }) } } #[post("/two-factor/get-webauthn", data = "")] -async fn 
get_webauthn(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { +async fn get_webauthn(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { if !CONFIG.domain_set() { err!("`DOMAIN` environment variable is not set. Webauthn disabled") } - let data: PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -118,19 +118,15 @@ async fn get_webauthn(data: JsonUpcase, headers: Headers, mut let registrations_json: Vec = registrations.iter().map(WebauthnRegistration::to_json).collect(); Ok(Json(json!({ - "Enabled": enabled, - "Keys": registrations_json, - "Object": "twoFactorWebAuthn" + "enabled": enabled, + "keys": registrations_json, + "object": "twoFactorWebAuthn" }))) } #[post("/two-factor/get-webauthn-challenge", data = "")] -async fn generate_webauthn_challenge( - data: JsonUpcase, - headers: Headers, - mut conn: DbConn, -) -> JsonResult { - let data: PasswordOrOtpData = data.into_inner().data; +async fn generate_webauthn_challenge(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -161,102 +157,94 @@ async fn generate_webauthn_challenge( } #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct EnableWebauthnData { - Id: NumberOrString, // 1..5 - Name: String, - DeviceResponse: RegisterPublicKeyCredentialCopy, - MasterPasswordHash: Option, - Otp: Option, + id: NumberOrString, // 1..5 + name: String, + device_response: RegisterPublicKeyCredentialCopy, + master_password_hash: Option, + otp: Option, } -// This is copied from RegisterPublicKeyCredential to change the Response objects casing #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct RegisterPublicKeyCredentialCopy { - pub Id: String, - pub RawId: 
Base64UrlSafeData, - pub Response: AuthenticatorAttestationResponseRawCopy, - pub Type: String, + pub id: String, + pub raw_id: Base64UrlSafeData, + pub response: AuthenticatorAttestationResponseRawCopy, + pub r#type: String, } // This is copied from AuthenticatorAttestationResponseRaw to change clientDataJSON to clientDataJson #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct AuthenticatorAttestationResponseRawCopy { - pub AttestationObject: Base64UrlSafeData, - pub ClientDataJson: Base64UrlSafeData, + #[serde(rename = "AttestationObject", alias = "attestationObject")] + pub attestation_object: Base64UrlSafeData, + #[serde(rename = "clientDataJson", alias = "clientDataJSON")] + pub client_data_json: Base64UrlSafeData, } impl From for RegisterPublicKeyCredential { fn from(r: RegisterPublicKeyCredentialCopy) -> Self { Self { - id: r.Id, - raw_id: r.RawId, + id: r.id, + raw_id: r.raw_id, response: AuthenticatorAttestationResponseRaw { - attestation_object: r.Response.AttestationObject, - client_data_json: r.Response.ClientDataJson, + attestation_object: r.response.attestation_object, + client_data_json: r.response.client_data_json, }, - type_: r.Type, + type_: r.r#type, } } } -// This is copied from PublicKeyCredential to change the Response objects casing #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct PublicKeyCredentialCopy { - pub Id: String, - pub RawId: Base64UrlSafeData, - pub Response: AuthenticatorAssertionResponseRawCopy, - pub Extensions: Option, - pub Type: String, + pub id: String, + pub raw_id: Base64UrlSafeData, + pub response: AuthenticatorAssertionResponseRawCopy, + pub extensions: Option, + pub r#type: String, } // This is copied from AuthenticatorAssertionResponseRaw to change clientDataJSON to clientDataJson #[derive(Debug, Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct 
AuthenticatorAssertionResponseRawCopy { - pub AuthenticatorData: Base64UrlSafeData, - pub ClientDataJson: Base64UrlSafeData, - pub Signature: Base64UrlSafeData, - pub UserHandle: Option, -} - -#[derive(Debug, Deserialize)] -#[allow(non_snake_case)] -pub struct AuthenticationExtensionsClientOutputsCopy { - #[serde(default)] - pub Appid: bool, + pub authenticator_data: Base64UrlSafeData, + #[serde(rename = "clientDataJson", alias = "clientDataJSON")] + pub client_data_json: Base64UrlSafeData, + pub signature: Base64UrlSafeData, + pub user_handle: Option, } impl From for PublicKeyCredential { fn from(r: PublicKeyCredentialCopy) -> Self { Self { - id: r.Id, - raw_id: r.RawId, + id: r.id, + raw_id: r.raw_id, response: AuthenticatorAssertionResponseRaw { - authenticator_data: r.Response.AuthenticatorData, - client_data_json: r.Response.ClientDataJson, - signature: r.Response.Signature, - user_handle: r.Response.UserHandle, + authenticator_data: r.response.authenticator_data, + client_data_json: r.response.client_data_json, + signature: r.response.signature, + user_handle: r.response.user_handle, }, - extensions: r.Extensions.map(|e| AuthenticationExtensionsClientOutputs { - appid: e.Appid, - }), - type_: r.Type, + extensions: r.extensions, + type_: r.r#type, } } } #[post("/two-factor/webauthn", data = "")] -async fn activate_webauthn(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: EnableWebauthnData = data.into_inner().data; +async fn activate_webauthn(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: EnableWebauthnData = data.into_inner(); let mut user = headers.user; PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash, - Otp: data.Otp, + master_password_hash: data.master_password_hash, + otp: data.otp, } .validate(&user, true, &mut conn) .await?; @@ -274,13 +262,13 @@ async fn activate_webauthn(data: JsonUpcase, headers: Header // Verify the credentials with the saved state let (credential, 
_data) = - WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?; + WebauthnConfig::load().register_credential(&data.device_response.into(), &state, |_| Ok(false))?; let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &mut conn).await?.1; // TODO: Check for repeated ID's registrations.push(WebauthnRegistration { - id: data.Id.into_i32()?, - name: data.Name, + id: data.id.into_i32()?, + name: data.name, migrated: false, credential, @@ -296,28 +284,28 @@ async fn activate_webauthn(data: JsonUpcase, headers: Header let keys_json: Vec = registrations.iter().map(WebauthnRegistration::to_json).collect(); Ok(Json(json!({ - "Enabled": true, - "Keys": keys_json, - "Object": "twoFactorU2f" + "enabled": true, + "keys": keys_json, + "object": "twoFactorU2f" }))) } #[put("/two-factor/webauthn", data = "")] -async fn activate_webauthn_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_webauthn_put(data: Json, headers: Headers, conn: DbConn) -> JsonResult { activate_webauthn(data, headers, conn).await } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct DeleteU2FData { - Id: NumberOrString, - MasterPasswordHash: String, + id: NumberOrString, + master_password_hash: String, } #[delete("/two-factor/webauthn", data = "")] -async fn delete_webauthn(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let id = data.data.Id.into_i32()?; - if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { +async fn delete_webauthn(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let id = data.id.into_i32()?; + if !headers.user.check_valid_password(&data.master_password_hash) { err!("Invalid password"); } @@ -358,9 +346,9 @@ async fn delete_webauthn(data: JsonUpcase, headers: Headers, mut let keys_json: Vec = data.iter().map(WebauthnRegistration::to_json).collect(); 
Ok(Json(json!({ - "Enabled": true, - "Keys": keys_json, - "Object": "twoFactorU2f" + "enabled": true, + "keys": keys_json, + "object": "twoFactorU2f" }))) } @@ -413,8 +401,8 @@ pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &mut ), }; - let rsp: crate::util::UpCase = serde_json::from_str(response)?; - let rsp: PublicKeyCredential = rsp.data.into(); + let rsp: PublicKeyCredentialCopy = serde_json::from_str(response)?; + let rsp: PublicKeyCredential = rsp.into(); let mut registrations = get_webauthn_registrations(user_uuid, conn).await?.1; diff --git a/src/api/core/two_factor/yubikey.rs b/src/api/core/two_factor/yubikey.rs index ea43f36f..2eff3b6f 100644 --- a/src/api/core/two_factor/yubikey.rs +++ b/src/api/core/two_factor/yubikey.rs @@ -1,12 +1,12 @@ use rocket::serde::json::Json; use rocket::Route; use serde_json::Value; -use yubico::{config::Config, verify}; +use yubico::{config::Config, verify_async}; use crate::{ api::{ core::{log_user_event, two_factor::_generate_recover_code}, - EmptyResult, JsonResult, JsonUpcase, PasswordOrOtpData, + EmptyResult, JsonResult, PasswordOrOtpData, }, auth::Headers, db::{ @@ -21,28 +21,30 @@ pub fn routes() -> Vec { routes![generate_yubikey, activate_yubikey, activate_yubikey_put,] } -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] struct EnableYubikeyData { - Key1: Option, - Key2: Option, - Key3: Option, - Key4: Option, - Key5: Option, - Nfc: bool, - MasterPasswordHash: Option, - Otp: Option, + key1: Option, + key2: Option, + key3: Option, + key4: Option, + key5: Option, + nfc: bool, + master_password_hash: Option, + otp: Option, } #[derive(Deserialize, Serialize, Debug)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct YubikeyMetadata { - Keys: Vec, - pub Nfc: bool, + #[serde(rename = "keys", alias = "Keys")] + keys: Vec, + #[serde(rename = "nfc", alias = "Nfc")] + pub nfc: bool, } fn 
parse_yubikeys(data: &EnableYubikeyData) -> Vec { - let data_keys = [&data.Key1, &data.Key2, &data.Key3, &data.Key4, &data.Key5]; + let data_keys = [&data.key1, &data.key2, &data.key3, &data.key4, &data.key5]; data_keys.iter().filter_map(|e| e.as_ref().cloned()).collect() } @@ -74,21 +76,18 @@ async fn verify_yubikey_otp(otp: String) -> EmptyResult { let config = Config::default().set_client_id(yubico_id).set_key(yubico_secret); match CONFIG.yubico_server() { - Some(server) => { - tokio::task::spawn_blocking(move || verify(otp, config.set_api_hosts(vec![server]))).await.unwrap() - } - None => tokio::task::spawn_blocking(move || verify(otp, config)).await.unwrap(), + Some(server) => verify_async(otp, config.set_api_hosts(vec![server])).await, + None => verify_async(otp, config).await, } .map_res("Failed to verify OTP") - .and(Ok(())) } #[post("/two-factor/get-yubikey", data = "")] -async fn generate_yubikey(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { +async fn generate_yubikey(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { // Make sure the credentials are set get_yubico_credentials()?; - let data: PasswordOrOtpData = data.into_inner().data; + let data: PasswordOrOtpData = data.into_inner(); let user = headers.user; data.validate(&user, false, &mut conn).await?; @@ -101,29 +100,29 @@ async fn generate_yubikey(data: JsonUpcase, headers: Headers, if let Some(r) = r { let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?; - let mut result = jsonify_yubikeys(yubikey_metadata.Keys); + let mut result = jsonify_yubikeys(yubikey_metadata.keys); - result["Enabled"] = Value::Bool(true); - result["Nfc"] = Value::Bool(yubikey_metadata.Nfc); - result["Object"] = Value::String("twoFactorU2f".to_owned()); + result["enabled"] = Value::Bool(true); + result["nfc"] = Value::Bool(yubikey_metadata.nfc); + result["object"] = Value::String("twoFactorU2f".to_owned()); Ok(Json(result)) } else { Ok(Json(json!({ - "Enabled": false, - 
"Object": "twoFactorU2f", + "enabled": false, + "object": "twoFactorU2f", }))) } } #[post("/two-factor/yubikey", data = "")] -async fn activate_yubikey(data: JsonUpcase, headers: Headers, mut conn: DbConn) -> JsonResult { - let data: EnableYubikeyData = data.into_inner().data; +async fn activate_yubikey(data: Json, headers: Headers, mut conn: DbConn) -> JsonResult { + let data: EnableYubikeyData = data.into_inner(); let mut user = headers.user; PasswordOrOtpData { - MasterPasswordHash: data.MasterPasswordHash.clone(), - Otp: data.Otp.clone(), + master_password_hash: data.master_password_hash.clone(), + otp: data.otp.clone(), } .validate(&user, true, &mut conn) .await?; @@ -139,8 +138,8 @@ async fn activate_yubikey(data: JsonUpcase, headers: Headers, if yubikeys.is_empty() { return Ok(Json(json!({ - "Enabled": false, - "Object": "twoFactorU2f", + "enabled": false, + "object": "twoFactorU2f", }))); } @@ -157,8 +156,8 @@ async fn activate_yubikey(data: JsonUpcase, headers: Headers, let yubikey_ids: Vec = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect(); let yubikey_metadata = YubikeyMetadata { - Keys: yubikey_ids, - Nfc: data.Nfc, + keys: yubikey_ids, + nfc: data.nfc, }; yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap(); @@ -168,17 +167,17 @@ async fn activate_yubikey(data: JsonUpcase, headers: Headers, log_user_event(EventType::UserUpdated2fa as i32, &user.uuid, headers.device.atype, &headers.ip.ip, &mut conn).await; - let mut result = jsonify_yubikeys(yubikey_metadata.Keys); + let mut result = jsonify_yubikeys(yubikey_metadata.keys); - result["Enabled"] = Value::Bool(true); - result["Nfc"] = Value::Bool(yubikey_metadata.Nfc); - result["Object"] = Value::String("twoFactorU2f".to_owned()); + result["enabled"] = Value::Bool(true); + result["nfc"] = Value::Bool(yubikey_metadata.nfc); + result["object"] = Value::String("twoFactorU2f".to_owned()); Ok(Json(result)) } #[put("/two-factor/yubikey", data = "")] -async fn 
activate_yubikey_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_yubikey_put(data: Json, headers: Headers, conn: DbConn) -> JsonResult { activate_yubikey(data, headers, conn).await } @@ -190,14 +189,10 @@ pub async fn validate_yubikey_login(response: &str, twofactor_data: &str) -> Emp let yubikey_metadata: YubikeyMetadata = serde_json::from_str(twofactor_data).expect("Can't parse Yubikey Metadata"); let response_id = &response[..12]; - if !yubikey_metadata.Keys.contains(&response_id.to_owned()) { + if !yubikey_metadata.keys.contains(&response_id.to_owned()) { err!("Given Yubikey is not registered"); } - let result = verify_yubikey_otp(response.to_owned()).await; - - match result { - Ok(_answer) => Ok(()), - Err(_e) => err!("Failed to verify Yubikey against OTP server"), - } + verify_yubikey_otp(response.to_owned()).await.map_res("Failed to verify Yubikey against OTP server")?; + Ok(()) } diff --git a/src/api/icons.rs b/src/api/icons.rs index f47357bb..94fab3f8 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -1,6 +1,6 @@ use std::{ net::IpAddr, - sync::Arc, + sync::{Arc, Mutex}, time::{Duration, SystemTime}, }; @@ -16,14 +16,13 @@ use rocket::{http::ContentType, response::Redirect, Route}; use tokio::{ fs::{create_dir_all, remove_file, symlink_metadata, File}, io::{AsyncReadExt, AsyncWriteExt}, - net::lookup_host, }; use html5gum::{Emitter, HtmlString, InfallibleTokenizer, Readable, StringReader, Tokenizer}; use crate::{ error::Error, - util::{get_reqwest_client_builder, Cached}, + util::{get_reqwest_client_builder, Cached, CustomDnsResolver, CustomResolverError}, CONFIG, }; @@ -49,48 +48,32 @@ static CLIENT: Lazy = Lazy::new(|| { let icon_download_timeout = Duration::from_secs(CONFIG.icon_download_timeout()); let pool_idle_timeout = Duration::from_secs(10); // Reuse the client between requests - let client = get_reqwest_client_builder() + get_reqwest_client_builder() .cookie_provider(Arc::clone(&cookie_store)) 
.timeout(icon_download_timeout) .pool_max_idle_per_host(5) // Configure the Hyper Pool to only have max 5 idle connections .pool_idle_timeout(pool_idle_timeout) // Configure the Hyper Pool to timeout after 10 seconds - .trust_dns(true) - .default_headers(default_headers.clone()); - - match client.build() { - Ok(client) => client, - Err(e) => { - error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'"); - get_reqwest_client_builder() - .cookie_provider(cookie_store) - .timeout(icon_download_timeout) - .pool_max_idle_per_host(5) // Configure the Hyper Pool to only have max 5 idle connections - .pool_idle_timeout(pool_idle_timeout) // Configure the Hyper Pool to timeout after 10 seconds - .trust_dns(false) - .default_headers(default_headers) - .build() - .expect("Failed to build client") - } - } + .dns_resolver(CustomDnsResolver::instance()) + .default_headers(default_headers.clone()) + .build() + .expect("Failed to build client") }); // Build Regex only once since this takes a lot of time. static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); -// Special HashMap which holds the user defined Regex to speedup matching the regex. 
-static ICON_BLACKLIST_REGEX: Lazy> = Lazy::new(dashmap::DashMap::new); - -async fn icon_redirect(domain: &str, template: &str) -> Option { +#[get("//icon.png")] +fn icon_external(domain: &str) -> Option { if !is_valid_domain(domain) { warn!("Invalid domain: {}", domain); return None; } - if check_domain_blacklist_reason(domain).await.is_some() { + if is_domain_blacklisted(domain) { return None; } - let url = template.replace("{}", domain); + let url = CONFIG._icon_service_url().replace("{}", domain); match CONFIG.icon_redirect_code() { 301 => Some(Redirect::moved(url)), // legacy permanent redirect 302 => Some(Redirect::found(url)), // legacy temporary redirect @@ -103,11 +86,6 @@ async fn icon_redirect(domain: &str, template: &str) -> Option { } } -#[get("//icon.png")] -async fn icon_external(domain: &str) -> Option { - icon_redirect(domain, &CONFIG._icon_service_url()).await -} - #[get("//icon.png")] async fn icon_internal(domain: &str) -> Cached<(ContentType, Vec)> { const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); @@ -166,153 +144,28 @@ fn is_valid_domain(domain: &str) -> bool { true } -/// TODO: This is extracted from IpAddr::is_global, which is unstable: -/// https://doc.rust-lang.org/nightly/std/net/enum.IpAddr.html#method.is_global -/// Remove once https://github.com/rust-lang/rust/issues/27709 is merged -#[allow(clippy::nonminimal_bool)] -#[cfg(not(feature = "unstable"))] -fn is_global(ip: IpAddr) -> bool { - match ip { - IpAddr::V4(ip) => { - // check if this address is 192.0.0.9 or 192.0.0.10. These addresses are the only two - // globally routable addresses in the 192.0.0.0/24 range. 
- if u32::from(ip) == 0xc0000009 || u32::from(ip) == 0xc000000a { - return true; - } - !ip.is_private() - && !ip.is_loopback() - && !ip.is_link_local() - && !ip.is_broadcast() - && !ip.is_documentation() - && !(ip.octets()[0] == 100 && (ip.octets()[1] & 0b1100_0000 == 0b0100_0000)) - && !(ip.octets()[0] == 192 && ip.octets()[1] == 0 && ip.octets()[2] == 0) - && !(ip.octets()[0] & 240 == 240 && !ip.is_broadcast()) - && !(ip.octets()[0] == 198 && (ip.octets()[1] & 0xfe) == 18) - // Make sure the address is not in 0.0.0.0/8 - && ip.octets()[0] != 0 - } - IpAddr::V6(ip) => { - if ip.is_multicast() && ip.segments()[0] & 0x000f == 14 { - true - } else { - !ip.is_multicast() - && !ip.is_loopback() - && !((ip.segments()[0] & 0xffc0) == 0xfe80) - && !((ip.segments()[0] & 0xfe00) == 0xfc00) - && !ip.is_unspecified() - && !((ip.segments()[0] == 0x2001) && (ip.segments()[1] == 0xdb8)) - } - } - } -} - -#[cfg(feature = "unstable")] -fn is_global(ip: IpAddr) -> bool { - ip.is_global() -} - -/// These are some tests to check that the implementations match -/// The IPv4 can be all checked in 5 mins or so and they are correct as of nightly 2020-07-11 -/// The IPV6 can't be checked in a reasonable time, so we check about ten billion random ones, so far correct -/// Note that the is_global implementation is subject to change as new IP RFCs are created -/// -/// To run while showing progress output: -/// cargo test --features sqlite,unstable -- --nocapture --ignored -#[cfg(test)] -#[cfg(feature = "unstable")] -mod tests { - use super::*; - - #[test] - #[ignore] - fn test_ipv4_global() { - for a in 0..u8::MAX { - println!("Iter: {}/255", a); - for b in 0..u8::MAX { - for c in 0..u8::MAX { - for d in 0..u8::MAX { - let ip = IpAddr::V4(std::net::Ipv4Addr::new(a, b, c, d)); - assert_eq!(ip.is_global(), is_global(ip)) - } - } - } - } - } - - #[test] - #[ignore] - fn test_ipv6_global() { - use ring::rand::{SecureRandom, SystemRandom}; - let mut v = [0u8; 16]; - let rand = 
SystemRandom::new(); - for i in 0..1_000 { - println!("Iter: {}/1_000", i); - for _ in 0..10_000_000 { - rand.fill(&mut v).expect("Error generating random values"); - let ip = IpAddr::V6(std::net::Ipv6Addr::new( - (v[14] as u16) << 8 | v[15] as u16, - (v[12] as u16) << 8 | v[13] as u16, - (v[10] as u16) << 8 | v[11] as u16, - (v[8] as u16) << 8 | v[9] as u16, - (v[6] as u16) << 8 | v[7] as u16, - (v[4] as u16) << 8 | v[5] as u16, - (v[2] as u16) << 8 | v[3] as u16, - (v[0] as u16) << 8 | v[1] as u16, - )); - assert_eq!(ip.is_global(), is_global(ip)) - } - } - } -} - -#[derive(Clone)] -enum DomainBlacklistReason { - Regex, - IP, -} - -use cached::proc_macro::cached; -#[cached(key = "String", convert = r#"{ domain.to_string() }"#, size = 16, time = 60)] -async fn check_domain_blacklist_reason(domain: &str) -> Option { - // First check the blacklist regex if there is a match. - // This prevents the blocked domain(s) from being leaked via a DNS lookup. - if let Some(blacklist) = CONFIG.icon_blacklist_regex() { - // Use the pre-generate Regex stored in a Lazy HashMap if there's one, else generate it. - let is_match = if let Some(regex) = ICON_BLACKLIST_REGEX.get(&blacklist) { - regex.is_match(domain) - } else { - // Clear the current list if the previous key doesn't exists. - // To prevent growing of the HashMap after someone has changed it via the admin interface. - if ICON_BLACKLIST_REGEX.len() >= 1 { - ICON_BLACKLIST_REGEX.clear(); - } - - // Generate the regex to store in too the Lazy Static HashMap. 
- let blacklist_regex = Regex::new(&blacklist).unwrap(); - let is_match = blacklist_regex.is_match(domain); - ICON_BLACKLIST_REGEX.insert(blacklist.clone(), blacklist_regex); +pub fn is_domain_blacklisted(domain: &str) -> bool { + let Some(config_blacklist) = CONFIG.icon_blacklist_regex() else { + return false; + }; - is_match - }; + // Compiled domain blacklist + static COMPILED_BLACKLIST: Mutex> = Mutex::new(None); + let mut guard = COMPILED_BLACKLIST.lock().unwrap(); - if is_match { - debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); - return Some(DomainBlacklistReason::Regex); + // If the stored regex is up to date, use it + if let Some((value, regex)) = &*guard { + if value == &config_blacklist { + return regex.is_match(domain); } } - if CONFIG.icon_blacklist_non_global_ips() { - if let Ok(s) = lookup_host((domain, 0)).await { - for addr in s { - if !is_global(addr.ip()) { - debug!("IP {} for domain '{}' is not a global IP!", addr.ip(), domain); - return Some(DomainBlacklistReason::IP); - } - } - } - } + // If we don't have a regex stored, or it's not up to date, recreate it + let regex = Regex::new(&config_blacklist).unwrap(); + let is_match = regex.is_match(domain); + *guard = Some((config_blacklist, regex)); - None + is_match } async fn get_icon(domain: &str) -> Option<(Vec, String)> { @@ -342,6 +195,13 @@ async fn get_icon(domain: &str) -> Option<(Vec, String)> { Some((icon.to_vec(), icon_type.unwrap_or("x-icon").to_string())) } Err(e) => { + // If this error comes from the custom resolver, this means this is a blacklisted domain + // or non global IP, don't save the miss file in this case to avoid leaking it + if let Some(error) = CustomResolverError::downcast_ref(&e) { + warn!("{error}"); + return None; + } + warn!("Unable to download icon: {:?}", e); let miss_indicator = path + ".miss"; save_icon(&miss_indicator, &[]).await; @@ -491,42 +351,48 @@ async fn get_icon_url(domain: &str) -> Result { let ssldomain = 
format!("https://{domain}"); let httpdomain = format!("http://{domain}"); - // First check the domain as given during the request for both HTTPS and HTTP. - let resp = match get_page(&ssldomain).or_else(|_| get_page(&httpdomain)).await { - Ok(c) => Ok(c), - Err(e) => { - let mut sub_resp = Err(e); - - // When the domain is not an IP, and has more then one dot, remove all subdomains. - let is_ip = domain.parse::(); - if is_ip.is_err() && domain.matches('.').count() > 1 { - let mut domain_parts = domain.split('.'); - let base_domain = format!( - "{base}.{tld}", - tld = domain_parts.next_back().unwrap(), - base = domain_parts.next_back().unwrap() - ); - if is_valid_domain(&base_domain) { - let sslbase = format!("https://{base_domain}"); - let httpbase = format!("http://{base_domain}"); - debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); - - sub_resp = get_page(&sslbase).or_else(|_| get_page(&httpbase)).await; - } + // First check the domain as given during the request for HTTPS. + let resp = match get_page(&ssldomain).await { + Err(e) if CustomResolverError::downcast_ref(&e).is_none() => { + // If we get an error that is not caused by the blacklist, we retry with HTTP + match get_page(&httpdomain).await { + mut sub_resp @ Err(_) => { + // When the domain is not an IP, and has more then one dot, remove all subdomains. + let is_ip = domain.parse::(); + if is_ip.is_err() && domain.matches('.').count() > 1 { + let mut domain_parts = domain.split('.'); + let base_domain = format!( + "{base}.{tld}", + tld = domain_parts.next_back().unwrap(), + base = domain_parts.next_back().unwrap() + ); + if is_valid_domain(&base_domain) { + let sslbase = format!("https://{base_domain}"); + let httpbase = format!("http://{base_domain}"); + debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); + + sub_resp = get_page(&sslbase).or_else(|_| get_page(&httpbase)).await; + } - // When the domain is not an IP, and has less then 2 dots, try to add www. 
infront of it. - } else if is_ip.is_err() && domain.matches('.').count() < 2 { - let www_domain = format!("www.{domain}"); - if is_valid_domain(&www_domain) { - let sslwww = format!("https://{www_domain}"); - let httpwww = format!("http://{www_domain}"); - debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); + // When the domain is not an IP, and has less then 2 dots, try to add www. infront of it. + } else if is_ip.is_err() && domain.matches('.').count() < 2 { + let www_domain = format!("www.{domain}"); + if is_valid_domain(&www_domain) { + let sslwww = format!("https://{www_domain}"); + let httpwww = format!("http://{www_domain}"); + debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); - sub_resp = get_page(&sslwww).or_else(|_| get_page(&httpwww)).await; + sub_resp = get_page(&sslwww).or_else(|_| get_page(&httpwww)).await; + } + } + sub_resp } + res => res, } - sub_resp } + + // If we get a result or a blacklist error, just continue + res => res, }; // Create the iconlist @@ -573,21 +439,12 @@ async fn get_page(url: &str) -> Result { } async fn get_page_with_referer(url: &str, referer: &str) -> Result { - match check_domain_blacklist_reason(url::Url::parse(url).unwrap().host_str().unwrap_or_default()).await { - Some(DomainBlacklistReason::Regex) => warn!("Favicon '{}' is from a blacklisted domain!", url), - Some(DomainBlacklistReason::IP) => warn!("Favicon '{}' is hosted on a non-global IP!", url), - None => (), - } - let mut client = CLIENT.get(url); if !referer.is_empty() { client = client.header("Referer", referer) } - match client.send().await { - Ok(c) => c.error_for_status().map_err(Into::into), - Err(e) => err_silent!(format!("{e}")), - } + Ok(client.send().await?.error_for_status()?) } /// Returns a Integer with the priority of the type of the icon which to prefer. 
@@ -670,12 +527,6 @@ fn parse_sizes(sizes: &str) -> (u16, u16) { } async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { - match check_domain_blacklist_reason(domain).await { - Some(DomainBlacklistReason::Regex) => err_silent!("Domain is blacklisted", domain), - Some(DomainBlacklistReason::IP) => err_silent!("Host resolves to a non-global IP", domain), - None => (), - } - let icon_result = get_icon_url(domain).await?; let mut buffer = Bytes::new(); @@ -711,22 +562,19 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { _ => debug!("Extracted icon from data:image uri is invalid"), }; } else { - match get_page_with_referer(&icon.href, &icon_result.referer).await { - Ok(res) => { - buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) - - // Check if the icon type is allowed, else try an icon from the list. - icon_type = get_icon_type(&buffer); - if icon_type.is_none() { - buffer.clear(); - debug!("Icon from {}, is not a valid image type", icon.href); - continue; - } - info!("Downloaded icon from {}", icon.href); - break; - } - Err(e) => debug!("{:?}", e), - }; + let res = get_page_with_referer(&icon.href, &icon_result.referer).await?; + + buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) + + // Check if the icon type is allowed, else try an icon from the list. 
+ icon_type = get_icon_type(&buffer); + if icon_type.is_none() { + buffer.clear(); + debug!("Icon from {}, is not a valid image type", icon.href); + continue; + } + info!("Downloaded icon from {}", icon.href); + break; } } diff --git a/src/api/identity.rs b/src/api/identity.rs index 9f3cd1bf..fbf8d506 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -15,7 +15,7 @@ use crate::{ two_factor::{authenticator, duo, email, enforce_2fa_policy, webauthn, yubikey}, }, push::register_push_device, - ApiResult, EmptyResult, JsonResult, JsonUpcase, + ApiResult, EmptyResult, JsonResult, }, auth::{generate_organization_api_key_login_claims, ClientHeaders, ClientIp}, db::{models::*, DbConn}, @@ -295,7 +295,12 @@ async fn _password_login( "KdfIterations": user.client_kdf_iter, "KdfMemory": user.client_kdf_memory, "KdfParallelism": user.client_kdf_parallelism, - "ResetMasterPassword": false,// TODO: Same as above + "ResetMasterPassword": false, // TODO: Same as above + "ForcePasswordReset": false, + "MasterPasswordPolicy": { + "object": "masterPasswordPolicy", + }, + "scope": scope, "unofficialServer": true, "UserDecryptionOptions": { @@ -559,8 +564,11 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo let mut result = json!({ "error" : "invalid_grant", "error_description" : "Two factor required.", - "TwoFactorProviders" : providers, - "TwoFactorProviders2" : {} // { "0" : null } + "TwoFactorProviders" : providers.iter().map(ToString::to_string).collect::>(), + "TwoFactorProviders2" : {}, // { "0" : null } + "MasterPasswordPolicy": { + "Object": "masterPasswordPolicy" + } }); for provider in providers { @@ -597,7 +605,7 @@ async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo let yubikey_metadata: yubikey::YubikeyMetadata = serde_json::from_str(&twofactor.data)?; result["TwoFactorProviders2"][provider.to_string()] = json!({ - "Nfc": yubikey_metadata.Nfc, + "Nfc": yubikey_metadata.nfc, }) } @@ -626,19 +634,18 @@ 
async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &mut DbCo } #[post("/accounts/prelogin", data = "")] -async fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { +async fn prelogin(data: Json, conn: DbConn) -> Json { _prelogin(data, conn).await } #[post("/accounts/register", data = "")] -async fn identity_register(data: JsonUpcase, conn: DbConn) -> JsonResult { +async fn identity_register(data: Json, conn: DbConn) -> JsonResult { _register(data, conn).await } // https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts // https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs #[derive(Debug, Clone, Default, FromForm)] -#[allow(non_snake_case)] struct ConnectData { #[field(name = uncased("grant_type"))] #[field(name = uncased("granttype"))] diff --git a/src/api/mod.rs b/src/api/mod.rs index 99915bdf..d5281bda 100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -20,10 +20,10 @@ pub use crate::api::{ core::two_factor::send_incomplete_2fa_notifications, core::{emergency_notification_reminder_job, emergency_request_timeout_job}, core::{event_cleanup_job, events_routes as core_events_routes}, - icons::routes as icons_routes, + icons::{is_domain_blacklisted, routes as icons_routes}, identity::routes as identity_routes, notifications::routes as notifications_routes, - notifications::{start_notification_server, AnonymousNotify, Notify, UpdateType, WS_ANONYMOUS_SUBSCRIPTIONS}, + notifications::{AnonymousNotify, Notify, UpdateType, WS_ANONYMOUS_SUBSCRIPTIONS, WS_USERS}, push::{ push_cipher_update, push_folder_update, push_logout, push_send_update, push_user_update, register_push_device, unregister_push_device, @@ -33,23 +33,18 @@ pub use crate::api::{ web::static_files, }; use crate::db::{models::User, DbConn}; -use crate::util; // Type aliases for API methods results type ApiResult = Result; pub type JsonResult = ApiResult>; pub type EmptyResult = ApiResult<()>; -type JsonUpcase = Json>; 
-type JsonUpcaseVec = Json>>; -type JsonVec = Json>; - // Common structs representing JSON data received #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] struct PasswordOrOtpData { - MasterPasswordHash: Option, - Otp: Option, + master_password_hash: Option, + otp: Option, } impl PasswordOrOtpData { @@ -59,7 +54,7 @@ impl PasswordOrOtpData { pub async fn validate(&self, user: &User, delete_if_valid: bool, conn: &mut DbConn) -> EmptyResult { use crate::api::core::two_factor::protected_actions::validate_protected_action_otp; - match (self.MasterPasswordHash.as_deref(), self.Otp.as_deref()) { + match (self.master_password_hash.as_deref(), self.otp.as_deref()) { (Some(pw_hash), None) => { if !user.check_valid_password(pw_hash) { err!("Invalid password"); diff --git a/src/api/notifications.rs b/src/api/notifications.rs index da2664cf..e4707399 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -1,23 +1,11 @@ -use std::{ - net::{IpAddr, SocketAddr}, - sync::Arc, - time::Duration, -}; +use std::{net::IpAddr, sync::Arc, time::Duration}; use chrono::{NaiveDateTime, Utc}; use rmpv::Value; -use rocket::{ - futures::{SinkExt, StreamExt}, - Route, -}; -use tokio::{ - net::{TcpListener, TcpStream}, - sync::mpsc::Sender, -}; -use tokio_tungstenite::{ - accept_hdr_async, - tungstenite::{handshake, Message}, -}; +use rocket::{futures::StreamExt, Route}; +use tokio::sync::mpsc::Sender; + +use rocket_ws::{Message, WebSocket}; use crate::{ auth::{ClientIp, WsAccessTokenHeader}, @@ -30,7 +18,7 @@ use crate::{ use once_cell::sync::Lazy; -static WS_USERS: Lazy> = Lazy::new(|| { +pub static WS_USERS: Lazy> = Lazy::new(|| { Arc::new(WebSocketUsers { map: Arc::new(dashmap::DashMap::new()), }) @@ -47,8 +35,15 @@ use super::{ push_send_update, push_user_update, }; +static NOTIFICATIONS_DISABLED: Lazy = Lazy::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled()); + pub fn routes() -> Vec { - routes![websockets_hub, 
anonymous_websockets_hub] + if CONFIG.enable_websocket() { + routes![websockets_hub, anonymous_websockets_hub] + } else { + info!("WebSocket are disabled, realtime sync functionality will not work!"); + routes![] + } } #[derive(FromForm, Debug)] @@ -108,7 +103,7 @@ impl Drop for WSAnonymousEntryMapGuard { #[get("/hub?")] fn websockets_hub<'r>( - ws: rocket_ws::WebSocket, + ws: WebSocket, data: WsAccessToken, ip: ClientIp, header_token: WsAccessTokenHeader, @@ -192,11 +187,7 @@ fn websockets_hub<'r>( } #[get("/anonymous-hub?")] -fn anonymous_websockets_hub<'r>( - ws: rocket_ws::WebSocket, - token: String, - ip: ClientIp, -) -> Result { +fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> Result { let addr = ip.ip; info!("Accepting Anonymous Rocket WS connection from {addr}"); @@ -297,8 +288,8 @@ fn serialize(val: Value) -> Vec { } fn serialize_date(date: NaiveDateTime) -> Value { - let seconds: i64 = date.timestamp(); - let nanos: i64 = date.timestamp_subsec_nanos().into(); + let seconds: i64 = date.and_utc().timestamp(); + let nanos: i64 = date.and_utc().timestamp_subsec_nanos().into(); let timestamp = nanos << 34 | seconds; let bs = timestamp.to_be_bytes(); @@ -349,13 +340,19 @@ impl WebSocketUsers { // NOTE: The last modified date needs to be updated before calling these methods pub async fn send_user_update(&self, ut: UpdateType, user: &User) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let data = create_update( vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))], ut, None, ); - self.send_update(&user.uuid, &data).await; + if CONFIG.enable_websocket() { + self.send_update(&user.uuid, &data).await; + } if CONFIG.push_enabled() { push_user_update(ut, user); @@ -363,13 +360,19 @@ impl WebSocketUsers { } pub async fn send_logout(&self, user: &User, acting_device_uuid: Option) { + // Skip any processing if both WebSockets 
and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let data = create_update( vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))], UpdateType::LogOut, acting_device_uuid.clone(), ); - self.send_update(&user.uuid, &data).await; + if CONFIG.enable_websocket() { + self.send_update(&user.uuid, &data).await; + } if CONFIG.push_enabled() { push_logout(user, acting_device_uuid); @@ -383,6 +386,10 @@ impl WebSocketUsers { acting_device_uuid: &String, conn: &mut DbConn, ) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let data = create_update( vec![ ("Id".into(), folder.uuid.clone().into()), @@ -393,7 +400,9 @@ impl WebSocketUsers { Some(acting_device_uuid.into()), ); - self.send_update(&folder.user_uuid, &data).await; + if CONFIG.enable_websocket() { + self.send_update(&folder.user_uuid, &data).await; + } if CONFIG.push_enabled() { push_folder_update(ut, folder, acting_device_uuid, conn).await; @@ -409,6 +418,10 @@ impl WebSocketUsers { collection_uuids: Option>, conn: &mut DbConn, ) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let org_uuid = convert_option(cipher.organization_uuid.clone()); // Depending if there are collections provided or not, we need to have different values for the following variables. // The user_uuid should be `null`, and the revision date should be set to now, else the clients won't sync the collection change. 
@@ -434,8 +447,10 @@ impl WebSocketUsers { Some(acting_device_uuid.into()), ); - for uuid in user_uuids { - self.send_update(uuid, &data).await; + if CONFIG.enable_websocket() { + for uuid in user_uuids { + self.send_update(uuid, &data).await; + } } if CONFIG.push_enabled() && user_uuids.len() == 1 { @@ -451,6 +466,10 @@ impl WebSocketUsers { acting_device_uuid: &String, conn: &mut DbConn, ) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let user_uuid = convert_option(send.user_uuid.clone()); let data = create_update( @@ -463,8 +482,10 @@ impl WebSocketUsers { None, ); - for uuid in user_uuids { - self.send_update(uuid, &data).await; + if CONFIG.enable_websocket() { + for uuid in user_uuids { + self.send_update(uuid, &data).await; + } } if CONFIG.push_enabled() && user_uuids.len() == 1 { push_send_update(ut, send, acting_device_uuid, conn).await; @@ -478,12 +499,18 @@ impl WebSocketUsers { acting_device_uuid: &String, conn: &mut DbConn, ) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let data = create_update( vec![("Id".into(), auth_request_uuid.clone().into()), ("UserId".into(), user_uuid.clone().into())], UpdateType::AuthRequest, Some(acting_device_uuid.to_string()), ); - self.send_update(user_uuid, &data).await; + if CONFIG.enable_websocket() { + self.send_update(user_uuid, &data).await; + } if CONFIG.push_enabled() { push_auth_request(user_uuid.to_string(), auth_request_uuid.to_string(), conn).await; @@ -497,12 +524,18 @@ impl WebSocketUsers { approving_device_uuid: String, conn: &mut DbConn, ) { + // Skip any processing if both WebSockets and Push are not active + if *NOTIFICATIONS_DISABLED { + return; + } let data = create_update( vec![("Id".into(), auth_response_uuid.to_owned().into()), ("UserId".into(), user_uuid.clone().into())], UpdateType::AuthRequestResponse, approving_device_uuid.clone().into(), ); - 
self.send_update(auth_response_uuid, &data).await; + if CONFIG.enable_websocket() { + self.send_update(auth_response_uuid, &data).await; + } if CONFIG.push_enabled() { push_auth_response(user_uuid.to_string(), auth_response_uuid.to_string(), approving_device_uuid, conn) @@ -526,6 +559,9 @@ impl AnonymousWebSocketSubscriptions { } pub async fn send_auth_response(&self, user_uuid: &String, auth_response_uuid: &str) { + if !CONFIG.enable_websocket() { + return; + } let data = create_anonymous_update( vec![("Id".into(), auth_response_uuid.to_owned().into()), ("UserId".into(), user_uuid.clone().into())], UpdateType::AuthRequestResponse, @@ -620,127 +656,3 @@ pub enum UpdateType { pub type Notify<'a> = &'a rocket::State>; pub type AnonymousNotify<'a> = &'a rocket::State>; - -pub fn start_notification_server() -> Arc { - let users = Arc::clone(&WS_USERS); - if CONFIG.websocket_enabled() { - let users2 = Arc::::clone(&users); - tokio::spawn(async move { - let addr = (CONFIG.websocket_address(), CONFIG.websocket_port()); - info!("Starting WebSockets server on {}:{}", addr.0, addr.1); - let listener = TcpListener::bind(addr).await.expect("Can't listen on websocket port"); - - let (shutdown_tx, mut shutdown_rx) = tokio::sync::oneshot::channel::<()>(); - CONFIG.set_ws_shutdown_handle(shutdown_tx); - - loop { - tokio::select! 
{ - Ok((stream, addr)) = listener.accept() => { - tokio::spawn(handle_connection(stream, Arc::::clone(&users2), addr)); - } - - _ = &mut shutdown_rx => { - break; - } - } - } - - info!("Shutting down WebSockets server!") - }); - } - - users -} - -async fn handle_connection(stream: TcpStream, users: Arc, addr: SocketAddr) -> Result<(), Error> { - let mut user_uuid: Option = None; - - info!("Accepting WS connection from {addr}"); - - // Accept connection, do initial handshake, validate auth token and get the user ID - use handshake::server::{Request, Response}; - let mut stream = accept_hdr_async(stream, |req: &Request, res: Response| { - if let Some(token) = get_request_token(req) { - if let Ok(claims) = crate::auth::decode_login(&token) { - user_uuid = Some(claims.sub); - return Ok(res); - } - } - Err(Response::builder().status(401).body(None).unwrap()) - }) - .await?; - - let user_uuid = user_uuid.expect("User UUID should be set after the handshake"); - - let (mut rx, guard) = { - // Add a channel to send messages to this client to the map - let entry_uuid = uuid::Uuid::new_v4(); - let (tx, rx) = tokio::sync::mpsc::channel::(100); - users.map.entry(user_uuid.clone()).or_default().push((entry_uuid, tx)); - - // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map - (rx, WSEntryMapGuard::new(users, user_uuid, entry_uuid, addr.ip())) - }; - - let _guard = guard; - let mut interval = tokio::time::interval(Duration::from_secs(15)); - loop { - tokio::select! 
{ - res = stream.next() => { - match res { - Some(Ok(message)) => { - match message { - // Respond to any pings - Message::Ping(ping) => stream.send(Message::Pong(ping)).await?, - Message::Pong(_) => {/* Ignored */}, - - // We should receive an initial message with the protocol and version, and we will reply to it - Message::Text(ref message) => { - let msg = message.strip_suffix(RECORD_SEPARATOR as char).unwrap_or(message); - - if serde_json::from_str(msg).ok() == Some(INITIAL_MESSAGE) { - stream.send(Message::binary(INITIAL_RESPONSE)).await?; - continue; - } - } - // Just echo anything else the client sends - _ => stream.send(message).await?, - } - } - _ => break, - } - } - - res = rx.recv() => { - match res { - Some(res) => stream.send(res).await?, - None => break, - } - } - - _ = interval.tick() => stream.send(Message::Ping(create_ping())).await? - } - } - - Ok(()) -} - -fn get_request_token(req: &handshake::server::Request) -> Option { - const ACCESS_TOKEN_KEY: &str = "access_token="; - - if let Some(Ok(auth)) = req.headers().get("Authorization").map(|a| a.to_str()) { - if let Some(token_part) = auth.strip_prefix("Bearer ") { - return Some(token_part.to_owned()); - } - } - - if let Some(params) = req.uri().query() { - let params_iter = params.split('&').take(1); - for val in params_iter { - if let Some(stripped) = val.strip_prefix(ACCESS_TOKEN_KEY) { - return Some(stripped.to_owned()); - } - } - } - None -} diff --git a/src/api/push.rs b/src/api/push.rs index 79966c94..607fb7ea 100644 --- a/src/api/push.rs +++ b/src/api/push.rs @@ -114,11 +114,11 @@ pub async fn register_push_device(device: &mut Device, conn: &mut crate::db::DbC .await? 
.error_for_status() { - err!(format!("An error occured while proceeding registration of a device: {e}")); + err!(format!("An error occurred while proceeding registration of a device: {e}")); } if let Err(e) = device.save(conn).await { - err!(format!("An error occured while trying to save the (registered) device push uuid: {e}")); + err!(format!("An error occurred while trying to save the (registered) device push uuid: {e}")); } Ok(()) diff --git a/src/api/web.rs b/src/api/web.rs index 67248c83..6983719b 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -170,7 +170,7 @@ pub fn static_files(filename: &str) -> Result<(ContentType, &'static [u8]), Erro } "bootstrap.css" => Ok((ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), "bootstrap.bundle.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap.bundle.js"))), - "jdenticon.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jdenticon.js"))), + "jdenticon-3.3.0.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jdenticon-3.3.0.js"))), "datatables.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))), "datatables.css" => Ok((ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), "jquery-3.7.1.slim.js" => { diff --git a/src/auth.rs b/src/auth.rs index 85b6359e..c8060a28 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -1,10 +1,11 @@ // JWT Handling // -use chrono::{Duration, Utc}; +use chrono::{TimeDelta, Utc}; use num_traits::FromPrimitive; -use once_cell::sync::Lazy; +use once_cell::sync::{Lazy, OnceCell}; -use jsonwebtoken::{self, errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header}; +use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header}; +use openssl::rsa::Rsa; use serde::de::DeserializeOwned; use serde::ser::Serialize; @@ -12,7 +13,7 @@ use crate::{error::Error, CONFIG}; const JWT_ALGORITHM: Algorithm = Algorithm::RS256; -pub static 
DEFAULT_VALIDITY: Lazy = Lazy::new(|| Duration::hours(2)); +pub static DEFAULT_VALIDITY: Lazy = Lazy::new(|| TimeDelta::try_hours(2).unwrap()); static JWT_HEADER: Lazy
= Lazy::new(|| Header::new(JWT_ALGORITHM)); pub static JWT_LOGIN_ISSUER: Lazy = Lazy::new(|| format!("{}|login", CONFIG.domain_origin())); @@ -26,23 +27,46 @@ static JWT_SEND_ISSUER: Lazy = Lazy::new(|| format!("{}|send", CONFIG.do static JWT_ORG_API_KEY_ISSUER: Lazy = Lazy::new(|| format!("{}|api.organization", CONFIG.domain_origin())); static JWT_FILE_DOWNLOAD_ISSUER: Lazy = Lazy::new(|| format!("{}|file_download", CONFIG.domain_origin())); -static PRIVATE_RSA_KEY: Lazy = Lazy::new(|| { - let key = - std::fs::read(CONFIG.private_rsa_key()).unwrap_or_else(|e| panic!("Error loading private RSA Key. \n{e}")); - EncodingKey::from_rsa_pem(&key).unwrap_or_else(|e| panic!("Error decoding private RSA Key.\n{e}")) -}); -static PUBLIC_RSA_KEY: Lazy = Lazy::new(|| { - let key = std::fs::read(CONFIG.public_rsa_key()).unwrap_or_else(|e| panic!("Error loading public RSA Key. \n{e}")); - DecodingKey::from_rsa_pem(&key).unwrap_or_else(|e| panic!("Error decoding public RSA Key.\n{e}")) -}); +static PRIVATE_RSA_KEY: OnceCell = OnceCell::new(); +static PUBLIC_RSA_KEY: OnceCell = OnceCell::new(); -pub fn load_keys() { - Lazy::force(&PRIVATE_RSA_KEY); - Lazy::force(&PUBLIC_RSA_KEY); +pub fn initialize_keys() -> Result<(), crate::error::Error> { + let mut priv_key_buffer = Vec::with_capacity(2048); + + let priv_key = { + let mut priv_key_file = + File::options().create(true).truncate(false).read(true).write(true).open(CONFIG.private_rsa_key())?; + + #[allow(clippy::verbose_file_reads)] + let bytes_read = priv_key_file.read_to_end(&mut priv_key_buffer)?; + + if bytes_read > 0 { + Rsa::private_key_from_pem(&priv_key_buffer[..bytes_read])? 
+ } else { + // Only create the key if the file doesn't exist or is empty + let rsa_key = openssl::rsa::Rsa::generate(2048)?; + priv_key_buffer = rsa_key.private_key_to_pem()?; + priv_key_file.write_all(&priv_key_buffer)?; + info!("Private key created correctly."); + rsa_key + } + }; + + let pub_key_buffer = priv_key.public_key_to_pem()?; + + let enc = EncodingKey::from_rsa_pem(&priv_key_buffer)?; + let dec: DecodingKey = DecodingKey::from_rsa_pem(&pub_key_buffer)?; + if PRIVATE_RSA_KEY.set(enc).is_err() { + err!("PRIVATE_RSA_KEY must only be initialized once") + } + if PUBLIC_RSA_KEY.set(dec).is_err() { + err!("PUBLIC_RSA_KEY must only be initialized once") + } + Ok(()) } pub fn encode_jwt(claims: &T) -> String { - match jsonwebtoken::encode(&JWT_HEADER, claims, &PRIVATE_RSA_KEY) { + match jsonwebtoken::encode(&JWT_HEADER, claims, PRIVATE_RSA_KEY.wait()) { Ok(token) => token, Err(e) => panic!("Error encoding jwt {e}"), } @@ -56,7 +80,7 @@ fn decode_jwt(token: &str, issuer: String) -> Result Ok(d.claims), Err(err) => match *err.kind() { ErrorKind::InvalidToken => err!("Token is invalid"), @@ -164,11 +188,11 @@ pub fn generate_invite_claims( user_org_id: Option, invited_by_email: Option, ) -> InviteJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); InviteJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::hours(expire_hours)).timestamp(), + exp: (time_now + TimeDelta::try_hours(expire_hours).unwrap()).timestamp(), iss: JWT_INVITE_ISSUER.to_string(), sub: uuid, email, @@ -202,11 +226,11 @@ pub fn generate_emergency_access_invite_claims( grantor_name: String, grantor_email: String, ) -> EmergencyAccessInviteJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); EmergencyAccessInviteJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + 
Duration::hours(expire_hours)).timestamp(), + exp: (time_now + TimeDelta::try_hours(expire_hours).unwrap()).timestamp(), iss: JWT_EMERGENCY_ACCESS_INVITE_ISSUER.to_string(), sub: uuid, email, @@ -233,10 +257,10 @@ pub struct OrgApiKeyLoginJwtClaims { } pub fn generate_organization_api_key_login_claims(uuid: String, org_id: String) -> OrgApiKeyLoginJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); OrgApiKeyLoginJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::hours(1)).timestamp(), + exp: (time_now + TimeDelta::try_hours(1).unwrap()).timestamp(), iss: JWT_ORG_API_KEY_ISSUER.to_string(), sub: uuid, client_id: format!("organization.{org_id}"), @@ -260,10 +284,10 @@ pub struct FileDownloadClaims { } pub fn generate_file_download_claims(uuid: String, file_id: String) -> FileDownloadClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); FileDownloadClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(5)).timestamp(), + exp: (time_now + TimeDelta::try_minutes(5).unwrap()).timestamp(), iss: JWT_FILE_DOWNLOAD_ISSUER.to_string(), sub: uuid, file_id, @@ -283,42 +307,42 @@ pub struct BasicJwtClaims { } pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); BasicJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::hours(expire_hours)).timestamp(), + exp: (time_now + TimeDelta::try_hours(expire_hours).unwrap()).timestamp(), iss: JWT_DELETE_ISSUER.to_string(), sub: uuid, } } pub fn generate_verify_email_claims(uuid: String) -> BasicJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); let expire_hours = i64::from(CONFIG.invitation_expiration_hours()); BasicJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::hours(expire_hours)).timestamp(), + exp: (time_now + 
TimeDelta::try_hours(expire_hours).unwrap()).timestamp(), iss: JWT_VERIFYEMAIL_ISSUER.to_string(), sub: uuid, } } pub fn generate_admin_claims() -> BasicJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); BasicJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(CONFIG.admin_session_lifetime())).timestamp(), + exp: (time_now + TimeDelta::try_minutes(CONFIG.admin_session_lifetime()).unwrap()).timestamp(), iss: JWT_ADMIN_ISSUER.to_string(), sub: "admin_panel".to_string(), } } pub fn generate_send_claims(send_id: &str, file_id: &str) -> BasicJwtClaims { - let time_now = Utc::now().naive_utc(); + let time_now = Utc::now(); BasicJwtClaims { nbf: time_now.timestamp(), - exp: (time_now + Duration::minutes(2)).timestamp(), + exp: (time_now + TimeDelta::try_minutes(2).unwrap()).timestamp(), iss: JWT_SEND_ISSUER.to_string(), sub: format!("{send_id}/{file_id}"), } @@ -367,10 +391,8 @@ impl<'r> FromRequest<'r> for Host { let host = if let Some(host) = headers.get_one("X-Forwarded-Host") { host - } else if let Some(host) = headers.get_one("Host") { - host } else { - "" + headers.get_one("Host").unwrap_or_default() }; format!("{protocol}://{host}") @@ -383,7 +405,6 @@ impl<'r> FromRequest<'r> for Host { } pub struct ClientHeaders { - pub host: String, pub device_type: i32, pub ip: ClientIp, } @@ -393,7 +414,6 @@ impl<'r> FromRequest<'r> for ClientHeaders { type Error = &'static str; async fn from_request(request: &'r Request<'_>) -> Outcome { - let host = try_outcome!(Host::from_request(request).await).host; let ip = match ClientIp::from_request(request).await { Outcome::Success(ip) => ip, _ => err_handler!("Error getting Client IP"), @@ -403,7 +423,6 @@ impl<'r> FromRequest<'r> for ClientHeaders { request.headers().get_one("device-type").map(|d| d.parse().unwrap_or(14)).unwrap_or_else(|| 14); Outcome::Success(ClientHeaders { - host, device_type, ip, }) @@ -475,7 +494,7 @@ impl<'r> FromRequest<'r> for Headers { // Check if the 
stamp exception has expired first. // Then, check if the current route matches any of the allowed routes. // After that check the stamp in exception matches the one in the claims. - if Utc::now().naive_utc().timestamp() > stamp_exception.expire { + if Utc::now().timestamp() > stamp_exception.expire { // If the stamp exception has been expired remove it from the database. // This prevents checking this stamp exception for new requests. let mut user = user; @@ -509,7 +528,6 @@ pub struct OrgHeaders { pub user: User, pub org_user_type: UserOrgType, pub org_user: UserOrganization, - pub org_id: String, pub ip: ClientIp, } @@ -572,7 +590,6 @@ impl<'r> FromRequest<'r> for OrgHeaders { } }, org_user, - org_id: String::from(org_id), ip: headers.ip, }) } @@ -649,7 +666,6 @@ pub struct ManagerHeaders { pub host: String, pub device: Device, pub user: User, - pub org_user_type: UserOrgType, pub ip: ClientIp, } @@ -667,7 +683,7 @@ impl<'r> FromRequest<'r> for ManagerHeaders { _ => err_handler!("Error getting DB"), }; - if !can_access_collection(&headers.org_user, &col_id, &mut conn).await { + if !Collection::can_access_collection(&headers.org_user, &col_id, &mut conn).await { err_handler!("The current user isn't a manager for this collection") } } @@ -678,7 +694,6 @@ impl<'r> FromRequest<'r> for ManagerHeaders { host: headers.host, device: headers.device, user: headers.user, - org_user_type: headers.org_user_type, ip: headers.ip, }) } else { @@ -705,7 +720,6 @@ pub struct ManagerHeadersLoose { pub device: Device, pub user: User, pub org_user: UserOrganization, - pub org_user_type: UserOrgType, pub ip: ClientIp, } @@ -721,7 +735,6 @@ impl<'r> FromRequest<'r> for ManagerHeadersLoose { device: headers.device, user: headers.user, org_user: headers.org_user, - org_user_type: headers.org_user_type, ip: headers.ip, }) } else { @@ -740,10 +753,6 @@ impl From for Headers { } } } -async fn can_access_collection(org_user: &UserOrganization, col_id: &str, conn: &mut DbConn) -> bool { - 
org_user.has_full_access() - || Collection::has_access_by_collection_and_user_uuid(col_id, &org_user.user_uuid, conn).await -} impl ManagerHeaders { pub async fn from_loose( @@ -755,7 +764,7 @@ impl ManagerHeaders { if uuid::Uuid::parse_str(col_id).is_err() { err!("Collection Id is malformed!"); } - if !can_access_collection(&h.org_user, col_id, conn).await { + if !Collection::can_access_collection(&h.org_user, col_id, conn).await { err!("You don't have access to all collections!"); } } @@ -764,14 +773,12 @@ impl ManagerHeaders { host: h.host, device: h.device, user: h.user, - org_user_type: h.org_user_type, ip: h.ip, }) } } pub struct OwnerHeaders { - pub host: String, pub device: Device, pub user: User, pub ip: ClientIp, @@ -785,7 +792,6 @@ impl<'r> FromRequest<'r> for OwnerHeaders { let headers = try_outcome!(OrgHeaders::from_request(request).await); if headers.org_user_type == UserOrgType::Owner { Outcome::Success(Self { - host: headers.host, device: headers.device, user: headers.user, ip: headers.ip, @@ -799,7 +805,11 @@ impl<'r> FromRequest<'r> for OwnerHeaders { // // Client IP address detection // -use std::net::IpAddr; +use std::{ + fs::File, + io::{Read, Write}, + net::IpAddr, +}; pub struct ClientIp { pub ip: IpAddr, diff --git a/src/config.rs b/src/config.rs index 2f0e9264..489a229d 100644 --- a/src/config.rs +++ b/src/config.rs @@ -39,7 +39,6 @@ macro_rules! make_config { struct Inner { rocket_shutdown_handle: Option, - ws_shutdown_handle: Option>, templates: Handlebars<'static>, config: ConfigItems, @@ -361,7 +360,7 @@ make_config! 
{ /// Sends folder sends_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "sends"); /// Temp folder |> Used for storing temporary file uploads - tmp_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "tmp"); + tmp_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "tmp"); /// Templates folder templates_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "templates"); /// Session JWT key @@ -371,11 +370,7 @@ make_config! { }, ws { /// Enable websocket notifications - websocket_enabled: bool, false, def, false; - /// Websocket address - websocket_address: String, false, def, "0.0.0.0".to_string(); - /// Websocket port - websocket_port: u16, false, def, 3012; + enable_websocket: bool, false, def, true; }, push { /// Enable push notifications @@ -691,6 +686,10 @@ make_config! { email_expiration_time: u64, true, def, 600; /// Maximum attempts |> Maximum attempts before an email token is reset and a new email will need to be sent email_attempts_limit: u64, true, def, 3; + /// Automatically enforce at login |> Setup email 2FA provider regardless of any organization policy + email_2fa_enforce_on_verified_invite: bool, true, def, false; + /// Auto-enable 2FA (Know the risks!) 
|> Automatically setup email 2FA as fallback provider when needed + email_2fa_auto_fallback: bool, true, def, false; }, } @@ -893,6 +892,13 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { err!("To enable email 2FA, a mail transport must be configured") } + if !cfg._enable_email_2fa && cfg.email_2fa_enforce_on_verified_invite { + err!("To enforce email 2FA on verified invitations, email 2fa has to be enabled!"); + } + if !cfg._enable_email_2fa && cfg.email_2fa_auto_fallback { + err!("To use email 2FA as automatic fallback, email 2fa has to be enabled!"); + } + // Check if the icon blacklist regex is valid if let Some(ref r) = cfg.icon_blacklist_regex { let validate_regex = regex::Regex::new(r); @@ -1071,7 +1077,6 @@ impl Config { Ok(Config { inner: RwLock::new(Inner { rocket_shutdown_handle: None, - ws_shutdown_handle: None, templates: load_templates(&config.templates_folder), config, _env, @@ -1164,7 +1169,7 @@ impl Config { } pub fn delete_user_config(&self) -> Result<(), Error> { - crate::util::delete_file(&CONFIG_FILE)?; + std::fs::remove_file(&*CONFIG_FILE)?; // Empty user config let usr = ConfigBuilder::default(); @@ -1189,9 +1194,6 @@ impl Config { pub fn private_rsa_key(&self) -> String { format!("{}.pem", CONFIG.rsa_key_filename()) } - pub fn public_rsa_key(&self) -> String { - format!("{}.pub.pem", CONFIG.rsa_key_filename()) - } pub fn mail_enabled(&self) -> bool { let inner = &self.inner.read().unwrap().config; inner._enable_smtp && (inner.smtp_host.is_some() || inner.use_sendmail) @@ -1240,16 +1242,8 @@ impl Config { self.inner.write().unwrap().rocket_shutdown_handle = Some(handle); } - pub fn set_ws_shutdown_handle(&self, handle: tokio::sync::oneshot::Sender<()>) { - self.inner.write().unwrap().ws_shutdown_handle = Some(handle); - } - pub fn shutdown(&self) { if let Ok(mut c) = self.inner.write() { - if let Some(handle) = c.ws_shutdown_handle.take() { - handle.send(()).ok(); - } - if let Some(handle) = c.rocket_shutdown_handle.take() { 
handle.notify(); } diff --git a/src/db/mod.rs b/src/db/mod.rs index f59ac98d..824b3c71 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -389,13 +389,13 @@ pub async fn backup_database(conn: &mut DbConn) -> Result<(), Error> { pub async fn get_sql_server_version(conn: &mut DbConn) -> String { db_run! {@raw conn: postgresql, mysql { - sql_function!{ + define_sql_function!{ fn version() -> diesel::sql_types::Text; } diesel::select(version()).get_result::(conn).unwrap_or_else(|_| "Unknown".to_string()) } sqlite { - sql_function!{ + define_sql_function!{ fn sqlite_version() -> diesel::sql_types::Text; } diesel::select(sqlite_version()).get_result::(conn).unwrap_or_else(|_| "Unknown".to_string()) diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 8f05e6b4..65855cc0 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -42,13 +42,13 @@ impl Attachment { pub fn to_json(&self, host: &str) -> Value { json!({ - "Id": self.id, - "Url": self.get_url(host), - "FileName": self.file_name, - "Size": self.file_size.to_string(), - "SizeName": crate::util::get_display_size(self.file_size), - "Key": self.akey, - "Object": "attachment" + "id": self.id, + "url": self.get_url(host), + "fileName": self.file_name, + "size": self.file_size.to_string(), + "sizeName": crate::util::get_display_size(self.file_size), + "key": self.akey, + "object": "attachment" }) } } @@ -95,7 +95,7 @@ impl Attachment { pub async fn delete(&self, conn: &mut DbConn) -> EmptyResult { db_run! { conn: { - crate::util::retry( + let _: () = crate::util::retry( || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn), 10, ) @@ -103,7 +103,7 @@ impl Attachment { let file_path = &self.get_file_path(); - match crate::util::delete_file(file_path) { + match std::fs::remove_file(file_path) { // Ignore "file not found" errors. This can happen when the // upstream caller has already cleaned up the file as part of // its own error handling. 
diff --git a/src/db/models/auth_request.rs b/src/db/models/auth_request.rs index 2a004fb1..9388c71a 100644 --- a/src/db/models/auth_request.rs +++ b/src/db/models/auth_request.rs @@ -140,7 +140,7 @@ impl AuthRequest { } pub async fn purge_expired_auth_requests(conn: &mut DbConn) { - let expiry_time = Utc::now().naive_utc() - chrono::Duration::minutes(5); //after 5 minutes, clients reject the request + let expiry_time = Utc::now().naive_utc() - chrono::TimeDelta::try_minutes(5).unwrap(); //after 5 minutes, clients reject the request for auth_request in Self::find_created_before(&expiry_time, conn).await { auth_request.delete(conn).await.ok(); } diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 61683d85..446749d4 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -1,5 +1,6 @@ +use crate::util::LowerCase; use crate::CONFIG; -use chrono::{Duration, NaiveDateTime, Utc}; +use chrono::{NaiveDateTime, TimeDelta, Utc}; use serde_json::Value; use super::{ @@ -81,7 +82,7 @@ impl Cipher { pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult { let mut validation_errors = serde_json::Map::new(); for (index, cipher) in cipher_data.iter().enumerate() { - if let Some(note) = &cipher.Notes { + if let Some(note) = &cipher.notes { if note.len() > 10_000 { validation_errors.insert( format!("Ciphers[{index}].Notes"), @@ -135,10 +136,6 @@ impl Cipher { } } - let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let password_history_json = - self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - // We don't need these values at all for Organizational syncs // Skip any other database calls if this is the case and just return false. 
let (read_only, hide_passwords) = if sync_type == CipherSyncType::User { @@ -153,32 +150,60 @@ impl Cipher { (false, false) }; + let fields_json: Vec<_> = self + .fields + .as_ref() + .and_then(|s| { + serde_json::from_str::>>(s) + .inspect_err(|e| warn!("Error parsing fields {:?}", e)) + .ok() + }) + .map(|d| d.into_iter().map(|d| d.data).collect()) + .unwrap_or_default(); + let password_history_json: Vec<_> = self + .password_history + .as_ref() + .and_then(|s| { + serde_json::from_str::>>(s) + .inspect_err(|e| warn!("Error parsing password history {:?}", e)) + .ok() + }) + .map(|d| d.into_iter().map(|d| d.data).collect()) + .unwrap_or_default(); + // Get the type_data or a default to an empty json object '{}'. // If not passing an empty object, mobile clients will crash. - let mut type_data_json: Value = - serde_json::from_str(&self.data).unwrap_or_else(|_| Value::Object(serde_json::Map::new())); + let mut type_data_json = serde_json::from_str::>(&self.data) + .map(|d| d.data) + .unwrap_or_else(|_| Value::Object(serde_json::Map::new())); // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // Set the first element of the Uris array as Uri, this is needed several (mobile) clients. if self.atype == 1 { - if type_data_json["Uris"].is_array() { - let uri = type_data_json["Uris"][0]["Uri"].clone(); - type_data_json["Uri"] = uri; + if type_data_json["uris"].is_array() { + let uri = type_data_json["uris"][0]["uri"].clone(); + type_data_json["uri"] = uri; } else { // Upstream always has an Uri key/value - type_data_json["Uri"] = Value::Null; + type_data_json["uri"] = Value::Null; } } + // Fix secure note issues when data is `{}` + // This breaks at least the native mobile clients + if self.atype == 2 && (self.data.eq("{}") || self.data.to_ascii_lowercase().eq("{\"type\":null}")) { + type_data_json = json!({"type": 0}); + } + // Clone the type_data and add some default value. 
let mut data_json = type_data_json.clone(); // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // data_json should always contain the following keys with every atype - data_json["Fields"] = fields_json.clone(); - data_json["Name"] = json!(self.name); - data_json["Notes"] = json!(self.notes); - data_json["PasswordHistory"] = password_history_json.clone(); + data_json["fields"] = Value::Array(fields_json.clone()); + data_json["name"] = json!(self.name); + data_json["notes"] = json!(self.notes); + data_json["passwordHistory"] = Value::Array(password_history_json.clone()); let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data { if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) { @@ -187,7 +212,7 @@ impl Cipher { Cow::from(Vec::with_capacity(0)) } } else { - Cow::from(self.get_collections(user_uuid.to_string(), conn).await) + Cow::from(self.get_admin_collections(user_uuid.to_string(), conn).await) }; // There are three types of cipher response models in upstream @@ -198,48 +223,48 @@ impl Cipher { // // Ref: https://github.com/bitwarden/server/blob/master/src/Core/Models/Api/Response/CipherResponseModel.cs let mut json_object = json!({ - "Object": "cipherDetails", - "Id": self.uuid, - "Type": self.atype, - "CreationDate": format_date(&self.created_at), - "RevisionDate": format_date(&self.updated_at), - "DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))), - "Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32), - "OrganizationId": self.organization_uuid, - "Key": self.key, - "Attachments": attachments_json, + "object": "cipherDetails", + "id": self.uuid, + "type": self.atype, + "creationDate": format_date(&self.created_at), + "revisionDate": format_date(&self.updated_at), + "deletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))), + "reprompt": 
self.reprompt.unwrap_or(RepromptType::None as i32), + "organizationId": self.organization_uuid, + "key": self.key, + "attachments": attachments_json, // We have UseTotp set to true by default within the Organization model. // This variable together with UsersGetPremium is used to show or hide the TOTP counter. - "OrganizationUseTotp": true, + "organizationUseTotp": true, // This field is specific to the cipherDetails type. - "CollectionIds": collection_ids, + "collectionIds": collection_ids, - "Name": self.name, - "Notes": self.notes, - "Fields": fields_json, + "name": self.name, + "notes": self.notes, + "fields": fields_json, - "Data": data_json, + "data": data_json, - "PasswordHistory": password_history_json, + "passwordHistory": password_history_json, // All Cipher types are included by default as null, but only the matching one will be populated - "Login": null, - "SecureNote": null, - "Card": null, - "Identity": null, + "login": null, + "secureNote": null, + "card": null, + "identity": null, }); // These values are only needed for user/default syncs // Not during an organizational sync like `get_org_details` // Skip adding these fields in that case if sync_type == CipherSyncType::User { - json_object["FolderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { + json_object["folderId"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string()) } else { self.get_folder_uuid(user_uuid, conn).await }); - json_object["Favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { + json_object["favorite"] = json!(if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_favorites.contains(&self.uuid) } else { self.is_favorite(user_uuid, conn).await @@ -247,15 +272,15 @@ impl Cipher { // These values are true by default, but can be false if the // cipher belongs to a collection or group where the org owner has enabled // the "Read Only" or "Hide Passwords" 
restrictions for the user. - json_object["Edit"] = json!(!read_only); - json_object["ViewPassword"] = json!(!hide_passwords); + json_object["edit"] = json!(!read_only); + json_object["viewPassword"] = json!(!hide_passwords); } let key = match self.atype { - 1 => "Login", - 2 => "SecureNote", - 3 => "Card", - 4 => "Identity", + 1 => "login", + 2 => "secureNote", + 3 => "card", + 4 => "identity", _ => panic!("Wrong type"), }; @@ -361,7 +386,7 @@ impl Cipher { pub async fn purge_trash(conn: &mut DbConn) { if let Some(auto_delete_days) = CONFIG.trash_auto_delete_days() { let now = Utc::now().naive_utc(); - let dt = now - Duration::days(auto_delete_days); + let dt = now - TimeDelta::try_days(auto_delete_days).unwrap(); for cipher in Self::find_deleted_before(&dt, conn).await { cipher.delete(conn).await.ok(); } @@ -431,7 +456,7 @@ impl Cipher { } if let Some(ref org_uuid) = self.organization_uuid { if let Some(cipher_sync_data) = cipher_sync_data { - return cipher_sync_data.user_group_full_access_for_organizations.get(org_uuid).is_some(); + return cipher_sync_data.user_group_full_access_for_organizations.contains(org_uuid); } else { return Group::is_in_full_access_group(user_uuid, org_uuid, conn).await; } @@ -754,30 +779,123 @@ impl Cipher { } pub async fn get_collections(&self, user_id: String, conn: &mut DbConn) -> Vec { - db_run! 
{conn: { - ciphers_collections::table - .inner_join(collections::table.on( - collections::uuid.eq(ciphers_collections::collection_uuid) - )) - .inner_join(users_organizations::table.on( - users_organizations::org_uuid.eq(collections::org_uuid).and( - users_organizations::user_uuid.eq(user_id.clone()) - ) - )) - .left_join(users_collections::table.on( - users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and( - users_collections::user_uuid.eq(user_id.clone()) - ) - )) - .filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) - .filter(users_collections::user_uuid.eq(user_id).or( // User has access to collection - users_organizations::access_all.eq(true).or( // User has access all - users_organizations::atype.le(UserOrgType::Admin as i32) // User is admin or owner - ) - )) - .select(ciphers_collections::collection_uuid) - .load::(conn).unwrap_or_default() - }} + if CONFIG.org_groups_enabled() { + db_run! {conn: { + ciphers_collections::table + .filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) + .inner_join(collections::table.on( + collections::uuid.eq(ciphers_collections::collection_uuid) + )) + .left_join(users_organizations::table.on( + users_organizations::org_uuid.eq(collections::org_uuid) + .and(users_organizations::user_uuid.eq(user_id.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(ciphers_collections::collection_uuid) + .and(users_collections::user_uuid.eq(user_id.clone())) + )) + .left_join(groups_users::table.on( + groups_users::users_organizations_uuid.eq(users_organizations::uuid) + )) + .left_join(groups::table.on(groups::uuid.eq(groups_users::groups_uuid))) + .left_join(collections_groups::table.on( + collections_groups::collections_uuid.eq(ciphers_collections::collection_uuid) + .and(collections_groups::groups_uuid.eq(groups::uuid)) + )) + .filter(users_organizations::access_all.eq(true) // User has access all + .or(users_collections::user_uuid.eq(user_id) // User has 
access to collection + .and(users_collections::read_only.eq(false))) + .or(groups::access_all.eq(true)) // Access via groups + .or(collections_groups::collections_uuid.is_not_null() // Access via groups + .and(collections_groups::read_only.eq(false))) + ) + .select(ciphers_collections::collection_uuid) + .load::(conn).unwrap_or_default() + }} + } else { + db_run! {conn: { + ciphers_collections::table + .filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) + .inner_join(collections::table.on( + collections::uuid.eq(ciphers_collections::collection_uuid) + )) + .inner_join(users_organizations::table.on( + users_organizations::org_uuid.eq(collections::org_uuid) + .and(users_organizations::user_uuid.eq(user_id.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(ciphers_collections::collection_uuid) + .and(users_collections::user_uuid.eq(user_id.clone())) + )) + .filter(users_organizations::access_all.eq(true) // User has access all + .or(users_collections::user_uuid.eq(user_id) // User has access to collection + .and(users_collections::read_only.eq(false))) + ) + .select(ciphers_collections::collection_uuid) + .load::(conn).unwrap_or_default() + }} + } + } + + pub async fn get_admin_collections(&self, user_id: String, conn: &mut DbConn) -> Vec { + if CONFIG.org_groups_enabled() { + db_run! 
{conn: { + ciphers_collections::table + .filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) + .inner_join(collections::table.on( + collections::uuid.eq(ciphers_collections::collection_uuid) + )) + .left_join(users_organizations::table.on( + users_organizations::org_uuid.eq(collections::org_uuid) + .and(users_organizations::user_uuid.eq(user_id.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(ciphers_collections::collection_uuid) + .and(users_collections::user_uuid.eq(user_id.clone())) + )) + .left_join(groups_users::table.on( + groups_users::users_organizations_uuid.eq(users_organizations::uuid) + )) + .left_join(groups::table.on(groups::uuid.eq(groups_users::groups_uuid))) + .left_join(collections_groups::table.on( + collections_groups::collections_uuid.eq(ciphers_collections::collection_uuid) + .and(collections_groups::groups_uuid.eq(groups::uuid)) + )) + .filter(users_organizations::access_all.eq(true) // User has access all + .or(users_collections::user_uuid.eq(user_id) // User has access to collection + .and(users_collections::read_only.eq(false))) + .or(groups::access_all.eq(true)) // Access via groups + .or(collections_groups::collections_uuid.is_not_null() // Access via groups + .and(collections_groups::read_only.eq(false))) + .or(users_organizations::atype.le(UserOrgType::Admin as i32)) // User is admin or owner + ) + .select(ciphers_collections::collection_uuid) + .load::(conn).unwrap_or_default() + }} + } else { + db_run! 
{conn: { + ciphers_collections::table + .filter(ciphers_collections::cipher_uuid.eq(&self.uuid)) + .inner_join(collections::table.on( + collections::uuid.eq(ciphers_collections::collection_uuid) + )) + .inner_join(users_organizations::table.on( + users_organizations::org_uuid.eq(collections::org_uuid) + .and(users_organizations::user_uuid.eq(user_id.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(ciphers_collections::collection_uuid) + .and(users_collections::user_uuid.eq(user_id.clone())) + )) + .filter(users_organizations::access_all.eq(true) // User has access all + .or(users_collections::user_uuid.eq(user_id) // User has access to collection + .and(users_collections::read_only.eq(false))) + .or(users_organizations::atype.le(UserOrgType::Admin as i32)) // User is admin or owner + ) + .select(ciphers_collections::collection_uuid) + .load::(conn).unwrap_or_default() + }} + } } /// Return a Vec with (cipher_uuid, collection_uuid) diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 4d3ccd2e..3ba6c516 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{CollectionGroup, User, UserOrgStatus, UserOrgType, UserOrganization}; +use super::{CollectionGroup, GroupUser, User, UserOrgStatus, UserOrgType, UserOrganization}; use crate::CONFIG; db_object! 
{ @@ -49,11 +49,11 @@ impl Collection { pub fn to_json(&self) -> Value { json!({ - "ExternalId": self.external_id, - "Id": self.uuid, - "OrganizationId": self.org_uuid, - "Name": self.name, - "Object": "collection", + "externalId": self.external_id, + "id": self.uuid, + "organizationId": self.org_uuid, + "name": self.name, + "object": "collection", }) } @@ -97,11 +97,20 @@ impl Collection { }; let mut json_object = self.to_json(); - json_object["Object"] = json!("collectionDetails"); - json_object["ReadOnly"] = json!(read_only); - json_object["HidePasswords"] = json!(hide_passwords); + json_object["object"] = json!("collectionDetails"); + json_object["readOnly"] = json!(read_only); + json_object["hidePasswords"] = json!(hide_passwords); json_object } + + pub async fn can_access_collection(org_user: &UserOrganization, col_id: &str, conn: &mut DbConn) -> bool { + org_user.has_status(UserOrgStatus::Confirmed) + && (org_user.has_full_access() + || CollectionUser::has_access_to_collection_by_user(col_id, &org_user.user_uuid, conn).await + || (CONFIG.org_groups_enabled() + && (GroupUser::has_full_access_by_member(&org_user.org_uuid, &org_user.uuid, conn).await + || GroupUser::has_access_to_collection_by_member(col_id, &org_user.uuid, conn).await))) + } } use crate::db::DbConn; @@ -252,17 +261,6 @@ impl Collection { } } - // Check if a user has access to a specific collection - // FIXME: This needs to be reviewed. The query used by `find_by_user_uuid` could be adjusted to filter when needed. - // For now this is a good solution without making to much changes. 
- pub async fn has_access_by_collection_and_user_uuid( - collection_uuid: &str, - user_uuid: &str, - conn: &mut DbConn, - ) -> bool { - Self::find_by_user_uuid(user_uuid.to_owned(), conn).await.into_iter().any(|c| c.uuid == collection_uuid) - } - pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &mut DbConn) -> Vec { Self::find_by_user_uuid(user_uuid.to_owned(), conn) .await @@ -373,48 +371,64 @@ impl Collection { pub async fn is_writable_by_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool { let user_uuid = user_uuid.to_string(); - db_run! { conn: { - collections::table - .left_join(users_collections::table.on( - users_collections::collection_uuid.eq(collections::uuid).and( - users_collections::user_uuid.eq(user_uuid.clone()) - ) - )) - .left_join(users_organizations::table.on( - collections::org_uuid.eq(users_organizations::org_uuid).and( - users_organizations::user_uuid.eq(user_uuid) - ) - )) - .left_join(groups_users::table.on( - groups_users::users_organizations_uuid.eq(users_organizations::uuid) - )) - .left_join(groups::table.on( - groups::uuid.eq(groups_users::groups_uuid) - )) - .left_join(collections_groups::table.on( - collections_groups::groups_uuid.eq(groups_users::groups_uuid).and( - collections_groups::collections_uuid.eq(collections::uuid) - ) - )) - .filter(collections::uuid.eq(&self.uuid)) - .filter( - users_collections::collection_uuid.eq(&self.uuid).and(users_collections::read_only.eq(false)).or(// Directly accessed collection - users_organizations::access_all.eq(true).or( // access_all in Organization - users_organizations::atype.le(UserOrgType::Admin as i32) // Org admin or owner - )).or( - groups::access_all.eq(true) // access_all in groups - ).or( // access via groups - groups_users::users_organizations_uuid.eq(users_organizations::uuid).and( - collections_groups::collections_uuid.is_not_null().and( - collections_groups::read_only.eq(false)) + if CONFIG.org_groups_enabled() { + db_run! 
{ conn: { + collections::table + .filter(collections::uuid.eq(&self.uuid)) + .inner_join(users_organizations::table.on( + collections::org_uuid.eq(users_organizations::org_uuid) + .and(users_organizations::user_uuid.eq(user_uuid.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(collections::uuid) + .and(users_collections::user_uuid.eq(user_uuid)) + )) + .left_join(groups_users::table.on( + groups_users::users_organizations_uuid.eq(users_organizations::uuid) + )) + .left_join(groups::table.on( + groups::uuid.eq(groups_users::groups_uuid) + )) + .left_join(collections_groups::table.on( + collections_groups::groups_uuid.eq(groups_users::groups_uuid) + .and(collections_groups::collections_uuid.eq(collections::uuid)) + )) + .filter(users_organizations::atype.le(UserOrgType::Admin as i32) // Org admin or owner + .or(users_organizations::access_all.eq(true)) // access_all via membership + .or(users_collections::collection_uuid.eq(&self.uuid) // write access given to collection + .and(users_collections::read_only.eq(false))) + .or(groups::access_all.eq(true)) // access_all via group + .or(collections_groups::collections_uuid.is_not_null() // write access given via group + .and(collections_groups::read_only.eq(false))) ) - ) - ) - .count() - .first::(conn) - .ok() - .unwrap_or(0) != 0 - }} + .count() + .first::(conn) + .ok() + .unwrap_or(0) != 0 + }} + } else { + db_run! 
{ conn: { + collections::table + .filter(collections::uuid.eq(&self.uuid)) + .inner_join(users_organizations::table.on( + collections::org_uuid.eq(users_organizations::org_uuid) + .and(users_organizations::user_uuid.eq(user_uuid.clone())) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(collections::uuid) + .and(users_collections::user_uuid.eq(user_uuid)) + )) + .filter(users_organizations::atype.le(UserOrgType::Admin as i32) // Org admin or owner + .or(users_organizations::access_all.eq(true)) // access_all via membership + .or(users_collections::collection_uuid.eq(&self.uuid) // write access given to collection + .and(users_collections::read_only.eq(false))) + ) + .count() + .first::(conn) + .ok() + .unwrap_or(0) != 0 + }} + } } pub async fn hide_passwords_for_user(&self, user_uuid: &str, conn: &mut DbConn) -> bool { @@ -634,7 +648,7 @@ impl CollectionUser { db_run! { conn: { for user in collectionusers { - diesel::delete(users_collections::table.filter( + let _: () = diesel::delete(users_collections::table.filter( users_collections::user_uuid.eq(user_uuid) .and(users_collections::collection_uuid.eq(user.collection_uuid)) )) @@ -644,6 +658,10 @@ impl CollectionUser { Ok(()) }} } + + pub async fn has_access_to_collection_by_user(col_id: &str, user_uuid: &str, conn: &mut DbConn) -> bool { + Self::find_by_collection_and_user(col_id, user_uuid, conn).await.is_some() + } } /// Database methods diff --git a/src/db/models/device.rs b/src/db/models/device.rs index de612e69..60c63589 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -67,8 +67,8 @@ impl Device { } // Update the expiration of the device and the last update date - let time_now = Utc::now().naive_utc(); - self.updated_at = time_now; + let time_now = Utc::now(); + self.updated_at = time_now.naive_utc(); // --- // Disabled these keys to be added to the JWT since they could cause the JWT to get too large diff --git a/src/db/models/emergency_access.rs 
b/src/db/models/emergency_access.rs index ccb21e5b..ecfe86fe 100644 --- a/src/db/models/emergency_access.rs +++ b/src/db/models/emergency_access.rs @@ -58,11 +58,11 @@ impl EmergencyAccess { pub fn to_json(&self) -> Value { json!({ - "Id": self.uuid, - "Status": self.status, - "Type": self.atype, - "WaitTimeDays": self.wait_time_days, - "Object": "emergencyAccess", + "id": self.uuid, + "status": self.status, + "type": self.atype, + "waitTimeDays": self.wait_time_days, + "object": "emergencyAccess", }) } @@ -70,36 +70,43 @@ impl EmergencyAccess { let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found."); json!({ - "Id": self.uuid, - "Status": self.status, - "Type": self.atype, - "WaitTimeDays": self.wait_time_days, - "GrantorId": grantor_user.uuid, - "Email": grantor_user.email, - "Name": grantor_user.name, - "Object": "emergencyAccessGrantorDetails", + "id": self.uuid, + "status": self.status, + "type": self.atype, + "waitTimeDays": self.wait_time_days, + "grantorId": grantor_user.uuid, + "email": grantor_user.email, + "name": grantor_user.name, + "object": "emergencyAccessGrantorDetails", }) } - pub async fn to_json_grantee_details(&self, conn: &mut DbConn) -> Value { + pub async fn to_json_grantee_details(&self, conn: &mut DbConn) -> Option { let grantee_user = if let Some(grantee_uuid) = self.grantee_uuid.as_deref() { - Some(User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found.")) + User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found.") } else if let Some(email) = self.email.as_deref() { - Some(User::find_by_mail(email, conn).await.expect("Grantee user not found.")) + match User::find_by_mail(email, conn).await { + Some(user) => user, + None => { + // remove outstanding invitations which should not exist + let _ = Self::delete_all_by_grantee_email(email, conn).await; + return None; + } + } } else { - None + return None; }; - json!({ - "Id": self.uuid, - "Status": 
self.status, - "Type": self.atype, - "WaitTimeDays": self.wait_time_days, - "GranteeId": grantee_user.as_ref().map_or("", |u| &u.uuid), - "Email": grantee_user.as_ref().map_or("", |u| &u.email), - "Name": grantee_user.as_ref().map_or("", |u| &u.name), - "Object": "emergencyAccessGranteeDetails", - }) + Some(json!({ + "id": self.uuid, + "status": self.status, + "type": self.atype, + "waitTimeDays": self.wait_time_days, + "granteeId": grantee_user.uuid, + "email": grantee_user.email, + "name": grantee_user.name, + "object": "emergencyAccessGranteeDetails", + })) } } @@ -174,7 +181,7 @@ impl EmergencyAccess { // Update the grantee so that it will refresh it's status. User::update_uuid_revision(self.grantee_uuid.as_ref().expect("Error getting grantee"), conn).await; self.status = status; - self.updated_at = date.to_owned(); + date.clone_into(&mut self.updated_at); db_run! {conn: { crate::util::retry(|| { @@ -192,7 +199,7 @@ impl EmergencyAccess { conn: &mut DbConn, ) -> EmptyResult { self.last_notification_at = Some(date.to_owned()); - self.updated_at = date.to_owned(); + date.clone_into(&mut self.updated_at); db_run! {conn: { crate::util::retry(|| { @@ -214,6 +221,13 @@ impl EmergencyAccess { Ok(()) } + pub async fn delete_all_by_grantee_email(grantee_email: &str, conn: &mut DbConn) -> EmptyResult { + for ea in Self::find_all_invited_by_grantee_email(grantee_email, conn).await { + ea.delete(conn).await?; + } + Ok(()) + } + pub async fn delete(self, conn: &mut DbConn) -> EmptyResult { User::update_uuid_revision(&self.grantor_uuid, conn).await; @@ -224,15 +238,6 @@ impl EmergencyAccess { }} } - pub async fn find_by_uuid(uuid: &str, conn: &mut DbConn) -> Option { - db_run! 
{ conn: { - emergency_access::table - .filter(emergency_access::uuid.eq(uuid)) - .first::(conn) - .ok().from_db() - }} - } - pub async fn find_by_grantor_uuid_and_grantee_uuid_or_email( grantor_uuid: &str, grantee_uuid: &str, @@ -267,6 +272,26 @@ impl EmergencyAccess { }} } + pub async fn find_by_uuid_and_grantee_uuid(uuid: &str, grantee_uuid: &str, conn: &mut DbConn) -> Option { + db_run! { conn: { + emergency_access::table + .filter(emergency_access::uuid.eq(uuid)) + .filter(emergency_access::grantee_uuid.eq(grantee_uuid)) + .first::(conn) + .ok().from_db() + }} + } + + pub async fn find_by_uuid_and_grantee_email(uuid: &str, grantee_email: &str, conn: &mut DbConn) -> Option { + db_run! { conn: { + emergency_access::table + .filter(emergency_access::uuid.eq(uuid)) + .filter(emergency_access::email.eq(grantee_email)) + .first::(conn) + .ok().from_db() + }} + } + pub async fn find_all_by_grantee_uuid(grantee_uuid: &str, conn: &mut DbConn) -> Vec { db_run! { conn: { emergency_access::table @@ -285,6 +310,15 @@ impl EmergencyAccess { }} } + pub async fn find_all_invited_by_grantee_email(grantee_email: &str, conn: &mut DbConn) -> Vec { + db_run! { conn: { + emergency_access::table + .filter(emergency_access::email.eq(grantee_email)) + .filter(emergency_access::status.eq(EmergencyAccessStatus::Invited as i32)) + .load::(conn).expect("Error loading emergency_access").from_db() + }} + } + pub async fn find_all_by_grantor_uuid(grantor_uuid: &str, conn: &mut DbConn) -> Vec { db_run! 
{ conn: { emergency_access::table @@ -292,6 +326,21 @@ impl EmergencyAccess { .load::(conn).expect("Error loading emergency_access").from_db() }} } + + pub async fn accept_invite(&mut self, grantee_uuid: &str, grantee_email: &str, conn: &mut DbConn) -> EmptyResult { + if self.email.is_none() || self.email.as_ref().unwrap() != grantee_email { + err!("User email does not match invite."); + } + + if self.status == EmergencyAccessStatus::Accepted as i32 { + err!("Emergency contact already accepted."); + } + + self.status = EmergencyAccessStatus::Accepted as i32; + self.grantee_uuid = Some(String::from(grantee_uuid)); + self.email = None; + self.save(conn).await + } } // endregion diff --git a/src/db/models/event.rs b/src/db/models/event.rs index af2f6c66..22d8fb00 100644 --- a/src/db/models/event.rs +++ b/src/db/models/event.rs @@ -3,7 +3,7 @@ use serde_json::Value; use crate::{api::EmptyResult, error::MapResult, CONFIG}; -use chrono::{Duration, NaiveDateTime, Utc}; +use chrono::{NaiveDateTime, TimeDelta, Utc}; // https://bitwarden.com/help/event-logs/ @@ -316,7 +316,7 @@ impl Event { pub async fn clean_events(conn: &mut DbConn) -> EmptyResult { if let Some(days_to_retain) = CONFIG.events_days_retain() { - let dt = Utc::now().naive_utc() - Duration::days(days_to_retain); + let dt = Utc::now().naive_utc() - TimeDelta::try_days(days_to_retain).unwrap(); db_run! 
{ conn: { diesel::delete(event::table.filter(event::event_date.lt(dt))) .execute(conn) diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index 9385e78d..5370c9dd 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -43,10 +43,10 @@ impl Folder { use crate::util::format_date; json!({ - "Id": self.uuid, - "RevisionDate": format_date(&self.updated_at), - "Name": self.name, - "Object": "folder", + "id": self.uuid, + "revisionDate": format_date(&self.updated_at), + "name": self.name, + "object": "folder", }) } } diff --git a/src/db/models/group.rs b/src/db/models/group.rs index 670e3114..f6ccc710 100644 --- a/src/db/models/group.rs +++ b/src/db/models/group.rs @@ -58,14 +58,14 @@ impl Group { use crate::util::format_date; json!({ - "Id": self.uuid, - "OrganizationId": self.organizations_uuid, - "Name": self.name, - "AccessAll": self.access_all, - "ExternalId": self.external_id, - "CreationDate": format_date(&self.creation_date), - "RevisionDate": format_date(&self.revision_date), - "Object": "group" + "id": self.uuid, + "organizationId": self.organizations_uuid, + "name": self.name, + "accessAll": self.access_all, + "externalId": self.external_id, + "creationDate": format_date(&self.creation_date), + "revisionDate": format_date(&self.revision_date), + "object": "group" }) } @@ -75,21 +75,21 @@ impl Group { .iter() .map(|entry| { json!({ - "Id": entry.collections_uuid, - "ReadOnly": entry.read_only, - "HidePasswords": entry.hide_passwords + "id": entry.collections_uuid, + "readOnly": entry.read_only, + "hidePasswords": entry.hide_passwords }) }) .collect(); json!({ - "Id": self.uuid, - "OrganizationId": self.organizations_uuid, - "Name": self.name, - "AccessAll": self.access_all, - "ExternalId": self.external_id, - "Collections": collections_groups, - "Object": "groupDetails" + "id": self.uuid, + "organizationId": self.organizations_uuid, + "name": self.name, + "accessAll": self.access_all, + "externalId": self.external_id, + "collections": 
collections_groups, + "object": "groupDetails" }) } @@ -203,10 +203,11 @@ impl Group { }} } - pub async fn find_by_external_id(id: &str, conn: &mut DbConn) -> Option { + pub async fn find_by_external_id_and_org(external_id: &str, org_uuid: &str, conn: &mut DbConn) -> Option { db_run! { conn: { groups::table - .filter(groups::external_id.eq(id)) + .filter(groups::external_id.eq(external_id)) + .filter(groups::organizations_uuid.eq(org_uuid)) .first::(conn) .ok() .from_db() @@ -486,6 +487,39 @@ impl GroupUser { }} } + pub async fn has_access_to_collection_by_member( + collection_uuid: &str, + member_uuid: &str, + conn: &mut DbConn, + ) -> bool { + db_run! { conn: { + groups_users::table + .inner_join(collections_groups::table.on( + collections_groups::groups_uuid.eq(groups_users::groups_uuid) + )) + .filter(collections_groups::collections_uuid.eq(collection_uuid)) + .filter(groups_users::users_organizations_uuid.eq(member_uuid)) + .count() + .first::(conn) + .unwrap_or(0) != 0 + }} + } + + pub async fn has_full_access_by_member(org_uuid: &str, member_uuid: &str, conn: &mut DbConn) -> bool { + db_run! 
{ conn: { + groups_users::table + .inner_join(groups::table.on( + groups::uuid.eq(groups_users::groups_uuid) + )) + .filter(groups::organizations_uuid.eq(org_uuid)) + .filter(groups::access_all.eq(true)) + .filter(groups_users::users_organizations_uuid.eq(member_uuid)) + .count() + .first::(conn) + .unwrap_or(0) != 0 + }} + } + pub async fn update_user_revision(&self, conn: &mut DbConn) { match UserOrganization::find_by_uuid(&self.users_organizations_uuid, conn).await { Some(user) => User::update_uuid_revision(&user.user_uuid, conn).await, diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 8b3f1271..d1e8aa0f 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -4,7 +4,6 @@ use serde_json::Value; use crate::api::EmptyResult; use crate::db::DbConn; use crate::error::MapResult; -use crate::util::UpCase; use super::{TwoFactor, UserOrgStatus, UserOrgType, UserOrganization}; @@ -39,16 +38,18 @@ pub enum OrgPolicyType { // https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/SendOptionsPolicyData.cs #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct SendOptionsPolicyData { - pub DisableHideEmail: bool, + #[serde(rename = "disableHideEmail", alias = "DisableHideEmail")] + pub disable_hide_email: bool, } // https://github.com/bitwarden/server/blob/5cbdee137921a19b1f722920f0fa3cd45af2ef0f/src/Core/Models/Data/Organizations/Policies/ResetPasswordDataModel.cs #[derive(Deserialize)] -#[allow(non_snake_case)] +#[serde(rename_all = "camelCase")] pub struct ResetPasswordDataModel { - pub AutoEnrollEnabled: bool, + #[serde(rename = "autoEnrollEnabled", alias = "AutoEnrollEnabled")] + pub auto_enroll_enabled: bool, } pub type OrgPolicyResult = Result<(), OrgPolicyErr>; @@ -78,12 +79,12 @@ impl OrgPolicy { pub fn to_json(&self) -> Value { let data_json: Value = serde_json::from_str(&self.data).unwrap_or(Value::Null); 
json!({ - "Id": self.uuid, - "OrganizationId": self.org_uuid, - "Type": self.atype, - "Data": data_json, - "Enabled": self.enabled, - "Object": "policy", + "id": self.uuid, + "organizationId": self.org_uuid, + "type": self.atype, + "data": data_json, + "enabled": self.enabled, + "object": "policy", }) } } @@ -114,7 +115,7 @@ impl OrgPolicy { // We need to make sure we're not going to violate the unique constraint on org_uuid and atype. // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does // not support multiple constraints on ON CONFLICT clauses. - diesel::delete( + let _: () = diesel::delete( org_policies::table .filter(org_policies::org_uuid.eq(&self.org_uuid)) .filter(org_policies::atype.eq(&self.atype)), @@ -307,9 +308,9 @@ impl OrgPolicy { pub async fn org_is_reset_password_auto_enroll(org_uuid: &str, conn: &mut DbConn) -> bool { match OrgPolicy::find_by_org_and_type(org_uuid, OrgPolicyType::ResetPassword, conn).await { - Some(policy) => match serde_json::from_str::>(&policy.data) { + Some(policy) => match serde_json::from_str::(&policy.data) { Ok(opts) => { - return policy.enabled && opts.data.AutoEnrollEnabled; + return policy.enabled && opts.auto_enroll_enabled; } _ => error!("Failed to deserialize ResetPasswordDataModel: {}", policy.data), }, @@ -327,9 +328,9 @@ impl OrgPolicy { { if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, &policy.org_uuid, conn).await { if user.atype < UserOrgType::Admin { - match serde_json::from_str::>(&policy.data) { + match serde_json::from_str::(&policy.data) { Ok(opts) => { - if opts.data.DisableHideEmail { + if opts.disable_hide_email { return true; } } @@ -340,4 +341,11 @@ impl OrgPolicy { } false } + + pub async fn is_enabled_by_org(org_uuid: &str, policy_type: OrgPolicyType, conn: &mut DbConn) -> bool { + if let Some(policy) = OrgPolicy::find_by_org_and_type(org_uuid, policy_type, conn).await { + return policy.enabled; + } + false + } } diff --git 
a/src/db/models/organization.rs b/src/db/models/organization.rs index 180b1c1d..f378ba40 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -153,39 +153,39 @@ impl Organization { // https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/Organizations/OrganizationResponseModel.cs pub fn to_json(&self) -> Value { json!({ - "Id": self.uuid, - "Identifier": null, // not supported by us - "Name": self.name, - "Seats": 10, // The value doesn't matter, we don't check server-side - // "MaxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side - "MaxCollections": 10, // The value doesn't matter, we don't check server-side - "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side - "Use2fa": true, - "UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet) - "UseEvents": CONFIG.org_events_enabled(), - "UseGroups": CONFIG.org_groups_enabled(), - "UseTotp": true, - "UsePolicies": true, - // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "UseSso": false, // Not supported - // "UseKeyConnector": false, // Not supported - "SelfHost": true, - "UseApi": true, - "HasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(), - "UseResetPassword": CONFIG.mail_enabled(), - - "BusinessName": null, - "BusinessAddress1": null, - "BusinessAddress2": null, - "BusinessAddress3": null, - "BusinessCountry": null, - "BusinessTaxNumber": null, - - "BillingEmail": self.billing_email, - "Plan": "TeamsAnnually", - "PlanType": 5, // TeamsAnnually plan - "UsersGetPremium": true, - "Object": "organization", + "id": self.uuid, + "identifier": null, // not supported by us + "name": self.name, + "seats": 10, // The value doesn't matter, we don't check server-side + // "maxAutoscaleSeats": null, // The value doesn't matter, we don't check server-side + "maxCollections": 10, // The value doesn't matter, we don't check 
server-side + "maxStorageGb": 10, // The value doesn't matter, we don't check server-side + "use2fa": true, + "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet) + "useEvents": CONFIG.org_events_enabled(), + "useGroups": CONFIG.org_groups_enabled(), + "useTotp": true, + "usePolicies": true, + // "useScim": false, // Not supported (Not AGPLv3 Licensed) + "useSso": false, // Not supported + // "useKeyConnector": false, // Not supported + "selfHost": true, + "useApi": true, + "hasPublicAndPrivateKeys": self.private_key.is_some() && self.public_key.is_some(), + "useResetPassword": CONFIG.mail_enabled(), + + "businessName": null, + "businessAddress1": null, + "businessAddress2": null, + "businessAddress3": null, + "businessCountry": null, + "businessTaxNumber": null, + + "billingEmail": self.billing_email, + "plan": "TeamsAnnually", + "planType": 5, // TeamsAnnually plan + "usersGetPremium": true, + "object": "organization", }) } } @@ -316,6 +316,7 @@ impl Organization { UserOrganization::delete_all_by_organization(&self.uuid, conn).await?; OrgPolicy::delete_all_by_organization(&self.uuid, conn).await?; Group::delete_all_by_organization(&self.uuid, conn).await?; + OrganizationApiKey::delete_all_by_organization(&self.uuid, conn).await?; db_run! 
{ conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) @@ -344,65 +345,81 @@ impl UserOrganization { pub async fn to_json(&self, conn: &mut DbConn) -> Value { let org = Organization::find_by_uuid(&self.org_uuid, conn).await.unwrap(); + let permissions = json!({ + // TODO: Add support for Custom User Roles + // See: https://bitwarden.com/help/article/user-types-access-control/#custom-role + "accessEventLogs": false, + "accessImportExport": false, + "accessReports": false, + "createNewCollections": false, + "editAnyCollection": false, + "deleteAnyCollection": false, + "editAssignedCollections": false, + "deleteAssignedCollections": false, + "manageGroups": false, + "managePolicies": false, + "manageSso": false, // Not supported + "manageUsers": false, + "manageResetPassword": false, + "manageScim": false // Not supported (Not AGPLv3 Licensed) + }); + // https://github.com/bitwarden/server/blob/13d1e74d6960cf0d042620b72d85bf583a4236f7/src/Api/Models/Response/ProfileOrganizationResponseModel.cs json!({ - "Id": self.org_uuid, - "Identifier": null, // Not supported - "Name": org.name, - "Seats": 10, // The value doesn't matter, we don't check server-side - "MaxCollections": 10, // The value doesn't matter, we don't check server-side - "UsersGetPremium": true, - "Use2fa": true, - "UseDirectory": false, // Is supported, but this value isn't checked anywhere (yet) - "UseEvents": CONFIG.org_events_enabled(), - "UseGroups": CONFIG.org_groups_enabled(), - "UseTotp": true, - // "UseScim": false, // Not supported (Not AGPLv3 Licensed) - "UsePolicies": true, - "UseApi": true, - "SelfHost": true, - "HasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(), - "ResetPasswordEnrolled": self.reset_password_key.is_some(), - "UseResetPassword": CONFIG.mail_enabled(), - "SsoBound": false, // Not supported - "UseSso": false, // Not supported - "ProviderId": null, - "ProviderName": null, - // "KeyConnectorEnabled": false, - // 
"KeyConnectorUrl": null, - - // TODO: Add support for Custom User Roles - // See: https://bitwarden.com/help/article/user-types-access-control/#custom-role - // "Permissions": { - // "AccessEventLogs": false, - // "AccessImportExport": false, - // "AccessReports": false, - // "ManageAllCollections": false, - // "CreateNewCollections": false, - // "EditAnyCollection": false, - // "DeleteAnyCollection": false, - // "ManageAssignedCollections": false, - // "editAssignedCollections": false, - // "deleteAssignedCollections": false, - // "ManageCiphers": false, - // "ManageGroups": false, - // "ManagePolicies": false, - // "ManageResetPassword": false, - // "ManageSso": false, // Not supported - // "ManageUsers": false, - // "ManageScim": false, // Not supported (Not AGPLv3 Licensed) - // }, - - "MaxStorageGb": 10, // The value doesn't matter, we don't check server-side + "id": self.org_uuid, + "identifier": null, // Not supported + "name": org.name, + "seats": 10, // The value doesn't matter, we don't check server-side + "maxCollections": 10, // The value doesn't matter, we don't check server-side + "usersGetPremium": true, + "use2fa": true, + "useDirectory": false, // Is supported, but this value isn't checked anywhere (yet) + "useEvents": CONFIG.org_events_enabled(), + "useGroups": CONFIG.org_groups_enabled(), + "useTotp": true, + "useScim": false, // Not supported (Not AGPLv3 Licensed) + "usePolicies": true, + "useApi": true, + "selfHost": true, + "hasPublicAndPrivateKeys": org.private_key.is_some() && org.public_key.is_some(), + "resetPasswordEnrolled": self.reset_password_key.is_some(), + "useResetPassword": CONFIG.mail_enabled(), + "ssoBound": false, // Not supported + "useSso": false, // Not supported + "useKeyConnector": false, + "useSecretsManager": false, + "usePasswordManager": true, + "useCustomPermissions": false, + "useActivateAutofillPolicy": false, + + "providerId": null, + "providerName": null, + "providerType": null, + "familySponsorshipFriendlyName": 
null, + "familySponsorshipAvailable": false, + "planProductType": 0, + "keyConnectorEnabled": false, + "keyConnectorUrl": null, + "familySponsorshipLastSyncDate": null, + "familySponsorshipValidUntil": null, + "familySponsorshipToDelete": null, + "accessSecretsManager": false, + "limitCollectionCreationDeletion": true, + "allowAdminAccessToAllCollectionItems": true, + "flexibleCollections": false, + + "permissions": permissions, + + "maxStorageGb": 10, // The value doesn't matter, we don't check server-side // These are per user - "UserId": self.user_uuid, - "Key": self.akey, - "Status": self.status, - "Type": self.atype, - "Enabled": true, + "userId": self.user_uuid, + "key": self.akey, + "status": self.status, + "type": self.atype, + "enabled": true, - "Object": "profileOrganization", + "object": "profileOrganization", }) } @@ -438,9 +455,9 @@ impl UserOrganization { .iter() .map(|cu| { json!({ - "Id": cu.collection_uuid, - "ReadOnly": cu.read_only, - "HidePasswords": cu.hide_passwords, + "id": cu.collection_uuid, + "readOnly": cu.read_only, + "hidePasswords": cu.hide_passwords, }) }) .collect() @@ -449,29 +466,29 @@ impl UserOrganization { }; json!({ - "Id": self.uuid, - "UserId": self.user_uuid, - "Name": user.name, - "Email": user.email, - "ExternalId": self.external_id, - "Groups": groups, - "Collections": collections, - - "Status": status, - "Type": self.atype, - "AccessAll": self.access_all, - "TwoFactorEnabled": twofactor_enabled, - "ResetPasswordEnrolled": self.reset_password_key.is_some(), - - "Object": "organizationUserUserDetails", + "id": self.uuid, + "userId": self.user_uuid, + "name": user.name, + "email": user.email, + "externalId": self.external_id, + "groups": groups, + "collections": collections, + + "status": status, + "type": self.atype, + "accessAll": self.access_all, + "twoFactorEnabled": twofactor_enabled, + "resetPasswordEnrolled": self.reset_password_key.is_some(), + + "object": "organizationUserUserDetails", }) } pub fn 
to_json_user_access_restrictions(&self, col_user: &CollectionUser) -> Value { json!({ - "Id": self.uuid, - "ReadOnly": col_user.read_only, - "HidePasswords": col_user.hide_passwords, + "id": self.uuid, + "readOnly": col_user.read_only, + "hidePasswords": col_user.hide_passwords, }) } @@ -485,9 +502,9 @@ impl UserOrganization { .iter() .map(|c| { json!({ - "Id": c.collection_uuid, - "ReadOnly": c.read_only, - "HidePasswords": c.hide_passwords, + "id": c.collection_uuid, + "readOnly": c.read_only, + "hidePasswords": c.hide_passwords, }) }) .collect() @@ -502,15 +519,15 @@ impl UserOrganization { }; json!({ - "Id": self.uuid, - "UserId": self.user_uuid, + "id": self.uuid, + "userId": self.user_uuid, - "Status": status, - "Type": self.atype, - "AccessAll": self.access_all, - "Collections": coll_uuids, + "status": status, + "type": self.atype, + "accessAll": self.access_all, + "collections": coll_uuids, - "Object": "organizationUserDetails", + "object": "organizationUserDetails", }) } pub async fn save(&self, conn: &mut DbConn) -> EmptyResult { @@ -887,6 +904,14 @@ impl OrganizationApiKey { .ok().from_db() }} } + + pub async fn delete_all_by_organization(org_uuid: &str, conn: &mut DbConn) -> EmptyResult { + db_run! { conn: { + diesel::delete(organization_api_key::table.filter(organization_api_key::org_uuid.eq(org_uuid))) + .execute(conn) + .map_res("Error removing organization api key from organization") + }} + } } #[cfg(test)] diff --git a/src/db/models/send.rs b/src/db/models/send.rs index 7cfeb478..36944281 100644 --- a/src/db/models/send.rs +++ b/src/db/models/send.rs @@ -1,6 +1,8 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; +use crate::util::LowerCase; + use super::User; db_object! 
{ @@ -122,48 +124,58 @@ impl Send { use data_encoding::BASE64URL_NOPAD; use uuid::Uuid; - let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); + let mut data = serde_json::from_str::<LowerCase<Value>>(&self.data).map(|d| d.data).unwrap_or_default(); + + // Mobile clients expect size to be a string instead of a number + if let Some(size) = data.get("size").and_then(|v| v.as_i64()) { + data["size"] = Value::String(size.to_string()); + } json!({ - "Id": self.uuid, - "AccessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()), - "Type": self.atype, - - "Name": self.name, - "Notes": self.notes, - "Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, - "File": if self.atype == SendType::File as i32 { Some(&data) } else { None }, - - "Key": self.akey, - "MaxAccessCount": self.max_access_count, - "AccessCount": self.access_count, - "Password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)), - "Disabled": self.disabled, - "HideEmail": self.hide_email, - - "RevisionDate": format_date(&self.revision_date), - "ExpirationDate": self.expiration_date.as_ref().map(format_date), - "DeletionDate": format_date(&self.deletion_date), - "Object": "send", + "id": self.uuid, + "accessId": BASE64URL_NOPAD.encode(Uuid::parse_str(&self.uuid).unwrap_or_default().as_bytes()), + "type": self.atype, + + "name": self.name, + "notes": self.notes, + "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, + "file": if self.atype == SendType::File as i32 { Some(&data) } else { None }, + + "key": self.akey, + "maxAccessCount": self.max_access_count, + "accessCount": self.access_count, + "password": self.password_hash.as_deref().map(|h| BASE64URL_NOPAD.encode(h)), + "disabled": self.disabled, + "hideEmail": self.hide_email, + + "revisionDate": format_date(&self.revision_date), + "expirationDate": self.expiration_date.as_ref().map(format_date), + "deletionDate": format_date(&self.deletion_date), +
"object": "send", }) } pub async fn to_json_access(&self, conn: &mut DbConn) -> Value { use crate::util::format_date; - let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); + let mut data = serde_json::from_str::>(&self.data).map(|d| d.data).unwrap_or_default(); + + // Mobile clients expect size to be a string instead of a number + if let Some(size) = data.get("size").and_then(|v| v.as_i64()) { + data["size"] = Value::String(size.to_string()); + } json!({ - "Id": self.uuid, - "Type": self.atype, + "id": self.uuid, + "type": self.atype, - "Name": self.name, - "Text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, - "File": if self.atype == SendType::File as i32 { Some(&data) } else { None }, + "name": self.name, + "text": if self.atype == SendType::Text as i32 { Some(&data) } else { None }, + "file": if self.atype == SendType::File as i32 { Some(&data) } else { None }, - "ExpirationDate": self.expiration_date.as_ref().map(format_date), - "CreatorIdentifier": self.creator_identifier(conn).await, - "Object": "send-access", + "expirationDate": self.expiration_date.as_ref().map(format_date), + "creatorIdentifier": self.creator_identifier(conn).await, + "object": "send-access", }) } } @@ -290,25 +302,18 @@ impl Send { pub async fn size_by_user(user_uuid: &str, conn: &mut DbConn) -> Option { let sends = Self::find_by_user(user_uuid, conn).await; - #[allow(non_snake_case)] - #[derive(serde::Deserialize, Default)] + #[derive(serde::Deserialize)] struct FileData { - Size: Option, - size: Option, + #[serde(rename = "size", alias = "Size")] + size: NumberOrString, } let mut total: i64 = 0; for send in sends { if send.atype == SendType::File as i32 { - let data: FileData = serde_json::from_str(&send.data).unwrap_or_default(); - - let size = match (data.size, data.Size) { - (Some(s), _) => s.into_i64(), - (_, Some(s)) => s.into_i64(), - (None, None) => continue, - }; - - if let Ok(size) = size { + if let Ok(size) = + 
serde_json::from_str::<FileData>(&send.data).map_err(Into::into).and_then(|d| d.size.into_i64()) + { total = total.checked_add(size)?; }; } diff --git a/src/db/models/two_factor.rs b/src/db/models/two_factor.rs index 93fb3385..9155c518 100644 --- a/src/db/models/two_factor.rs +++ b/src/db/models/two_factor.rs @@ -12,7 +12,7 @@ db_object! { pub atype: i32, pub enabled: bool, pub data: String, - pub last_used: i32, + pub last_used: i64, } } @@ -54,17 +54,17 @@ impl TwoFactor { pub fn to_json(&self) -> Value { json!({ - "Enabled": self.enabled, - "Key": "", // This key and value vary - "Object": "twoFactorAuthenticator" // This value varies + "enabled": self.enabled, + "key": "", // This key and value vary + "object": "twoFactorAuthenticator" // This value varies }) } pub fn to_json_provider(&self) -> Value { json!({ - "Enabled": self.enabled, - "Type": self.atype, - "Object": "twoFactorProvider" + "enabled": self.enabled, + "type": self.atype, + "object": "twoFactorProvider" }) } } @@ -95,7 +95,7 @@ impl TwoFactor { // We need to make sure we're not going to violate the unique constraint on user_uuid and atype. // This happens automatically on other DBMS backends due to replace_into(). PostgreSQL does // not support multiple constraints on ON CONFLICT clauses.
- diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(&self.user_uuid)).filter(twofactor::atype.eq(&self.atype))) + let _: () = diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(&self.user_uuid)).filter(twofactor::atype.eq(&self.atype))) .execute(conn) .map_res("Error deleting twofactor for insert")?; diff --git a/src/db/models/user.rs b/src/db/models/user.rs index 1475d637..a02b694d 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -1,4 +1,4 @@ -use chrono::{Duration, NaiveDateTime, Utc}; +use chrono::{NaiveDateTime, TimeDelta, Utc}; use serde_json::Value; use crate::crypto; @@ -202,7 +202,7 @@ impl User { let stamp_exception = UserStampException { routes: route_exception, security_stamp: self.security_stamp.clone(), - expire: (Utc::now().naive_utc() + Duration::minutes(2)).timestamp(), + expire: (Utc::now() + TimeDelta::try_minutes(2).unwrap()).timestamp(), }; self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); } @@ -240,24 +240,26 @@ impl User { }; json!({ - "_Status": status as i32, - "Id": self.uuid, - "Name": self.name, - "Email": self.email, - "EmailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(), - "Premium": true, - "MasterPasswordHint": self.password_hint, - "Culture": "en-US", - "TwoFactorEnabled": twofactor_enabled, - "Key": self.akey, - "PrivateKey": self.private_key, - "SecurityStamp": self.security_stamp, - "Organizations": orgs_json, - "Providers": [], - "ProviderOrganizations": [], - "ForcePasswordReset": false, - "AvatarColor": self.avatar_color, - "Object": "profile", + "_status": status as i32, + "id": self.uuid, + "name": self.name, + "email": self.email, + "emailVerified": !CONFIG.mail_enabled() || self.verified_at.is_some(), + "premium": true, + "premiumFromOrganization": false, + "masterPasswordHint": self.password_hint, + "culture": "en-US", + "twoFactorEnabled": twofactor_enabled, + "key": self.akey, + "privateKey": self.private_key, + 
"securityStamp": self.security_stamp, + "organizations": orgs_json, + "providers": [], + "providerOrganizations": [], + "forcePasswordReset": false, + "avatarColor": self.avatar_color, + "usesKeyConnector": false, + "object": "profile", }) } @@ -311,6 +313,7 @@ impl User { Send::delete_all_by_user(&self.uuid, conn).await?; EmergencyAccess::delete_all_by_user(&self.uuid, conn).await?; + EmergencyAccess::delete_all_by_grantee_email(&self.email, conn).await?; UserOrganization::delete_all_by_user(&self.uuid, conn).await?; Cipher::delete_all_by_user(&self.uuid, conn).await?; Favorite::delete_all_by_user(&self.uuid, conn).await?; diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 737e13b3..0fb286a4 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -160,7 +160,7 @@ table! { atype -> Integer, enabled -> Bool, data -> Text, - last_used -> Integer, + last_used -> BigInt, } } diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index 4e946b4f..26bf4b68 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -160,7 +160,7 @@ table! { atype -> Integer, enabled -> Bool, data -> Text, - last_used -> Integer, + last_used -> BigInt, } } diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index 4e946b4f..26bf4b68 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -160,7 +160,7 @@ table! 
{ atype -> Integer, enabled -> Bool, data -> Text, - last_used -> Integer, + last_used -> BigInt, } } diff --git a/src/error.rs b/src/error.rs index f0969bff..afb1dc83 100644 --- a/src/error.rs +++ b/src/error.rs @@ -52,7 +52,6 @@ use rocket::error::Error as RocketErr; use serde_json::{Error as SerdeErr, Value}; use std::io::Error as IoErr; use std::time::SystemTimeError as TimeErr; -use tokio_tungstenite::tungstenite::Error as TungstError; use webauthn_rs::error::WebauthnError as WebauthnErr; use yubico::yubicoerror::YubicoError as YubiErr; @@ -91,7 +90,6 @@ make_error! { DieselCon(DieselConErr): _has_source, _api_error, Webauthn(WebauthnErr): _has_source, _api_error, - WebSocket(TungstError): _has_source, _api_error, } impl std::fmt::Debug for Error { @@ -181,18 +179,18 @@ fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String { fn _api_error(_: &impl std::any::Any, msg: &str) -> String { let json = json!({ - "Message": msg, + "message": msg, "error": "", "error_description": "", - "ValidationErrors": {"": [ msg ]}, - "ErrorModel": { - "Message": msg, - "Object": "error" + "validationErrors": {"": [ msg ]}, + "errorModel": { + "message": msg, + "object": "error" }, - "ExceptionMessage": null, - "ExceptionStackTrace": null, - "InnerExceptionMessage": null, - "Object": "error" + "exceptionMessage": null, + "exceptionStackTrace": null, + "innerExceptionMessage": null, + "object": "error" }); _serialize(&json, "") } diff --git a/src/main.rs b/src/main.rs index 05f43c5a..73085901 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,7 +3,7 @@ // The more key/value pairs there are the more recursion occurs. // We want to keep this as low as possible, but not higher then 128. 
// If you go above 128 it will cause rust-analyzer to fail, -#![recursion_limit = "103"] +#![recursion_limit = "200"] // When enabled use MiMalloc as malloc instead of the default malloc #[cfg(feature = "enable_mimalloc")] @@ -52,7 +52,7 @@ mod ratelimit; mod util; use crate::api::purge_auth_requests; -use crate::api::WS_ANONYMOUS_SUBSCRIPTIONS; +use crate::api::{WS_ANONYMOUS_SUBSCRIPTIONS, WS_USERS}; pub use config::CONFIG; pub use error::{Error, MapResult}; use rocket::data::{Limits, ToByteUnit}; @@ -65,13 +65,17 @@ async fn main() -> Result<(), Error> { launch_info(); use log::LevelFilter as LF; - let level = LF::from_str(&CONFIG.log_level()).expect("Valid log level"); + let level = LF::from_str(&CONFIG.log_level()).unwrap_or_else(|_| { + let valid_log_levels = LF::iter().map(|lvl| lvl.as_str().to_lowercase()).collect::>().join(", "); + println!("Log level must be one of the following: {valid_log_levels}"); + exit(1); + }); init_logging(level).ok(); let extra_debug = matches!(level, LF::Trace | LF::Debug); check_data_folder().await; - check_rsa_keys().unwrap_or_else(|_| { + auth::initialize_keys().unwrap_or_else(|_| { error!("Error creating keys, exiting..."); exit(1); }); @@ -207,9 +211,9 @@ fn launch_info() { } fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { - // Depending on the main log level we either want to disable or enable logging for trust-dns. - // Else if there are timeouts it will clutter the logs since trust-dns uses warn for this. - let trust_dns_level = if level >= log::LevelFilter::Debug { + // Depending on the main log level we either want to disable or enable logging for hickory. + // Else if there are timeouts it will clutter the logs since hickory uses warn for this. 
+ let hickory_level = if level >= log::LevelFilter::Debug { level } else { log::LevelFilter::Off @@ -262,9 +266,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { .level_for("handlebars::render", handlebars_level) // Prevent cookie_store logs .level_for("cookie_store", log::LevelFilter::Off) - // Variable level for trust-dns used by reqwest - .level_for("trust_dns_resolver::name_server::name_server", trust_dns_level) - .level_for("trust_dns_proto::xfer", trust_dns_level) + // Variable level for hickory used by reqwest + .level_for("hickory_resolver::name_server::name_server", hickory_level) + .level_for("hickory_proto::xfer", hickory_level) .level_for("diesel_logger", diesel_logger_level) .chain(std::io::stdout()); @@ -444,31 +448,6 @@ async fn container_data_folder_is_persistent(data_folder: &str) -> bool { true } -fn check_rsa_keys() -> Result<(), crate::error::Error> { - // If the RSA keys don't exist, try to create them - let priv_path = CONFIG.private_rsa_key(); - let pub_path = CONFIG.public_rsa_key(); - - if !util::file_exists(&priv_path) { - let rsa_key = openssl::rsa::Rsa::generate(2048)?; - - let priv_key = rsa_key.private_key_to_pem()?; - crate::util::write_file(&priv_path, &priv_key)?; - info!("Private key created correctly."); - } - - if !util::file_exists(&pub_path) { - let rsa_key = openssl::rsa::Rsa::private_key_from_pem(&std::fs::read(&priv_path)?)?; - - let pub_key = rsa_key.public_key_to_pem()?; - crate::util::write_file(&pub_path, &pub_key)?; - info!("Public key created correctly."); - } - - auth::load_keys(); - Ok(()) -} - fn check_web_vault() { if !CONFIG.web_vault_enabled() { return; @@ -522,7 +501,7 @@ async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error> .register([basepath, "/api"].concat(), api::core_catchers()) .register([basepath, "/admin"].concat(), api::admin_catchers()) .manage(pool) - .manage(api::start_notification_server()) + .manage(Arc::clone(&WS_USERS)) 
.manage(Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS)) .attach(util::AppHeaders()) .attach(util::Cors()) diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 78458cbb..e3f08813 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -1,80 +1,80 @@ [ { - "Type": 2, - "Domains": [ + "type": 2, + "domains": [ "ameritrade.com", "tdameritrade.com" ], - "Excluded": false + "excluded": false }, { - "Type": 3, - "Domains": [ + "type": 3, + "domains": [ "bankofamerica.com", "bofa.com", "mbna.com", "usecfo.com" ], - "Excluded": false + "excluded": false }, { - "Type": 4, - "Domains": [ + "type": 4, + "domains": [ "sprint.com", "sprintpcs.com", "nextel.com" ], - "Excluded": false + "excluded": false }, { - "Type": 0, - "Domains": [ + "type": 0, + "domains": [ "youtube.com", "google.com", "gmail.com" ], - "Excluded": false + "excluded": false }, { - "Type": 1, - "Domains": [ + "type": 1, + "domains": [ "apple.com", "icloud.com" ], - "Excluded": false + "excluded": false }, { - "Type": 5, - "Domains": [ + "type": 5, + "domains": [ "wellsfargo.com", "wf.com", "wellsfargoadvisors.com" ], - "Excluded": false + "excluded": false }, { - "Type": 6, - "Domains": [ + "type": 6, + "domains": [ "mymerrill.com", "ml.com", "merrilledge.com" ], - "Excluded": false + "excluded": false }, { - "Type": 7, - "Domains": [ + "type": 7, + "domains": [ "accountonline.com", "citi.com", "citibank.com", "citicards.com", "citibankonline.com" ], - "Excluded": false + "excluded": false }, { - "Type": 8, - "Domains": [ + "type": 8, + "domains": [ "cnet.com", "cnettv.com", "com.com", @@ -83,21 +83,21 @@ "search.com", "upload.com" ], - "Excluded": false + "excluded": false }, { - "Type": 9, - "Domains": [ + "type": 9, + "domains": [ "bananarepublic.com", "gap.com", "oldnavy.com", "piperlime.com" ], - "Excluded": false + "excluded": false }, { - "Type": 10, - "Domains": [ + "type": 10, + "domains": [ "bing.com", "hotmail.com", "live.com", @@ -113,53 +113,53 
@@ "azure.com", "windowsazure.com" ], - "Excluded": false + "excluded": false }, { - "Type": 11, - "Domains": [ + "type": 11, + "domains": [ "ua2go.com", "ual.com", "united.com", "unitedwifi.com" ], - "Excluded": false + "excluded": false }, { - "Type": 12, - "Domains": [ + "type": 12, + "domains": [ "overture.com", "yahoo.com" ], - "Excluded": false + "excluded": false }, { - "Type": 13, - "Domains": [ + "type": 13, + "domains": [ "zonealarm.com", "zonelabs.com" ], - "Excluded": false + "excluded": false }, { - "Type": 14, - "Domains": [ + "type": 14, + "domains": [ "paypal.com", "paypal-search.com" ], - "Excluded": false + "excluded": false }, { - "Type": 15, - "Domains": [ + "type": 15, + "domains": [ "avon.com", "youravon.com" ], - "Excluded": false + "excluded": false }, { - "Type": 16, - "Domains": [ + "type": 16, + "domains": [ "diapers.com", "soap.com", "wag.com", @@ -172,19 +172,19 @@ "look.com", "vinemarket.com" ], - "Excluded": false + "excluded": false }, { - "Type": 17, - "Domains": [ + "type": 17, + "domains": [ "1800contacts.com", "800contacts.com" ], - "Excluded": false + "excluded": false }, { - "Type": 18, - "Domains": [ + "type": 18, + "domains": [ "amazon.com", "amazon.com.be", "amazon.ae", @@ -205,240 +205,240 @@ "amazon.se", "amazon.sg" ], - "Excluded": false + "excluded": false }, { - "Type": 19, - "Domains": [ + "type": 19, + "domains": [ "cox.com", "cox.net", "coxbusiness.com" ], - "Excluded": false + "excluded": false }, { - "Type": 20, - "Domains": [ + "type": 20, + "domains": [ "mynortonaccount.com", "norton.com" ], - "Excluded": false + "excluded": false }, { - "Type": 21, - "Domains": [ + "type": 21, + "domains": [ "verizon.com", "verizon.net" ], - "Excluded": false + "excluded": false }, { - "Type": 22, - "Domains": [ + "type": 22, + "domains": [ "rakuten.com", "buy.com" ], - "Excluded": false + "excluded": false }, { - "Type": 23, - "Domains": [ + "type": 23, + "domains": [ "siriusxm.com", "sirius.com" ], - "Excluded": false + 
"excluded": false }, { - "Type": 24, - "Domains": [ + "type": 24, + "domains": [ "ea.com", "origin.com", "play4free.com", "tiberiumalliance.com" ], - "Excluded": false + "excluded": false }, { - "Type": 25, - "Domains": [ + "type": 25, + "domains": [ "37signals.com", "basecamp.com", "basecamphq.com", "highrisehq.com" ], - "Excluded": false + "excluded": false }, { - "Type": 26, - "Domains": [ + "type": 26, + "domains": [ "steampowered.com", "steamcommunity.com", "steamgames.com" ], - "Excluded": false + "excluded": false }, { - "Type": 27, - "Domains": [ + "type": 27, + "domains": [ "chart.io", "chartio.com" ], - "Excluded": false + "excluded": false }, { - "Type": 28, - "Domains": [ + "type": 28, + "domains": [ "gotomeeting.com", "citrixonline.com" ], - "Excluded": false + "excluded": false }, { - "Type": 29, - "Domains": [ + "type": 29, + "domains": [ "gogoair.com", "gogoinflight.com" ], - "Excluded": false + "excluded": false }, { - "Type": 30, - "Domains": [ + "type": 30, + "domains": [ "mysql.com", "oracle.com" ], - "Excluded": false + "excluded": false }, { - "Type": 31, - "Domains": [ + "type": 31, + "domains": [ "discover.com", "discovercard.com" ], - "Excluded": false + "excluded": false }, { - "Type": 32, - "Domains": [ + "type": 32, + "domains": [ "dcu.org", "dcu-online.org" ], - "Excluded": false + "excluded": false }, { - "Type": 33, - "Domains": [ + "type": 33, + "domains": [ "healthcare.gov", "cuidadodesalud.gov", "cms.gov" ], - "Excluded": false + "excluded": false }, { - "Type": 34, - "Domains": [ + "type": 34, + "domains": [ "pepco.com", "pepcoholdings.com" ], - "Excluded": false + "excluded": false }, { - "Type": 35, - "Domains": [ + "type": 35, + "domains": [ "century21.com", "21online.com" ], - "Excluded": false + "excluded": false }, { - "Type": 36, - "Domains": [ + "type": 36, + "domains": [ "comcast.com", "comcast.net", "xfinity.com" ], - "Excluded": false + "excluded": false }, { - "Type": 37, - "Domains": [ + "type": 37, + "domains": [ 
"cricketwireless.com", "aiowireless.com" ], - "Excluded": false + "excluded": false }, { - "Type": 38, - "Domains": [ + "type": 38, + "domains": [ "mandtbank.com", "mtb.com" ], - "Excluded": false + "excluded": false }, { - "Type": 39, - "Domains": [ + "type": 39, + "domains": [ "dropbox.com", "getdropbox.com" ], - "Excluded": false + "excluded": false }, { - "Type": 40, - "Domains": [ + "type": 40, + "domains": [ "snapfish.com", "snapfish.ca" ], - "Excluded": false + "excluded": false }, { - "Type": 41, - "Domains": [ + "type": 41, + "domains": [ "alibaba.com", "aliexpress.com", "aliyun.com", "net.cn" ], - "Excluded": false + "excluded": false }, { - "Type": 42, - "Domains": [ + "type": 42, + "domains": [ "playstation.com", "sonyentertainmentnetwork.com" ], - "Excluded": false + "excluded": false }, { - "Type": 43, - "Domains": [ + "type": 43, + "domains": [ "mercadolivre.com", "mercadolivre.com.br", "mercadolibre.com", "mercadolibre.com.ar", "mercadolibre.com.mx" ], - "Excluded": false + "excluded": false }, { - "Type": 44, - "Domains": [ + "type": 44, + "domains": [ "zendesk.com", "zopim.com" ], - "Excluded": false + "excluded": false }, { - "Type": 45, - "Domains": [ + "type": 45, + "domains": [ "autodesk.com", "tinkercad.com" ], - "Excluded": false + "excluded": false }, { - "Type": 46, - "Domains": [ + "type": 46, + "domains": [ "railnation.ru", "railnation.de", "rail-nation.com", @@ -447,152 +447,152 @@ "trucknation.de", "traviangames.com" ], - "Excluded": false + "excluded": false }, { - "Type": 47, - "Domains": [ + "type": 47, + "domains": [ "wpcu.coop", "wpcuonline.com" ], - "Excluded": false + "excluded": false }, { - "Type": 48, - "Domains": [ + "type": 48, + "domains": [ "mathletics.com", "mathletics.com.au", "mathletics.co.uk" ], - "Excluded": false + "excluded": false }, { - "Type": 49, - "Domains": [ + "type": 49, + "domains": [ "discountbank.co.il", "telebank.co.il" ], - "Excluded": false + "excluded": false }, { - "Type": 50, - "Domains": [ + 
"type": 50, + "domains": [ "mi.com", "xiaomi.com" ], - "Excluded": false + "excluded": false }, { - "Type": 52, - "Domains": [ + "type": 52, + "domains": [ "postepay.it", "poste.it" ], - "Excluded": false + "excluded": false }, { - "Type": 51, - "Domains": [ + "type": 51, + "domains": [ "facebook.com", "messenger.com" ], - "Excluded": false + "excluded": false }, { - "Type": 53, - "Domains": [ + "type": 53, + "domains": [ "skysports.com", "skybet.com", "skyvegas.com" ], - "Excluded": false + "excluded": false }, { - "Type": 54, - "Domains": [ + "type": 54, + "domains": [ "disneymoviesanywhere.com", "go.com", "disney.com", "dadt.com", "disneyplus.com" ], - "Excluded": false + "excluded": false }, { - "Type": 55, - "Domains": [ + "type": 55, + "domains": [ "pokemon-gl.com", "pokemon.com" ], - "Excluded": false + "excluded": false }, { - "Type": 56, - "Domains": [ + "type": 56, + "domains": [ "myuv.com", "uvvu.com" ], - "Excluded": false + "excluded": false }, { - "Type": 58, - "Domains": [ + "type": 58, + "domains": [ "mdsol.com", "imedidata.com" ], - "Excluded": false + "excluded": false }, { - "Type": 57, - "Domains": [ + "type": 57, + "domains": [ "bank-yahav.co.il", "bankhapoalim.co.il" ], - "Excluded": false + "excluded": false }, { - "Type": 59, - "Domains": [ + "type": 59, + "domains": [ "sears.com", "shld.net" ], - "Excluded": false + "excluded": false }, { - "Type": 60, - "Domains": [ + "type": 60, + "domains": [ "xiami.com", "alipay.com" ], - "Excluded": false + "excluded": false }, { - "Type": 61, - "Domains": [ + "type": 61, + "domains": [ "belkin.com", "seedonk.com" ], - "Excluded": false + "excluded": false }, { - "Type": 62, - "Domains": [ + "type": 62, + "domains": [ "turbotax.com", "intuit.com" ], - "Excluded": false + "excluded": false }, { - "Type": 63, - "Domains": [ + "type": 63, + "domains": [ "shopify.com", "myshopify.com" ], - "Excluded": false + "excluded": false }, { - "Type": 64, - "Domains": [ + "type": 64, + "domains": [ "ebay.com", 
"ebay.at", "ebay.be", @@ -617,53 +617,53 @@ "ebay.ph", "ebay.pl" ], - "Excluded": false + "excluded": false }, { - "Type": 65, - "Domains": [ + "type": 65, + "domains": [ "techdata.com", "techdata.ch" ], - "Excluded": false + "excluded": false }, { - "Type": 66, - "Domains": [ + "type": 66, + "domains": [ "schwab.com", "schwabplan.com" ], - "Excluded": false + "excluded": false }, { - "Type": 68, - "Domains": [ + "type": 68, + "domains": [ "tesla.com", "teslamotors.com" ], - "Excluded": false + "excluded": false }, { - "Type": 69, - "Domains": [ + "type": 69, + "domains": [ "morganstanley.com", "morganstanleyclientserv.com", "stockplanconnect.com", "ms.com" ], - "Excluded": false + "excluded": false }, { - "Type": 70, - "Domains": [ + "type": 70, + "domains": [ "taxact.com", "taxactonline.com" ], - "Excluded": false + "excluded": false }, { - "Type": 71, - "Domains": [ + "type": 71, + "domains": [ "mediawiki.org", "wikibooks.org", "wikidata.org", @@ -676,11 +676,11 @@ "wikivoyage.org", "wiktionary.org" ], - "Excluded": false + "excluded": false }, { - "Type": 72, - "Domains": [ + "type": 72, + "domains": [ "airbnb.at", "airbnb.be", "airbnb.ca", @@ -735,11 +735,11 @@ "airbnb.ru", "airbnb.se" ], - "Excluded": false + "excluded": false }, { - "Type": 73, - "Domains": [ + "type": 73, + "domains": [ "eventbrite.at", "eventbrite.be", "eventbrite.ca", @@ -767,11 +767,11 @@ "eventbrite.se", "eventbrite.sg" ], - "Excluded": false + "excluded": false }, { - "Type": 74, - "Domains": [ + "type": 74, + "domains": [ "stackexchange.com", "superuser.com", "stackoverflow.com", @@ -780,19 +780,19 @@ "askubuntu.com", "stackapps.com" ], - "Excluded": false + "excluded": false }, { - "Type": 75, - "Domains": [ + "type": 75, + "domains": [ "docusign.com", "docusign.net" ], - "Excluded": false + "excluded": false }, { - "Type": 76, - "Domains": [ + "type": 76, + "domains": [ "envato.com", "themeforest.net", "codecanyon.net", @@ -802,28 +802,28 @@ "photodune.net", "3docean.net" ], - 
"Excluded": false + "excluded": false }, { - "Type": 77, - "Domains": [ + "type": 77, + "domains": [ "x10hosting.com", "x10premium.com" ], - "Excluded": false + "excluded": false }, { - "Type": 78, - "Domains": [ + "type": 78, + "domains": [ "dnsomatic.com", "opendns.com", "umbrella.com" ], - "Excluded": false + "excluded": false }, { - "Type": 79, - "Domains": [ + "type": 79, + "domains": [ "cagreatamerica.com", "canadaswonderland.com", "carowinds.com", @@ -838,36 +838,36 @@ "visitkingsisland.com", "worldsoffun.com" ], - "Excluded": false + "excluded": false }, { - "Type": 80, - "Domains": [ + "type": 80, + "domains": [ "ubnt.com", "ui.com" ], - "Excluded": false + "excluded": false }, { - "Type": 81, - "Domains": [ + "type": 81, + "domains": [ "discordapp.com", "discord.com" ], - "Excluded": false + "excluded": false }, { - "Type": 82, - "Domains": [ + "type": 82, + "domains": [ "netcup.de", "netcup.eu", "customercontrolpanel.de" ], - "Excluded": false + "excluded": false }, { - "Type": 83, - "Domains": [ + "type": 83, + "domains": [ "yandex.com", "ya.ru", "yandex.az", @@ -891,44 +891,44 @@ "yandex.ua", "yandex.uz" ], - "Excluded": false + "excluded": false }, { - "Type": 84, - "Domains": [ + "type": 84, + "domains": [ "sonyentertainmentnetwork.com", "sony.com" ], - "Excluded": false + "excluded": false }, { - "Type": 85, - "Domains": [ + "type": 85, + "domains": [ "proton.me", "protonmail.com", "protonvpn.com" ], - "Excluded": false + "excluded": false }, { - "Type": 86, - "Domains": [ + "type": 86, + "domains": [ "ubisoft.com", "ubi.com" ], - "Excluded": false + "excluded": false }, { - "Type": 87, - "Domains": [ + "type": 87, + "domains": [ "transferwise.com", "wise.com" ], - "Excluded": false + "excluded": false }, { - "Type": 88, - "Domains": [ + "type": 88, + "domains": [ "takeaway.com", "just-eat.dk", "just-eat.no", @@ -939,11 +939,11 @@ "thuisbezorgd.nl", "pyszne.pl" ], - "Excluded": false + "excluded": false }, { - "Type": 89, - "Domains": [ + "type": 
89, + "domains": [ "atlassian.com", "bitbucket.org", "trello.com", @@ -951,11 +951,11 @@ "atlassian.net", "jira.com" ], - "Excluded": false + "excluded": false }, { - "Type": 90, - "Domains": [ + "type": 90, + "domains": [ "pinterest.com", "pinterest.com.au", "pinterest.cl", @@ -970,6 +970,6 @@ "pinterest.pt", "pinterest.se" ], - "Excluded": false + "excluded": false } ] \ No newline at end of file diff --git a/src/static/scripts/admin_diagnostics.js b/src/static/scripts/admin_diagnostics.js index a079c0aa..9f2aca66 100644 --- a/src/static/scripts/admin_diagnostics.js +++ b/src/static/scripts/admin_diagnostics.js @@ -21,7 +21,11 @@ const browserUTC = `${year}-${month}-${day} ${hour}:${minute}:${seconds} UTC`; // ================================ // Check if the output is a valid IP -const isValidIp = value => (/^(?:(?:^|\.)(?:2(?:5[0-5]|[0-4]\d)|1?\d?\d)){4}$/.test(value) ? true : false); +function isValidIp(ip) { + const ipv4Regex = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/; + const ipv6Regex = /^(?:[a-fA-F0-9]{1,4}:){7}[a-fA-F0-9]{1,4}|((?:[a-fA-F0-9]{1,4}:){1,7}:|:(:[a-fA-F0-9]{1,4}){1,7}|[a-fA-F0-9]{1,4}:((:[a-fA-F0-9]{1,4}){1,6}))$/; + return ipv4Regex.test(ip) || ipv6Regex.test(ip); +} function checkVersions(platform, installed, latest, commit=null) { if (installed === "-" || latest === "-") { diff --git a/src/static/scripts/bootstrap.bundle.js b/src/static/scripts/bootstrap.bundle.js index 491038c2..6294dff3 100644 --- a/src/static/scripts/bootstrap.bundle.js +++ b/src/static/scripts/bootstrap.bundle.js @@ -1,6 +1,6 @@ /*! 
- * Bootstrap v5.3.2 (https://getbootstrap.com/) - * Copyright 2011-2023 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Bootstrap v5.3.3 (https://getbootstrap.com/) + * Copyright 2011-2024 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) */ (function (global, factory) { @@ -210,7 +210,6 @@ const reflow = element => { element.offsetHeight; // eslint-disable-line no-unused-expressions }; - const getjQuery = () => { if (window.jQuery && !document.body.hasAttribute('data-bs-no-jquery')) { return window.jQuery; @@ -648,7 +647,7 @@ * Constants */ - const VERSION = '5.3.2'; + const VERSION = '5.3.3'; /** * Class definition @@ -729,9 +728,9 @@ if (hrefAttribute.includes('#') && !hrefAttribute.startsWith('#')) { hrefAttribute = `#${hrefAttribute.split('#')[1]}`; } - selector = hrefAttribute && hrefAttribute !== '#' ? parseSelector(hrefAttribute.trim()) : null; + selector = hrefAttribute && hrefAttribute !== '#' ? hrefAttribute.trim() : null; } - return selector; + return selector ? 
selector.split(',').map(sel => parseSelector(sel)).join(',') : null; }; const SelectorEngine = { find(selector, element = document.documentElement) { @@ -3916,7 +3915,6 @@ // if false, we use the backdrop helper without adding any element to the dom rootElement: 'body' // give the choice to place backdrop under different elements }; - const DefaultType$8 = { className: 'string', clickCallback: '(function|null)', @@ -4041,7 +4039,6 @@ autofocus: true, trapElement: null // The element to trap focus inside of }; - const DefaultType$7 = { autofocus: 'boolean', trapElement: 'element' @@ -4768,7 +4765,10 @@ br: [], col: [], code: [], + dd: [], div: [], + dl: [], + dt: [], em: [], hr: [], h1: [], @@ -6311,3 +6311,4 @@ return index_umd; })); +//# sourceMappingURL=bootstrap.bundle.js.map diff --git a/src/static/scripts/bootstrap.css b/src/static/scripts/bootstrap.css index 32ea6e9c..b7ab57f2 100644 --- a/src/static/scripts/bootstrap.css +++ b/src/static/scripts/bootstrap.css @@ -1,7 +1,7 @@ @charset "UTF-8"; /*! 
- * Bootstrap v5.3.2 (https://getbootstrap.com/) - * Copyright 2011-2023 The Bootstrap Authors + * Bootstrap v5.3.3 (https://getbootstrap.com/) + * Copyright 2011-2024 The Bootstrap Authors * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) */ :root, @@ -3042,6 +3042,9 @@ textarea.form-control-lg { .btn-check:checked + .btn:focus-visible, :not(.btn-check) + .btn:active:focus-visible, .btn:first-child:active:focus-visible, .btn.active:focus-visible, .btn.show:focus-visible { box-shadow: var(--bs-btn-focus-box-shadow); } +.btn-check:checked:focus-visible + .btn { + box-shadow: var(--bs-btn-focus-box-shadow); +} .btn:disabled, .btn.disabled, fieldset:disabled .btn { color: var(--bs-btn-disabled-color); pointer-events: none; @@ -4573,12 +4576,11 @@ textarea.form-control-lg { --bs-accordion-btn-padding-y: 1rem; --bs-accordion-btn-color: var(--bs-body-color); --bs-accordion-btn-bg: var(--bs-accordion-bg); - --bs-accordion-btn-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23212529'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e"); + --bs-accordion-btn-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='none' stroke='%23212529' stroke-linecap='round' stroke-linejoin='round'%3e%3cpath d='M2 5L8 11L14 5'/%3e%3c/svg%3e"); --bs-accordion-btn-icon-width: 1.25rem; --bs-accordion-btn-icon-transform: rotate(-180deg); --bs-accordion-btn-icon-transition: transform 0.2s ease-in-out; - --bs-accordion-btn-active-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23052c65'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e"); - --bs-accordion-btn-focus-border-color: #86b7fe; + 
--bs-accordion-btn-active-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='none' stroke='%23052c65' stroke-linecap='round' stroke-linejoin='round'%3e%3cpath d='M2 5L8 11L14 5'/%3e%3c/svg%3e"); --bs-accordion-btn-focus-box-shadow: 0 0 0 0.25rem rgba(13, 110, 253, 0.25); --bs-accordion-body-padding-x: 1.25rem; --bs-accordion-body-padding-y: 1rem; @@ -4636,7 +4638,6 @@ textarea.form-control-lg { } .accordion-button:focus { z-index: 3; - border-color: var(--bs-accordion-btn-focus-border-color); outline: 0; box-shadow: var(--bs-accordion-btn-focus-box-shadow); } @@ -4654,7 +4655,7 @@ textarea.form-control-lg { border-top-left-radius: var(--bs-accordion-border-radius); border-top-right-radius: var(--bs-accordion-border-radius); } -.accordion-item:first-of-type .accordion-button { +.accordion-item:first-of-type > .accordion-header .accordion-button { border-top-left-radius: var(--bs-accordion-inner-border-radius); border-top-right-radius: var(--bs-accordion-inner-border-radius); } @@ -4665,11 +4666,11 @@ textarea.form-control-lg { border-bottom-right-radius: var(--bs-accordion-border-radius); border-bottom-left-radius: var(--bs-accordion-border-radius); } -.accordion-item:last-of-type .accordion-button.collapsed { +.accordion-item:last-of-type > .accordion-header .accordion-button.collapsed { border-bottom-right-radius: var(--bs-accordion-inner-border-radius); border-bottom-left-radius: var(--bs-accordion-inner-border-radius); } -.accordion-item:last-of-type .accordion-collapse { +.accordion-item:last-of-type > .accordion-collapse { border-bottom-right-radius: var(--bs-accordion-border-radius); border-bottom-left-radius: var(--bs-accordion-border-radius); } @@ -4678,21 +4679,21 @@ textarea.form-control-lg { padding: var(--bs-accordion-body-padding-y) var(--bs-accordion-body-padding-x); } -.accordion-flush .accordion-collapse { - border-width: 0; -} -.accordion-flush .accordion-item { +.accordion-flush > .accordion-item { 
border-right: 0; border-left: 0; border-radius: 0; } -.accordion-flush .accordion-item:first-child { +.accordion-flush > .accordion-item:first-child { border-top: 0; } -.accordion-flush .accordion-item:last-child { +.accordion-flush > .accordion-item:last-child { border-bottom: 0; } -.accordion-flush .accordion-item .accordion-button, .accordion-flush .accordion-item .accordion-button.collapsed { +.accordion-flush > .accordion-item > .accordion-header .accordion-button, .accordion-flush > .accordion-item > .accordion-header .accordion-button.collapsed { + border-radius: 0; +} +.accordion-flush > .accordion-item > .accordion-collapse { border-radius: 0; } @@ -5578,7 +5579,6 @@ textarea.form-control-lg { display: flex; flex-shrink: 0; align-items: center; - justify-content: space-between; padding: var(--bs-modal-header-padding); border-bottom: var(--bs-modal-header-border-width) solid var(--bs-modal-header-border-color); border-top-left-radius: var(--bs-modal-inner-border-radius); @@ -6144,20 +6144,12 @@ textarea.form-control-lg { background-size: 100% 100%; } -/* rtl:options: { - "autoRename": true, - "stringMap":[ { - "name" : "prev-next", - "search" : "prev", - "replace" : "next" - } ] -} */ .carousel-control-prev-icon { - background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e"); + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e") /*rtl:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 
2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")*/; } .carousel-control-next-icon { - background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e"); + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e") /*rtl:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 0z'/%3e%3c/svg%3e")*/; } .carousel-indicators { @@ -6777,14 +6769,11 @@ textarea.form-control-lg { .offcanvas-header { display: flex; align-items: center; - justify-content: space-between; padding: var(--bs-offcanvas-padding-y) var(--bs-offcanvas-padding-x); } .offcanvas-header .btn-close { padding: calc(var(--bs-offcanvas-padding-y) * 0.5) calc(var(--bs-offcanvas-padding-x) * 0.5); - margin-top: calc(-0.5 * var(--bs-offcanvas-padding-y)); - margin-right: calc(-0.5 * var(--bs-offcanvas-padding-x)); - margin-bottom: calc(-0.5 * var(--bs-offcanvas-padding-y)); + margin: calc(-0.5 * var(--bs-offcanvas-padding-y)) calc(-0.5 * var(--bs-offcanvas-padding-x)) calc(-0.5 * var(--bs-offcanvas-padding-y)) auto; } .offcanvas-title { @@ -12064,3 +12053,5 @@ textarea.form-control-lg { display: none !important; } } + +/*# sourceMappingURL=bootstrap.css.map */ \ No newline at end of file diff --git a/src/static/scripts/datatables.css b/src/static/scripts/datatables.css index 4c93a7d5..83e4f44b 100644 --- a/src/static/scripts/datatables.css +++ b/src/static/scripts/datatables.css @@ -4,10 +4,10 @@ * * To rebuild or modify this file with the latest 
versions of the included * software please visit: - * https://datatables.net/download/#bs5/dt-2.0.0 + * https://datatables.net/download/#bs5/dt-2.0.7 * * Included libraries: - * DataTables 2.0.0 + * DataTables 2.0.7 */ @charset "UTF-8"; @@ -347,7 +347,7 @@ table.table.dataTable.table-striped > tbody > tr:nth-of-type(2n+1) > * { box-shadow: none; } table.table.dataTable > :not(caption) > * > * { - background-color: transparent; + background-color: var(--bs-table-bg); } table.table.dataTable > tbody > tr { background-color: transparent; @@ -463,10 +463,18 @@ div.dt-scroll-foot > .dt-scroll-footInner > table > tfoot > tr:first-child { justify-content: center !important; } } -table.dataTable.table-sm > thead > tr > th:not(.sorting_disabled) { +table.dataTable.table-sm > thead > tr th.dt-orderable-asc, table.dataTable.table-sm > thead > tr th.dt-orderable-desc, table.dataTable.table-sm > thead > tr th.dt-ordering-asc, table.dataTable.table-sm > thead > tr th.dt-ordering-desc, +table.dataTable.table-sm > thead > tr td.dt-orderable-asc, +table.dataTable.table-sm > thead > tr td.dt-orderable-desc, +table.dataTable.table-sm > thead > tr td.dt-ordering-asc, +table.dataTable.table-sm > thead > tr td.dt-ordering-desc { padding-right: 20px; } -table.dataTable.table-sm > thead > tr > th:not(.sorting_disabled):before, table.dataTable.table-sm > thead > tr > th:not(.sorting_disabled):after { +table.dataTable.table-sm > thead > tr th.dt-orderable-asc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-orderable-desc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-ordering-asc span.dt-column-order, table.dataTable.table-sm > thead > tr th.dt-ordering-desc span.dt-column-order, +table.dataTable.table-sm > thead > tr td.dt-orderable-asc span.dt-column-order, +table.dataTable.table-sm > thead > tr td.dt-orderable-desc span.dt-column-order, +table.dataTable.table-sm > thead > tr td.dt-ordering-asc span.dt-column-order, +table.dataTable.table-sm > thead > 
tr td.dt-ordering-desc span.dt-column-order { right: 5px; } diff --git a/src/static/scripts/datatables.js b/src/static/scripts/datatables.js index f67d3f28..88d0b627 100644 --- a/src/static/scripts/datatables.js +++ b/src/static/scripts/datatables.js @@ -4,20 +4,20 @@ * * To rebuild or modify this file with the latest versions of the included * software please visit: - * https://datatables.net/download/#bs5/dt-2.0.0 + * https://datatables.net/download/#bs5/dt-2.0.7 * * Included libraries: - * DataTables 2.0.0 + * DataTables 2.0.7 */ -/*! DataTables 2.0.0 +/*! DataTables 2.0.7 * © SpryMedia Ltd - datatables.net/license */ /** * @summary DataTables * @description Paginate, search and order HTML tables - * @version 2.0.0 + * @version 2.0.7 * @author SpryMedia Ltd * @contact www.datatables.net * @copyright SpryMedia Ltd. @@ -329,7 +329,7 @@ _fnCamelToHungarian( defaults.oLanguage, json ); $.extend( true, oLanguage, json, oSettings.oInit.oLanguage ); - _fnCallbackFire( oSettings, null, 'i18n', [oSettings]); + _fnCallbackFire( oSettings, null, 'i18n', [oSettings], true); _fnInitialise( oSettings ); }, error: function () { @@ -563,7 +563,7 @@ * * @type string */ - build:"bs5/dt-2.0.0", + builder: "bs5/dt-2.0.7", /** @@ -1108,7 +1108,8 @@ var _re_dic = {}; var _re_new_lines = /[\r\n\u2028]/g; - var _re_html = /<.*?>/g; + var _re_html = /<([^>]*>)/g; + var _max_str_len = Math.pow(2, 28); // This is not strict ISO8601 - Date.parse() is quite lax, although // implementations differ between browsers. @@ -1296,10 +1297,24 @@ }; // Replaceable function in api.util - var _stripHtml = function ( d ) { - return d - .replace( _re_html, '' ) // Complete tags - .replace(/ - + diff --git a/src/static/templates/admin/users.hbs b/src/static/templates/admin/users.hbs index d1fd3e5d..1765876a 100644 --- a/src/static/templates/admin/users.hbs +++ b/src/static/templates/admin/users.hbs @@ -18,21 +18,21 @@ {{#each page_data}} - +
- {{Name}} - {{Email}} + {{name}} + {{email}} {{#unless user_enabled}} Disabled {{/unless}} - {{#if TwoFactorEnabled}} + {{#if twoFactorEnabled}} 2FA {{/if}} - {{#case _Status 1}} + {{#case _status 1}} Invited {{/case}} - {{#if EmailVerified}} + {{#if emailVerified}} Verified {{/if}} @@ -54,15 +54,15 @@ {{/if}} -
- {{#each Organizations}} - +
+ {{#each organizations}} + {{/each}}
- - {{#if TwoFactorEnabled}} + + {{#if twoFactorEnabled}}
{{/if}}
@@ -72,7 +72,7 @@ {{else}}
{{/if}} - {{#case _Status 1}} + {{#case _status 1}}
{{/case}}
@@ -143,4 +143,4 @@ - + diff --git a/src/static/templates/email/change_email.hbs b/src/static/templates/email/change_email.hbs index 4262fd74..f5276512 100644 --- a/src/static/templates/email/change_email.hbs +++ b/src/static/templates/email/change_email.hbs @@ -2,5 +2,5 @@ Your Email Change To finalize changing your email address enter the following code in web vault: {{token}} -If you did not try to change an email address, you can safely ignore this email. +If you did not try to change your email address, contact your administrator. {{> email/email_footer_text }} diff --git a/src/static/templates/email/change_email.html.hbs b/src/static/templates/email/change_email.html.hbs index aecf8859..488b085c 100644 --- a/src/static/templates/email/change_email.html.hbs +++ b/src/static/templates/email/change_email.html.hbs @@ -9,7 +9,7 @@ Your Email Change - If you did not try to change an email address, you can safely ignore this email. + If you did not try to change your email address, contact your administrator. 
diff --git a/src/util.rs b/src/util.rs index 0bf37959..29df7bbc 100644 --- a/src/util.rs +++ b/src/util.rs @@ -1,13 +1,10 @@ // // Web Headers and caching // -use std::{ - collections::HashMap, - io::{Cursor, ErrorKind}, - ops::Deref, -}; +use std::{collections::HashMap, io::Cursor, ops::Deref, path::Path}; use num_traits::ToPrimitive; +use once_cell::sync::Lazy; use rocket::{ fairing::{Fairing, Info, Kind}, http::{ContentType, Header, HeaderMap, Method, Status}, @@ -218,7 +215,7 @@ impl<'r, R: 'r + Responder<'r, 'static> + Send> Responder<'r, 'static> for Cache res.set_raw_header("Cache-Control", cache_control_header); let time_now = chrono::Local::now(); - let expiry_time = time_now + chrono::Duration::seconds(self.ttl.try_into().unwrap()); + let expiry_time = time_now + chrono::TimeDelta::try_seconds(self.ttl.try_into().unwrap()).unwrap(); res.set_raw_header("Expires", format_datetime_http(&expiry_time)); Ok(res) } @@ -334,40 +331,6 @@ impl Fairing for BetterLogging { } } -// -// File handling -// -use std::{ - fs::{self, File}, - io::Result as IOResult, - path::Path, -}; - -pub fn file_exists(path: &str) -> bool { - Path::new(path).exists() -} - -pub fn write_file(path: &str, content: &[u8]) -> Result<(), crate::error::Error> { - use std::io::Write; - let mut f = match File::create(path) { - Ok(file) => file, - Err(e) => { - if e.kind() == ErrorKind::PermissionDenied { - error!("Can't create '{}': Permission denied", path); - } - return Err(From::from(e)); - } - }; - - f.write_all(content)?; - f.flush()?; - Ok(()) -} - -pub fn delete_file(path: &str) -> IOResult<()> { - fs::remove_file(path) -} - pub fn get_display_size(size: i64) -> String { const UNITS: [&str; 6] = ["bytes", "KB", "MB", "GB", "TB", "PB"]; @@ -444,7 +407,7 @@ pub fn get_env_str_value(key: &str) -> Option { match (value_from_env, value_file) { (Ok(_), Ok(_)) => panic!("You should not define both {key} and {key_file}!"), (Ok(v_env), Err(_)) => Some(v_env), - (Err(_), Ok(v_file)) => match 
fs::read_to_string(v_file) { + (Err(_), Ok(v_file)) => match std::fs::read_to_string(v_file) { Ok(content) => Some(content.trim().to_string()), Err(e) => panic!("Failed to load {key}: {e:?}"), }, @@ -558,30 +521,38 @@ pub fn container_base_image() -> &'static str { use std::fmt; use serde::de::{self, DeserializeOwned, Deserializer, MapAccess, SeqAccess, Visitor}; -use serde_json::{self, Value}; +use serde_json::Value; pub type JsonMap = serde_json::Map; #[derive(Serialize, Deserialize)] -pub struct UpCase { - #[serde(deserialize_with = "upcase_deserialize")] +pub struct LowerCase { + #[serde(deserialize_with = "lowercase_deserialize")] #[serde(flatten)] pub data: T, } +impl Default for LowerCase { + fn default() -> Self { + Self { + data: Value::Null, + } + } +} + // https://github.com/serde-rs/serde/issues/586 -pub fn upcase_deserialize<'de, T, D>(deserializer: D) -> Result +pub fn lowercase_deserialize<'de, T, D>(deserializer: D) -> Result where T: DeserializeOwned, D: Deserializer<'de>, { - let d = deserializer.deserialize_any(UpCaseVisitor)?; + let d = deserializer.deserialize_any(LowerCaseVisitor)?; T::deserialize(d).map_err(de::Error::custom) } -struct UpCaseVisitor; +struct LowerCaseVisitor; -impl<'de> Visitor<'de> for UpCaseVisitor { +impl<'de> Visitor<'de> for LowerCaseVisitor { type Value = Value; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -595,7 +566,7 @@ impl<'de> Visitor<'de> for UpCaseVisitor { let mut result_map = JsonMap::new(); while let Some((key, value)) = map.next_entry()? { - result_map.insert(upcase_first(key), upcase_value(value)); + result_map.insert(_process_key(key), convert_json_key_lcase_first(value)); } Ok(Value::Object(result_map)) @@ -608,45 +579,23 @@ impl<'de> Visitor<'de> for UpCaseVisitor { let mut result_seq = Vec::::new(); while let Some(value) = seq.next_element()? 
{ - result_seq.push(upcase_value(value)); + result_seq.push(convert_json_key_lcase_first(value)); } Ok(Value::Array(result_seq)) } } -fn upcase_value(value: Value) -> Value { - if let Value::Object(map) = value { - let mut new_value = Value::Object(serde_json::Map::new()); - - for (key, val) in map.into_iter() { - let processed_key = _process_key(&key); - new_value[processed_key] = upcase_value(val); - } - new_value - } else if let Value::Array(array) = value { - // Initialize array with null values - let mut new_value = Value::Array(vec![Value::Null; array.len()]); - - for (index, val) in array.into_iter().enumerate() { - new_value[index] = upcase_value(val); - } - new_value - } else { - value - } -} - // Inner function to handle a special case for the 'ssn' key. // This key is part of the Identity Cipher (Social Security Number) fn _process_key(key: &str) -> String { match key.to_lowercase().as_ref() { - "ssn" => "SSN".into(), - _ => self::upcase_first(key), + "ssn" => "ssn".into(), + _ => self::lcase_first(key), } } -#[derive(Deserialize, Debug, Clone)] +#[derive(Clone, Debug, Deserialize)] #[serde(untagged)] pub enum NumberOrString { Number(i64), @@ -739,14 +688,9 @@ where use reqwest::{header, Client, ClientBuilder}; -pub fn get_reqwest_client() -> Client { - match get_reqwest_client_builder().build() { - Ok(client) => client, - Err(e) => { - error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'"); - get_reqwest_client_builder().trust_dns(false).build().expect("Failed to build client") - } - } +pub fn get_reqwest_client() -> &'static Client { + static INSTANCE: Lazy = Lazy::new(|| get_reqwest_client_builder().build().expect("Failed to build client")); + &INSTANCE } pub fn get_reqwest_client_builder() -> ClientBuilder { @@ -768,25 +712,25 @@ pub fn convert_json_key_lcase_first(src_json: Value) -> Value { Value::Object(obj) => { let mut json_map = JsonMap::new(); - for (key, value) in obj.iter() { + for (key, value) in obj.into_iter() { match 
(key, value) { (key, Value::Object(elm)) => { - let inner_value = convert_json_key_lcase_first(Value::Object(elm.clone())); - json_map.insert(lcase_first(key), inner_value); + let inner_value = convert_json_key_lcase_first(Value::Object(elm)); + json_map.insert(_process_key(&key), inner_value); } (key, Value::Array(elm)) => { let mut inner_array: Vec = Vec::with_capacity(elm.len()); for inner_obj in elm { - inner_array.push(convert_json_key_lcase_first(inner_obj.clone())); + inner_array.push(convert_json_key_lcase_first(inner_obj)); } - json_map.insert(lcase_first(key), Value::Array(inner_array)); + json_map.insert(_process_key(&key), Value::Array(inner_array)); } (key, value) => { - json_map.insert(lcase_first(key), value.clone()); + json_map.insert(_process_key(&key), value); } } } @@ -805,3 +749,248 @@ pub fn parse_experimental_client_feature_flags(experimental_client_feature_flags feature_states } + +mod dns_resolver { + use std::{ + fmt, + net::{IpAddr, SocketAddr}, + sync::Arc, + }; + + use hickory_resolver::{system_conf::read_system_conf, TokioAsyncResolver}; + use once_cell::sync::Lazy; + use reqwest::dns::{Name, Resolve, Resolving}; + + use crate::{util::is_global, CONFIG}; + + #[derive(Debug, Clone)] + pub enum CustomResolverError { + Blacklist { + domain: String, + }, + NonGlobalIp { + domain: String, + ip: IpAddr, + }, + } + + impl CustomResolverError { + pub fn downcast_ref(e: &dyn std::error::Error) -> Option<&Self> { + let mut source = e.source(); + + while let Some(err) = source { + source = err.source(); + if let Some(err) = err.downcast_ref::() { + return Some(err); + } + } + None + } + } + + impl fmt::Display for CustomResolverError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Blacklist { + domain, + } => write!(f, "Blacklisted domain: {domain} matched ICON_BLACKLIST_REGEX"), + Self::NonGlobalIp { + domain, + ip, + } => write!(f, "IP {ip} for domain '{domain}' is not a global IP!"), + } + } + } + + impl 
std::error::Error for CustomResolverError {} + + #[derive(Debug, Clone)] + pub enum CustomDnsResolver { + Default(), + Hickory(Arc), + } + type BoxError = Box; + + impl CustomDnsResolver { + pub fn instance() -> Arc { + static INSTANCE: Lazy> = Lazy::new(CustomDnsResolver::new); + Arc::clone(&*INSTANCE) + } + + fn new() -> Arc { + match read_system_conf() { + Ok((config, opts)) => { + let resolver = TokioAsyncResolver::tokio(config.clone(), opts.clone()); + Arc::new(Self::Hickory(Arc::new(resolver))) + } + Err(e) => { + warn!("Error creating Hickory resolver, falling back to default: {e:?}"); + Arc::new(Self::Default()) + } + } + } + + // Note that we get an iterator of addresses, but we only grab the first one for convenience + async fn resolve_domain(&self, name: &str) -> Result, BoxError> { + pre_resolve(name)?; + + let result = match self { + Self::Default() => tokio::net::lookup_host(name).await?.next(), + Self::Hickory(r) => r.lookup_ip(name).await?.iter().next().map(|a| SocketAddr::new(a, 0)), + }; + + if let Some(addr) = &result { + post_resolve(name, addr.ip())?; + } + + Ok(result) + } + } + + fn pre_resolve(name: &str) -> Result<(), CustomResolverError> { + if crate::api::is_domain_blacklisted(name) { + return Err(CustomResolverError::Blacklist { + domain: name.to_string(), + }); + } + + Ok(()) + } + + fn post_resolve(name: &str, ip: IpAddr) -> Result<(), CustomResolverError> { + if CONFIG.icon_blacklist_non_global_ips() && !is_global(ip) { + Err(CustomResolverError::NonGlobalIp { + domain: name.to_string(), + ip, + }) + } else { + Ok(()) + } + } + + impl Resolve for CustomDnsResolver { + fn resolve(&self, name: Name) -> Resolving { + let this = self.clone(); + Box::pin(async move { + let name = name.as_str(); + let result = this.resolve_domain(name).await?; + Ok::(Box::new(result.into_iter())) + }) + } + } +} + +pub use dns_resolver::{CustomDnsResolver, CustomResolverError}; + +/// TODO: This is extracted from IpAddr::is_global, which is unstable: +/// 
https://doc.rust-lang.org/nightly/std/net/enum.IpAddr.html#method.is_global +/// Remove once https://github.com/rust-lang/rust/issues/27709 is merged +#[allow(clippy::nonminimal_bool)] +#[cfg(any(not(feature = "unstable"), test))] +pub fn is_global_hardcoded(ip: std::net::IpAddr) -> bool { + match ip { + std::net::IpAddr::V4(ip) => { + !(ip.octets()[0] == 0 // "This network" + || ip.is_private() + || (ip.octets()[0] == 100 && (ip.octets()[1] & 0b1100_0000 == 0b0100_0000)) //ip.is_shared() + || ip.is_loopback() + || ip.is_link_local() + // addresses reserved for future protocols (`192.0.0.0/24`) + ||(ip.octets()[0] == 192 && ip.octets()[1] == 0 && ip.octets()[2] == 0) + || ip.is_documentation() + || (ip.octets()[0] == 198 && (ip.octets()[1] & 0xfe) == 18) // ip.is_benchmarking() + || (ip.octets()[0] & 240 == 240 && !ip.is_broadcast()) //ip.is_reserved() + || ip.is_broadcast()) + } + std::net::IpAddr::V6(ip) => { + !(ip.is_unspecified() + || ip.is_loopback() + // IPv4-mapped Address (`::ffff:0:0/96`) + || matches!(ip.segments(), [0, 0, 0, 0, 0, 0xffff, _, _]) + // IPv4-IPv6 Translat. 
(`64:ff9b:1::/48`) + || matches!(ip.segments(), [0x64, 0xff9b, 1, _, _, _, _, _]) + // Discard-Only Address Block (`100::/64`) + || matches!(ip.segments(), [0x100, 0, 0, 0, _, _, _, _]) + // IETF Protocol Assignments (`2001::/23`) + || (matches!(ip.segments(), [0x2001, b, _, _, _, _, _, _] if b < 0x200) + && !( + // Port Control Protocol Anycast (`2001:1::1`) + u128::from_be_bytes(ip.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0001 + // Traversal Using Relays around NAT Anycast (`2001:1::2`) + || u128::from_be_bytes(ip.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0002 + // AMT (`2001:3::/32`) + || matches!(ip.segments(), [0x2001, 3, _, _, _, _, _, _]) + // AS112-v6 (`2001:4:112::/48`) + || matches!(ip.segments(), [0x2001, 4, 0x112, _, _, _, _, _]) + // ORCHIDv2 (`2001:20::/28`) + || matches!(ip.segments(), [0x2001, b, _, _, _, _, _, _] if (0x20..=0x2F).contains(&b)) + )) + || ((ip.segments()[0] == 0x2001) && (ip.segments()[1] == 0xdb8)) // ip.is_documentation() + || ((ip.segments()[0] & 0xfe00) == 0xfc00) //ip.is_unique_local() + || ((ip.segments()[0] & 0xffc0) == 0xfe80)) //ip.is_unicast_link_local() + } + } +} + +#[cfg(not(feature = "unstable"))] +pub use is_global_hardcoded as is_global; + +#[cfg(feature = "unstable")] +#[inline(always)] +pub fn is_global(ip: std::net::IpAddr) -> bool { + ip.is_global() +} + +/// These are some tests to check that the implementations match +/// The IPv4 can be all checked in 30 seconds or so and they are correct as of nightly 2023-07-17 +/// The IPV6 can't be checked in a reasonable time, so we check over a hundred billion random ones, so far correct +/// Note that the is_global implementation is subject to change as new IP RFCs are created +/// +/// To run while showing progress output: +/// cargo +nightly test --release --features sqlite,unstable -- --nocapture --ignored +#[cfg(test)] +#[cfg(feature = "unstable")] +mod tests { + use super::*; + use std::net::IpAddr; + + #[test] + #[ignore] + fn test_ipv4_global() { 
+ for a in 0..u8::MAX { + println!("Iter: {}/255", a); + for b in 0..u8::MAX { + for c in 0..u8::MAX { + for d in 0..u8::MAX { + let ip = IpAddr::V4(std::net::Ipv4Addr::new(a, b, c, d)); + assert_eq!(ip.is_global(), is_global_hardcoded(ip), "IP mismatch: {}", ip) + } + } + } + } + } + + #[test] + #[ignore] + fn test_ipv6_global() { + use rand::Rng; + + std::thread::scope(|s| { + for t in 0..16 { + let handle = s.spawn(move || { + let mut v = [0u8; 16]; + let mut rng = rand::thread_rng(); + + for i in 0..20 { + println!("Thread {t} Iter: {i}/50"); + for _ in 0..500_000_000 { + rng.fill(&mut v); + let ip = IpAddr::V6(std::net::Ipv6Addr::from(v)); + assert_eq!(ip.is_global(), is_global_hardcoded(ip), "IP mismatch: {ip}"); + } + } + }); + } + }); + } +} diff --git a/tools/global_domains.py b/tools/global_domains.py index bd1df58d..66edca31 100755 --- a/tools/global_domains.py +++ b/tools/global_domains.py @@ -71,9 +71,9 @@ with urllib.request.urlopen(DOMAIN_LISTS_URL) as response: global_domains = [] for name, domain_list in domain_lists.items(): entry = OrderedDict() - entry["Type"] = enums[name] - entry["Domains"] = domain_list - entry["Excluded"] = False + entry["type"] = enums[name] + entry["domains"] = domain_list + entry["excluded"] = False global_domains.append(entry) # Write out the global domains JSON file.