diff --git a/.dockerignore b/.dockerignore index 69f51d2a..c7ffe132 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,13 +3,18 @@ target # Data folder data + +# Misc .env .env.template .gitattributes +.gitignore +rustfmt.toml # IDE files .vscode .idea +.editorconfig *.iml # Documentation @@ -19,9 +24,17 @@ data *.yml *.yaml -# Docker folders +# Docker hooks tools +Dockerfile +.dockerignore +docker/** +!docker/healthcheck.sh +!docker/start.sh # Web vault -web-vault \ No newline at end of file +web-vault + +# Vaultwarden Resources +resources diff --git a/.env.template b/.env.template index 1f4c937f..66a04343 100644 --- a/.env.template +++ b/.env.template @@ -3,6 +3,11 @@ ## ## Be aware that most of these settings will be overridden if they were changed ## in the admin interface. Those overrides are stored within DATA_FOLDER/config.json . +## +## By default, vaultwarden expects for this file to be named ".env" and located +## in the current working directory. If this is not the case, the environment +## variable ENV_FILE can be set to the location of this file prior to starting +## vaultwarden. ## Main data folder # DATA_FOLDER=data @@ -24,11 +29,21 @@ ## Define the size of the connection pool used for connecting to the database. # DATABASE_MAX_CONNS=10 +## Database connection initialization +## Allows SQL statements to be run whenever a new database connection is created. +## This is mainly useful for connection-scoped pragmas. 
+## If empty, a database-specific default is used: +## - SQLite: "PRAGMA busy_timeout = 5000; PRAGMA synchronous = NORMAL;" +## - MySQL: "" +## - PostgreSQL: "" +# DATABASE_CONN_INIT="" + ## Individual folders, these override %DATA_FOLDER% # RSA_KEY_FILENAME=data/rsa_key # ICON_CACHE_FOLDER=data/icon_cache # ATTACHMENTS_FOLDER=data/attachments # SENDS_FOLDER=data/sends +# TMP_FOLDER=data/tmp ## Templates data folder, by default uses embedded templates ## Check source code to see the format @@ -102,12 +117,10 @@ # LOG_TIMESTAMP_FORMAT="%Y-%m-%d %H:%M:%S.%3f" ## Logging to file -## It's recommended to also set 'ROCKET_CLI_COLORS=off' # LOG_FILE=/path/to/log ## Logging to Syslog ## This requires extended logging -## It's recommended to also set 'ROCKET_CLI_COLORS=off' # USE_SYSLOG=false ## Log level @@ -185,7 +198,7 @@ # EMAIL_EXPIRATION_TIME=600 ## Email token size -## Number of digits in an email token (min: 6, max: 19). +## Number of digits in an email 2FA token (min: 6, max: 255). ## Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting! # EMAIL_TOKEN_SIZE=6 @@ -257,6 +270,9 @@ ## The change only applies when the password is changed # PASSWORD_ITERATIONS=100000 +## Controls whether users can set password hints. This setting applies globally to all users. +# PASSWORD_HINTS_ALLOWED=true + ## Controls whether a password hint should be shown directly in the web page if ## SMTP service is not configured. Not recommended for publicly-accessible instances ## as this provides unauthenticated access to potentially sensitive data. @@ -267,7 +283,7 @@ ## It's recommended to configure this value, otherwise certain functionality might not work, ## like attachment downloads, email links and U2F. ## For U2F to work, the server must use HTTPS, you can use Let's Encrypt for free certs -# DOMAIN=https://bw.domain.tld:8443 +# DOMAIN=https://vw.domain.tld:8443 ## Allowed iframe ancestors (Know the risks!) 
## https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/frame-ancestors @@ -331,9 +347,8 @@ # SMTP_HOST=smtp.domain.tld # SMTP_FROM=vaultwarden@domain.tld # SMTP_FROM_NAME=Vaultwarden +# SMTP_SECURITY=starttls # ("starttls", "force_tls", "off") Enable a secure connection. Default is "starttls" (Explicit - ports 587 or 25), "force_tls" (Implicit - port 465) or "off", no encryption (port 25) # SMTP_PORT=587 # Ports 587 (submission) and 25 (smtp) are standard without encryption and with encryption via STARTTLS (Explicit TLS). Port 465 is outdated and used with Implicit TLS. -# SMTP_SSL=true # (Explicit) - This variable by default configures Explicit STARTTLS, it will upgrade an insecure connection to a secure one. Unless SMTP_EXPLICIT_TLS is set to true. Either port 587 or 25 are default. -# SMTP_EXPLICIT_TLS=true # (Implicit) - N.B. This variable configures Implicit TLS. It's currently mislabelled (see bug #851) - SMTP_SSL Needs to be set to true for this option to work. Usually port 465 is used here. # SMTP_USERNAME=username # SMTP_PASSWORD=password # SMTP_TIMEOUT=15 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f92e6e54..90763d1b 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,7 +8,6 @@ on: - "migrations/**" - "Cargo.*" - "build.rs" - - "diesel.toml" - "rust-toolchain" pull_request: paths: @@ -17,11 +16,11 @@ on: - "migrations/**" - "Cargo.*" - "build.rs" - - "diesel.toml" - "rust-toolchain" jobs: build: + runs-on: ubuntu-20.04 # Make warnings errors, this is to prevent warnings slipping through. # This is done globally to prevent rebuilds when the RUSTFLAGS env variable changes. 
env: @@ -30,118 +29,169 @@ jobs: fail-fast: false matrix: channel: - - nightly - target-triple: - - x86_64-unknown-linux-gnu - include: - - target-triple: x86_64-unknown-linux-gnu - host-triple: x86_64-unknown-linux-gnu - features: [sqlite,mysql,postgresql] # Remember to update the `cargo test` to match the amount of features - channel: nightly - os: ubuntu-20.04 - ext: "" - - name: Building ${{ matrix.channel }}-${{ matrix.target-triple }} - runs-on: ${{ matrix.os }} + - "rust-toolchain" # The version defined in rust-toolchain + - "1.60.0" # The supported MSRV + + name: Build and Test ${{ matrix.channel }} + steps: # Checkout the repo - - name: Checkout - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 + - name: "Checkout" + uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2 # End Checkout the repo - - # Install musl-tools when needed - - name: Install musl tools - run: sudo apt-get update && sudo apt-get install -y --no-install-recommends musl-dev musl-tools cmake - if: matrix.target-triple == 'x86_64-unknown-linux-musl' - # End Install musl-tools when needed - - # Install dependencies - - name: Install dependencies Ubuntu - run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl sqlite build-essential libmariadb-dev-compat libpq-dev libssl-dev pkgconf - if: startsWith( matrix.os, 'ubuntu' ) + - name: "Install dependencies Ubuntu" + run: sudo apt-get update && sudo apt-get install -y --no-install-recommends openssl sqlite build-essential libmariadb-dev-compat libpq-dev libssl-dev pkg-config # End Install dependencies - # Enable Rust Caching - - uses: Swatinem/rust-cache@842ef286fff290e445b90b4002cc9807c3669641 # v1.3.0 - # End Enable Rust Caching - - # Uses the rust-toolchain file to determine version - - name: 'Install ${{ matrix.channel }}-${{ matrix.host-triple }} for target: ${{ matrix.target-triple }}' + - name: "Install rust-toolchain version" uses: 
actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f # v1.0.6 + if: ${{ matrix.channel == 'rust-toolchain' }} with: profile: minimal - target: ${{ matrix.target-triple }} components: clippy, rustfmt # End Uses the rust-toolchain file to determine version + # Install the MSRV channel to be used + - name: "Install MSRV version" + uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f # v1.0.6 + if: ${{ matrix.channel != 'rust-toolchain' }} + with: + profile: minimal + override: true + toolchain: ${{ matrix.channel }} + # End Install the MSRV channel to be used + + + # Enable Rust Caching + - uses: Swatinem/rust-cache@6720f05bc48b77f96918929a9019fb2203ff71f8 # v2.0.0 + # End Enable Rust Caching + + + # Show environment + - name: "Show environment" + run: | + rustc -vV + cargo -vV + # End Show environment + + # Run cargo tests (In release mode to speed up future builds) # First test all features together, afterwards test them separately. - - name: "`cargo test --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + - name: "test features: sqlite,mysql,postgresql,enable_mimalloc" + id: test_sqlite_mysql_postgresql_mimalloc + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: $${{ always() }} with: command: test - args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} - # Test single features - # 0: sqlite - - name: "`cargo test --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + args: --release --features sqlite,mysql,postgresql,enable_mimalloc + + - name: "test features: sqlite,mysql,postgresql" + id: test_sqlite_mysql_postgresql + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: $${{ always() }} + with: + command: test + 
args: --release --features sqlite,mysql,postgresql + + - name: "test features: sqlite" + id: test_sqlite + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: $${{ always() }} with: command: test - args: --release --features ${{ matrix.features[0] }} --target ${{ matrix.target-triple }} - if: ${{ matrix.features[0] != '' }} - # 1: mysql - - name: "`cargo test --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + args: --release --features sqlite + + - name: "test features: mysql" + id: test_mysql + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: $${{ always() }} with: command: test - args: --release --features ${{ matrix.features[1] }} --target ${{ matrix.target-triple }} - if: ${{ matrix.features[1] != '' }} - # 2: postgresql - - name: "`cargo test --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + args: --release --features mysql + + - name: "test features: postgresql" + id: test_postgresql + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: $${{ always() }} with: command: test - args: --release --features ${{ matrix.features[2] }} --target ${{ matrix.target-triple }} - if: ${{ matrix.features[2] != '' }} + args: --release --features postgresql # End Run cargo tests # Run cargo clippy, and fail on warnings (In release mode to speed up future builds) - - name: "`cargo clippy --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc" + id: clippy + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: ${{ always() && matrix.channel == 'rust-toolchain' }} with: 
command: clippy - args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} -- -D warnings + args: --release --features sqlite,mysql,postgresql,enable_mimalloc -- -D warnings # End Run cargo clippy - # Run cargo fmt - - name: '`cargo fmt`' - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + # Run cargo fmt (Only run on rust-toolchain defined version) + - name: "check formatting" + id: formatting + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: ${{ always() && matrix.channel == 'rust-toolchain' }} with: command: fmt args: --all -- --check # End Run cargo fmt - # Build the binary - - name: "`cargo build --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }}`" - uses: actions-rs/cargo@ae10961054e4aa8b4aa7dffede299aaf087aa33b # v1.0.1 + # Check for any previous failures, if there are stop, else continue. + # This is useful so all test/clippy/fmt actions are done, and they can all be addressed + - name: "Some checks failed" + if: ${{ failure() }} + run: | + echo "### :x: Checks Failed!" 
>> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "|Job|Status|" >> $GITHUB_STEP_SUMMARY + echo "|---|------|" >> $GITHUB_STEP_SUMMARY + echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|test (sqlite,mysql,postgresql)|${{ steps.test_sqlite_mysql_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|test (sqlite)|${{ steps.test_sqlite.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|test (mysql)|${{ steps.test_mysql.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|test (postgresql)|${{ steps.test_postgresql.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${{ steps.clippy.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "|fmt|${{ steps.formatting.outcome }}|" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "Please check the failed jobs and fix where needed." >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + exit 1 + + + # Check for any previous failures, if there are stop, else continue. + # This is useful so all test/clippy/fmt actions are done, and they can all be addressed + - name: "All checks passed" + if: ${{ success() }} + run: | + echo "### :tada: Checks Passed!" 
>> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + + + # Build the binary to upload to the artifacts + - name: "build features: sqlite,mysql,postgresql" + uses: actions-rs/cargo@844f36862e911db73fe0815f00a4a2602c279505 # v1.0.3 + if: ${{ matrix.channel == 'rust-toolchain' }} with: command: build - args: --release --features ${{ join(matrix.features, ',') }} --target ${{ matrix.target-triple }} + args: --release --features sqlite,mysql,postgresql # End Build the binary # Upload artifact to Github Actions - - name: Upload artifact - uses: actions/upload-artifact@27121b0bdffd731efa15d66772be8dc71245d074 # v2.2.4 + - name: "Upload artifact" + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 + if: ${{ matrix.channel == 'rust-toolchain' }} with: - name: vaultwarden-${{ matrix.target-triple }}${{ matrix.ext }} - path: target/${{ matrix.target-triple }}/release/vaultwarden${{ matrix.ext }} + name: vaultwarden + path: target/${{ matrix.target-triple }}/release/vaultwarden # End Upload artifact to Github Actions diff --git a/.github/workflows/hadolint.yml b/.github/workflows/hadolint.yml index 375e437a..5865bf63 100644 --- a/.github/workflows/hadolint.yml +++ b/.github/workflows/hadolint.yml @@ -16,18 +16,18 @@ jobs: steps: # Checkout the repo - name: Checkout - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 + uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2 # End Checkout the repo - # Download hadolint + # Download hadolint - https://github.com/hadolint/hadolint/releases - name: Download hadolint shell: bash run: | sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \ sudo chmod +x /usr/local/bin/hadolint env: - HADOLINT_VERSION: 2.7.0 + HADOLINT_VERSION: 2.10.0 # End Download hadolint # Test Dockerfiles diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 
4b425c01..7d0d2ade 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -31,7 +31,7 @@ jobs: steps: - name: Skip Duplicates Actions id: skip_check - uses: fkirc/skip-duplicate-actions@f75dd6564bb646f95277dc8c3b80612e46a4a1ea # v3.4.1 + uses: fkirc/skip-duplicate-actions@9d116fa7e55f295019cfab7e3ab72b478bcf7fdd # v4.0.0 with: cancel_others: 'true' # Only run this when not creating a tag @@ -60,13 +60,13 @@ jobs: steps: # Checkout the repo - name: Checkout - uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f # v2.3.4 + uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # v3.0.2 with: fetch-depth: 0 # Login to Docker Hub - name: Login to Docker Hub - uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # v1.10.0 + uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # v2.0.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b26d8445..eb44b47f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ --- repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 + rev: v4.3.0 hooks: - id: check-yaml - id: check-json @@ -25,14 +25,16 @@ repos: description: Test the package for errors. 
entry: cargo test language: system - args: ["--features", "sqlite,mysql,postgresql", "--"] - types: [rust] + args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"] + types_or: [rust, file] + files: (Cargo.toml|Cargo.lock|.*\.rs$) pass_filenames: false - id: cargo-clippy name: cargo clippy description: Lint Rust sources entry: cargo clippy language: system - args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"] - types: [rust] + args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"] + types_or: [rust, file] + files: (Cargo.toml|Cargo.lock|.*\.rs$) pass_filenames: false diff --git a/Cargo.lock b/Cargo.lock index 8af4f0ea..2dfb8ce0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -18,10 +18,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "ahash" -version = "0.3.8" +name = "aead" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b613b8e1e3cf911a086f53f03bf286f52fd7a7258e4fa606f0ef220d39d8877" +dependencies = [ + "generic-array", +] + +[[package]] +name = "aes" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", + "opaque-debug", +] + +[[package]] +name = "aes-gcm" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217" +checksum = "df5f85a83a7d8b0442b6aa7b504b8212c1733da07b98aae43d4bc21b2cb3cdf6" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] [[package]] name = "aho-corasick" @@ -47,11 +76,20 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "ansi_term" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + [[package]] name = "async-compression" -version = "0.3.8" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443ccbb270374a2b1055fc72da40e1f237809cd6bb0e97e66d264cd138473a6" +checksum = "345fd392ab01f746c717b1357165b76f0b67a60192007b234058c9045fdcf695" dependencies = [ "brotli", "flate2", @@ -61,15 +99,51 @@ dependencies = [ "tokio", ] +[[package]] +name = "async-stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "async-trait" -version = "0.1.52" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async_once" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ce4f10ea3abcd6617873bae9f91d1c5332b4a778bd9ce34d0cd517474c1de82" + +[[package]] +name = "atomic" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" +checksum = "b88d82667eca772c4aa12f0f1348b3ae643424c8876448f3f7bd5787032e234c" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "autocfg", ] [[package]] @@ -80,58 +154,30 @@ checksum = 
"d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi", "libc", - "winapi 0.3.9", + "winapi", ] [[package]] name = "autocfg" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.63" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "321629d8ba6513061f26707241fa9bc89524ff1cd7a915a97ef0c62c666ce1b6" +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", ] -[[package]] -name = "base-x" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4521f3e3d031370679b3b140beb36dfe4801b09ac77e30c61941f97df3ef28b" - -[[package]] -name = "base64" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643" -dependencies = [ - "byteorder", - "safemem", -] - -[[package]] -name = "base64" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - [[package]] name = "base64" version = "0.13.0" @@ -152,39 +198,18 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding", - "byte-tools", - "byteorder", - "generic-array 0.12.4", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array 0.14.4", -] - -[[package]] -name = "block-padding" -version = "0.1.5" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" dependencies = [ - "byte-tools", + "generic-array", ] [[package]] name = "brotli" -version = "3.3.2" +version = "3.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71cb90ade945043d3d53597b2fc359bb063db8ade2bcffe7997351d0756e9d50" +checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -201,27 +226,11 @@ dependencies = [ "alloc-stdlib", ] -[[package]] -name = "buf_redux" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" -dependencies = [ - "memchr", - "safemem", -] - [[package]] name = "bumpalo" -version = "3.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1e260c3a9040a7c19a12468758f4c16f31a81a1fe087482be9570ec864bb6c" - -[[package]] -name = "byte-tools" -version = "0.3.1" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3" [[package]] name = "byteorder" @@ -231,31 +240,52 @@ checksum = 
"14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "0.4.12" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" + +[[package]] +name = "cached" +version = "0.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27e6092f8c7ba6e65a46f6f26d7d7997201d3a6f0e69ff5d2440b930d7c0513a" dependencies = [ - "byteorder", - "iovec", + "async-trait", + "async_once", + "cached_proc_macro", + "cached_proc_macro_types", + "futures", + "hashbrown 0.12.3", + "instant", + "lazy_static", + "once_cell", + "thiserror", + "tokio", ] [[package]] -name = "bytes" -version = "1.1.0" +name = "cached_proc_macro" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" +checksum = "751f7f4e7a091545e7f6c65bacc404eaee7e87bfb1f9ece234a1caa173dc16f2" +dependencies = [ + "cached_proc_macro_types", + "darling", + "quote", + "syn", +] [[package]] -name = "cc" -version = "1.0.72" +name = "cached_proc_macro_types" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" +checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663" [[package]] -name = "cfg-if" -version = "0.1.10" +name = "cc" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" [[package]] name = "cfg-if" @@ -263,101 +293,91 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chashmap" -version = "2.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff41a3c2c1e39921b9003de14bf0439c7b63a9039637c291e1a64925d8ddfa45" -dependencies = [ - "owning_ref", - "parking_lot 0.4.8", -] - [[package]] name = "chrono" -version = "0.4.19" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" +checksum = "6127248204b9aba09a362f6c930ef6a78f2c1b2215f8a7b398c06e1083f17af0" dependencies = [ - "libc", + "js-sys", "num-integer", "num-traits", "serde", - "time 0.1.44", - "winapi 0.3.9", + "time 0.1.43", + "wasm-bindgen", + "winapi", ] [[package]] name = "chrono-tz" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58549f1842da3080ce63002102d5bc954c7bc843d4f47818e642abdc36253552" +checksum = "29c39203181991a7dd4343b8005bd804e7a9a37afb8ac070e43771e8c820bbde" dependencies = [ "chrono", "chrono-tz-build", - "phf 0.10.1", + "phf", ] [[package]] name = "chrono-tz-build" -version = "0.0.2" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db058d493fb2f65f41861bfed7e3fe6335264a9f0f92710cab5bdf01fef09069" +checksum = "6f509c3a87b33437b05e2458750a0700e5bdd6956176773e6c7d6dd15a283a0c" dependencies = [ "parse-zoneinfo", - "phf 0.10.1", - "phf_codegen 0.10.0", + "phf", + "phf_codegen", ] [[package]] -name = "const_fn" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92cfa0fd5690b3cf8c1ef2cabbd9b7ef22fa53cf5e1f92b05103f6d5d1cf6e7" - -[[package]] -name = "cookie" -version = "0.14.4" +name = "cipher" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a5d7b21829bc7b4bf4754a978a241ae54ea55a40f92bb20216e54096f4b951" +checksum = 
"7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "percent-encoding 2.1.0", - "time 0.2.27", - "version_check 0.9.3", + "generic-array", ] [[package]] name = "cookie" -version = "0.15.1" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f1c7727e460397e56abc4bddc1d49e07a1ad78fc98eb2e1c8f032a58a2f80d" +checksum = "94d4706de1b0fa5b132270cddffa8585166037822e260a944fe161acd137ca05" dependencies = [ + "aes-gcm", + "base64", + "hkdf", + "hmac", "percent-encoding 2.1.0", - "time 0.2.27", - "version_check 0.9.3", + "rand", + "sha2", + "subtle", + "time 0.3.12", + "version_check", ] [[package]] name = "cookie_store" -version = "0.15.1" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3f7034c0932dc36f5bd8ec37368d971346809435824f277cb3b8299fc56167c" +checksum = "2e4b6aa369f41f5faa04bb80c9b1f4216ea81646ed6124d76ba5c49a7aafd9cd" dependencies = [ - "cookie 0.15.1", + "cookie", "idna 0.2.3", - "log 0.4.14", + "log", "publicsuffix", "serde", "serde_json", - "time 0.2.27", + "time 0.3.12", "url 2.2.2", ] [[package]] name = "core-foundation" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -371,61 +391,117 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" +checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" dependencies = [ "libc", ] [[package]] name = "crc32fast" -version = "1.3.0" +version = "1.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "738c290dfaea84fc1ca15ad9c168d083b05a714e1efddd8edaab678dc28d2836" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] name = "cron" -version = "0.9.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e009ed0b762cf7a967a34dfdc67d5967d3f828f12901d37081432c3dd1668f8f" +checksum = "d76219e9243e100d5a37676005f08379297f8addfebc247613299600625c734d" dependencies = [ "chrono", - "nom 4.1.1", + "nom", "once_cell", ] [[package]] -name = "crypto-mac" -version = "0.10.1" +name = "crossbeam-utils" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a" +checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" dependencies = [ - "generic-array 0.14.4", - "subtle", + "cfg-if", + "once_cell", ] [[package]] -name = "crypto-mac" -version = "0.11.1" +name = "crypto-common" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.4", - "subtle", + "generic-array", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" +dependencies = [ + "cipher", +] + +[[package]] +name = "ctrlc" +version = "3.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b37feaa84e6861e00a1f5e5aa8da3ee56d605c9992d33e082786754828e20865" +dependencies = [ + "nix", + "winapi", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote", + "syn", ] [[package]] name = "dashmap" -version = "4.0.2" +version = "5.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c" +checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f" dependencies = [ - "cfg-if 1.0.0", - "num_cpus", + "cfg-if", + "hashbrown 0.12.3", + "lock_api", + "parking_lot_core", ] [[package]] @@ -445,8 +521,9 @@ dependencies = [ [[package]] name = "devise" -version = "0.3.0" -source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c7580b072f1c8476148f16e0a0d5dedddab787da98d86c5082c5e9ed8ab595" dependencies = [ "devise_codegen", "devise_core", @@ -454,22 +531,25 @@ dependencies = [ [[package]] name = "devise_codegen" -version = "0.3.0" -source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "123c73e7a6e51b05c75fe1a1b2f4e241399ea5740ed810b0e3e6cacd9db5e7b2" dependencies = [ "devise_core", - 
"quote 1.0.10", + "quote", ] [[package]] name = "devise_core" -version = "0.3.0" -source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841ef46f4787d9097405cac4e70fb8644fc037b526e8c14054247c0263c400d0" dependencies = [ "bitflags", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn", ] [[package]] @@ -495,9 +575,9 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -512,104 +592,136 @@ dependencies = [ [[package]] name = "digest" -version = "0.8.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" dependencies = [ - "generic-array 0.12.4", + "block-buffer", + "crypto-common", + "subtle", ] [[package]] -name = "digest" -version = "0.9.0" +name = "dirs" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" dependencies = [ - "generic-array 0.14.4", + "dirs-sys", ] [[package]] -name = "discard" -version = "1.0.4" +name = "dirs-sys" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] 
[[package]] -name = "dotenv" -version = "0.15.0" +name = "dotenvy" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" +checksum = "7e851a83c30366fd01d75b913588e95e74a1705c1ecc5d58b1f8e1a6d556525f" +dependencies = [ + "dirs", +] [[package]] name = "either" -version = "1.6.1" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" + +[[package]] +name = "email-encoding" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34dd14c63662e0206599796cd5e1ad0268ab2b9d19b868d6050d688eba2bbf98" +dependencies = [ + "base64", + "memchr", +] + +[[package]] +name = "email_address" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "8684b7c9cb4857dfa1e5b9629ef584ba618c9b93bae60f58cb23f4f271d0468e" [[package]] name = "encoding_rs" -version = "0.8.30" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" +checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] name = "enum-as-inner" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c5f0096a91d210159eceb2ff5e1c4da18388a170e1e3ce948aac9c8fdbbf595" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ "heck", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "error-chain" -version = "0.11.0" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3" +checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" dependencies = [ - "backtrace", + "version_check", ] -[[package]] -name = "fake-simd" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" - [[package]] name = "fastrand" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "779d043b6a0b90cc4c0ed7ee380a6504394cee7efd7db050e3774eee387324b2" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] [[package]] name = "fern" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9a4820f0ccc8a7afd67c39a0f1a0f4b07ca1725164271a64939d7aeb9af065" +checksum = "3bdd7b0849075e79ee9a1836df22c717d1eba30451796fdc631b04565dd11e2a" dependencies = [ - "log 0.4.14", + "log", "syslog", ] +[[package]] +name = "figment" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790b4292c72618abbab50f787a477014fe15634f96291de45672ce46afe122df" +dependencies = [ + "atomic", + "pear", + "serde", + "toml", + "uncased", + "version_check", +] + [[package]] name = "flate2" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if 1.0.0", "crc32fast", - "libc", "miniz_oxide", ] @@ -644,43 +756,11 @@ dependencies = [ "percent-encoding 2.1.0", ] -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - 
-[[package]] -name = "fuchsia-zircon" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" -dependencies = [ - "bitflags", - "fuchsia-zircon-sys", -] - -[[package]] -name = "fuchsia-zircon-sys" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" - -[[package]] -name = "futf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b" -dependencies = [ - "mac", - "new_debug_unreachable", -] - [[package]] name = "futures" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" +checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" dependencies = [ "futures-channel", "futures-core", @@ -693,9 +773,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" dependencies = [ "futures-core", "futures-sink", @@ -703,15 +783,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" +checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" [[package]] name = "futures-executor" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" +checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" dependencies = [ "futures-core", "futures-task", @@ -720,32 +800,32 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" +checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" [[package]] name = "futures-macro" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" +checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "futures-sink" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" +checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" [[package]] name = "futures-task" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" +checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" [[package]] name = "futures-timer" @@ -755,9 +835,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.19" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" dependencies = [ 
"futures-channel", "futures-core", @@ -772,51 +852,54 @@ dependencies = [ ] [[package]] -name = "generic-array" -version = "0.12.4" +name = "generator" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +checksum = "cc184cace1cea8335047a471cc1da80f18acf8a76f3bab2028d499e328948ec7" dependencies = [ - "typenum", + "cc", + "libc", + "log", + "rustversion", + "windows", ] [[package]] name = "generic-array" -version = "0.14.4" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", - "version_check 0.9.3", + "version_check", ] [[package]] name = "getrandom" -version = "0.1.16" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +checksum = "4eb1a864a501629691edf6c15a593b7a51eebaa1e8468e9ddc623de7c9b58ec6" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", - "wasi 0.9.0+wasi-snapshot-preview1", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] -name = "getrandom" -version = "0.2.3" +name = "ghash" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" +checksum = "1583cc1656d7839fd3732b80cf4f38850336cdb9b8ded1cd399ca62958de3c99" dependencies = [ - "cfg-if 1.0.0", - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", + "opaque-debug", + "polyval", ] [[package]] name = "gimli" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" +checksum = 
"22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" [[package]] name = "glob" @@ -826,28 +909,28 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "governor" -version = "0.3.2" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06c5d2f987ee8f6dff3fa1a352058dc59b990e447e4c7846aa7d804971314f7b" +checksum = "19775995ee20209163239355bc3ad2f33f83da35d9ef72dea26e5af753552c87" dependencies = [ "dashmap", "futures", "futures-timer", "no-std-compat", "nonzero_ext", - "parking_lot 0.11.2", + "parking_lot", "quanta", - "rand 0.8.4", - "smallvec 1.7.0", + "rand", + "smallvec", ] [[package]] name = "h2" -version = "0.3.9" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f072413d126e57991455e0a922b31e4c8ba7c2ffbebf6b78b4f8521397d65cd" +checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" dependencies = [ - "bytes 1.1.0", + "bytes", "fnv", "futures-core", "futures-sink", @@ -868,43 +951,36 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "handlebars" -version = "4.1.6" +version = "4.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "167fa173496c9eadd8749cca6f8339ac88e248f3ad2442791d0b743318a94fc0" +checksum = "360d9740069b2f6cbb63ce2dbaa71a20d3185350cbb990d7bebeb9318415eb17" dependencies = [ - "log 0.4.14", + "log", "pest", "pest_derive", - "quick-error 2.0.1", "serde", "serde_json", + "thiserror", "walkdir", ] [[package]] name = "hashbrown" -version = "0.8.2" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25" -dependencies = [ - "ahash", - "autocfg", -] +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" [[package]] name = "hashbrown" -version = "0.11.2" +version = "0.12.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "heck" -version = "0.3.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" -dependencies = [ - "unicode-segmentation", -] +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" [[package]] name = "hermit-abi" @@ -916,23 +992,21 @@ dependencies = [ ] [[package]] -name = "hmac" -version = "0.10.1" +name = "hkdf" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" dependencies = [ - "crypto-mac 0.10.1", - "digest 0.9.0", + "hmac", ] [[package]] name = "hmac" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "crypto-mac 0.11.1", - "digest 0.9.0", + "digest", ] [[package]] @@ -943,50 +1017,45 @@ checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" dependencies = [ "libc", "match_cfg", - "winapi 0.3.9", + "winapi", ] [[package]] -name = "html5ever" -version = "0.25.1" +name = "html5gum" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aafcf38a1a36118242d29b92e1b08ef84e67e4a5ed06e0a80be20e6a32bfed6b" +checksum = "3404cc217cc3e11d09c8ac9ccf8b1e540f64477c253d6dc70b5a5074782d934d" dependencies = [ - "log 0.4.14", - "mac", - "markup5ever", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "jetscii", ] 
[[package]] name = "http" -version = "0.2.5" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.1.0", + "bytes", "fnv", - "itoa 0.4.8", + "itoa", ] [[package]] name = "http-body" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.1.0", + "bytes", "http", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.5.1" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" +checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" [[package]] name = "httpdate" @@ -996,30 +1065,11 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.10.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a0652d9a2609a968c14be1a9ea00bf4b1d64e2e1f53a1b51b6fff3a6e829273" -dependencies = [ - "base64 0.9.3", - "httparse", - "language-tags", - "log 0.3.9", - "mime 0.2.6", - "num_cpus", - "time 0.1.44", - "traitobject", - "typeable", - "unicase 1.4.2", - "url 1.7.2", -] - -[[package]] -name = "hyper" -version = "0.14.16" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ - "bytes 1.1.0", + "bytes", "futures-channel", "futures-core", "futures-util", @@ -1028,40 +1078,34 @@ dependencies = [ "http-body", 
"httparse", "httpdate", - "itoa 0.4.8", + "itoa", "pin-project-lite", - "socket2 0.4.2", + "socket2", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper-sync-rustls" -version = "0.3.0-rc.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cb014c4ea00486e2b62860b5e15229d37516d4924177218beafbf46583de3ab" -dependencies = [ - "hyper 0.10.16", - "rustls", - "webpki", - "webpki-roots", -] - [[package]] name = "hyper-tls" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.1.0", - "hyper 0.14.16", + "bytes", + "hyper", "native-tls", "tokio", "tokio-native-tls", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "0.1.5" @@ -1086,66 +1130,65 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.7.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown 0.11.2", + "hashbrown 0.12.3", + "serde", ] [[package]] -name = "instant" -version = "0.1.12" +name = "inlinable_string" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if 1.0.0", -] +checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb" [[package]] -name = "iovec" -version = "0.1.4" +name = "instant" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "libc", + "cfg-if", ] [[package]] name = "ipconfig" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e2f18aece9709094573a9f24f483c4f65caa4298e2f7ae1b71cc65d853fad7" +checksum = "723519edce41262b05d4143ceb95050e4c614f483e78e9fd9e39a8275a84ad98" dependencies = [ - "socket2 0.3.19", + "socket2", "widestring", - "winapi 0.3.9", - "winreg 0.6.2", + "winapi", + "winreg 0.7.0", ] [[package]] name = "ipnet" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" +checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b" [[package]] name = "itoa" -version = "0.4.8" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] -name = "itoa" -version = "1.0.1" +name = "jetscii" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" +checksum = "47f142fe24a9c9944451e8349de0a56af5f3e7226dc46f3ed4d4ecc0b85af75e" [[package]] -name = "job_scheduler" -version = "1.2.1" -source = "git+https://github.com/jjlin/job_scheduler?rev=ee023418dbba2bfe1e30a5fd7d937f9e33739806#ee023418dbba2bfe1e30a5fd7d937f9e33739806" +name = "job_scheduler_ng" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e488bbc07c44295a7a07bfedfa36c9c77509c2e02599c1b5aef977779afca4d" dependencies = [ "chrono", "cron", @@ -1154,20 +1197,20 @@ dependencies = [ [[package]] name = "js-sys" -version = 
"0.3.55" +version = "0.3.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cc9ffccd38c451a86bf13657df244e9c3f37493cce8e5e21e940963777acc84" +checksum = "258451ab10b34f8af53416d1fdab72c22e805f0c92a1136d59470ec0b11138b2" dependencies = [ "wasm-bindgen", ] [[package]] name = "jsonwebtoken" -version = "7.2.0" +version = "8.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32" +checksum = "1aa4b4af834c6cfd35d8763d359661b90f2e45d8f750a0849156c7f4671af09c" dependencies = [ - "base64 0.12.3", + "base64", "pem", "ring", "serde", @@ -1175,60 +1218,54 @@ dependencies = [ "simple_asn1", ] -[[package]] -name = "kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -dependencies = [ - "winapi 0.2.8", - "winapi-build", -] - -[[package]] -name = "language-tags" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" - [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "lettre" -version = "0.10.0-rc.4" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71d8da8f34d086b081c9cc3b57d3bb3b51d16fc06b5c848a188e2f14d58ac2a5" +checksum = "2eabca5e0b4d0e98e7f2243fb5b7520b6af2b65d8f87bcc86f2c75185a6ff243" dependencies = [ - "base64 0.13.0", + "async-trait", + "base64", + "email-encoding", + "email_address", "fastrand", + "futures-io", + 
"futures-util", "hostname", "httpdate", "idna 0.2.3", - "mime 0.3.16", + "mime", "native-tls", - "nom 7.1.0", + "nom", "once_cell", "quoted_printable", - "regex", "serde", + "socket2", + "tokio", + "tokio-native-tls", "tracing", ] [[package]] name = "libc" -version = "0.2.112" +version = "0.2.127" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" +checksum = "505e71a4706fa491e9b1b55f51b95d4037d0821ee40131190475f692b35b009b" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c" +dependencies = [ + "cc", +] [[package]] name = "libsqlite3-sys" @@ -1243,35 +1280,42 @@ dependencies = [ [[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "lock_api" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109" +checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" dependencies = [ + "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.3.9" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ - "log 0.4.14", + "cfg-if", ] [[package]] -name = "log" -version = "0.4.14" +name = "loom" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +checksum = "ff50ecb28bb86013e935fb6683ab1f6d3a20016f123c76fd4c27470076ac30f5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", + "generator", + "scoped-tls", + "serde", + "serde_json", + "tracing", + "tracing-subscriber", ] [[package]] @@ -1284,48 +1328,28 @@ dependencies = [ ] [[package]] -name = "mac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" - -[[package]] -name = "maplit" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" - -[[package]] -name = "markup5ever" -version = "0.10.1" +name = "mach" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" dependencies = [ - "log 0.4.14", - "phf 0.8.0", - "phf_codegen 0.8.0", - "string_cache", - "string_cache_codegen", - "tendril", + "libc", ] [[package]] -name = "markup5ever_rcdom" +name = "match_cfg" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f015da43bcd8d4f144559a3423f4591d69b8ce0652c905374da7205df336ae2b" -dependencies = [ - "html5ever", - "markup5ever", - "tendril", - "xml5ever", -] +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" [[package]] -name = "match_cfg" +name = "matchers" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] [[package]] name = "matches" @@ -1333,17 +1357,11 @@ version = "0.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" -[[package]] -name = "maybe-uninit" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" - [[package]] name = "memchr" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "migrations_internals" @@ -1361,18 +1379,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" dependencies = [ "migrations_internals", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "mime" -version = "0.2.6" +name = "mimalloc" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba626b8a6de5da682e1caa06bdb42a335aee5a84db8e5046a3e8ab17ba0a3ae0" +checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698" dependencies = [ - "log 0.3.9", + "libmimalloc-sys", ] [[package]] @@ -1381,16 +1399,6 @@ version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" -[[package]] -name = "mime_guess" -version = "2.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" -dependencies = [ - "mime 0.3.16", - "unicase 2.6.0", -] - [[package]] name = "minimal-lexical" version = "0.2.1" @@ -1399,102 +1407,49 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.4.4" 
+version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" +checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" dependencies = [ "adler", - "autocfg", -] - -[[package]] -name = "mio" -version = "0.6.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" -dependencies = [ - "cfg-if 0.1.10", - "fuchsia-zircon", - "fuchsia-zircon-sys", - "iovec", - "kernel32-sys", - "libc", - "log 0.4.14", - "miow 0.2.2", - "net2", - "slab", - "winapi 0.2.8", ] [[package]] name = "mio" -version = "0.7.14" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", - "log 0.4.14", - "miow 0.3.7", - "ntapi", - "winapi 0.3.9", -] - -[[package]] -name = "mio-extras" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" -dependencies = [ - "lazycell", - "log 0.4.14", - "mio 0.6.23", - "slab", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys", ] [[package]] -name = "miow" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" -dependencies = [ - "kernel32-sys", - "net2", - "winapi 0.2.8", - "ws2_32-sys", -] - -[[package]] -name = "miow" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi 0.3.9", -] - -[[package]] -name = "multipart" -version = "0.18.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182" +name = "multer" +version = "2.0.3" +source = "git+https://github.com/BlackDex/multer-rs?rev=73e83fa5eb183646cc56606e5d902acb30a45b3d#73e83fa5eb183646cc56606e5d902acb30a45b3d" dependencies = [ - "buf_redux", + "bytes", + "encoding_rs", + "futures-util", + "http", "httparse", - "log 0.4.14", - "mime 0.3.16", - "mime_guess", - "quick-error 1.2.3", - "rand 0.8.4", - "safemem", - "tempfile", - "twoway", + "log", + "memchr", + "mime", + "spin 0.9.4", + "tokio", + "tokio-util", + "version_check", ] [[package]] name = "mysqlclient-sys" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e9637d93448044078aaafea7419aed69d301b4a12bcc4aa0ae856eb169bef85" +checksum = "f61b381528ba293005c42a409dd73d034508e273bf90481f17ec2e964a6e969b" dependencies = [ "pkg-config", "vcpkg", @@ -1502,13 +1457,13 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.8" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" +checksum = "fd7e2f3618557f980e0b17e8856252eee3c97fa12c54dff0ca290fb6266ca4a9" dependencies = [ "lazy_static", "libc", - "log 0.4.14", + "log", "openssl", "openssl-probe", "openssl-sys", @@ -1519,71 +1474,43 @@ dependencies = [ ] [[package]] -name = "net2" -version = "0.2.37" +name = "nix" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" +checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc" dependencies = [ - "cfg-if 0.1.10", + "bitflags", + "cfg-if", "libc", - "winapi 0.3.9", ] -[[package]] -name = "new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - [[package]] name = "no-std-compat" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" -dependencies = [ - "hashbrown 0.8.2", -] - -[[package]] -name = "nom" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c349f68f25f596b9f44cf0e7c69752a5c633b0550c3ff849518bfba0233774a" -dependencies = [ - "memchr", -] [[package]] name = "nom" -version = "7.1.0" +version = "7.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d11e1ef389c76fe5b81bcaf2ea32cf88b62bc494e19f493d0b30e7a930109" +checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36" dependencies = [ "memchr", "minimal-lexical", - "version_check 0.9.3", ] [[package]] name = "nonzero_ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" - -[[package]] -name = "ntapi" -version = "0.3.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" -dependencies = [ - "winapi 0.3.9", -] +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" [[package]] name = "num-bigint" -version = "0.2.6" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" dependencies = [ "autocfg", "num-integer", @@ -1596,16 +1523,16 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", 
- "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "num-integer" -version = "0.1.44" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ "autocfg", "num-traits", @@ -1613,9 +1540,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] @@ -1631,25 +1558,28 @@ dependencies = [ ] [[package]] -name = "object" -version = "0.27.1" +name = "num_threads" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67ac1d3f9a1d3616fd9a60c8d74296f22406a238b6a72f5cc1e6f314df4ffbf9" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ - "memchr", + "libc", ] [[package]] -name = "once_cell" -version = "1.9.0" +name = "object" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" +dependencies = [ + "memchr", +] [[package]] -name = "opaque-debug" -version = "0.2.3" +name = "once_cell" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" +checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" [[package]] name = "opaque-debug" @@ -1659,38 +1589,50 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = 
"0.10.38" +version = "0.10.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" +checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0" dependencies = [ "bitflags", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", "libc", "once_cell", + "openssl-macros", "openssl-sys", ] +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "openssl-probe" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "111.17.0+1.1.1m" +version = "111.22.0+1.1.1q" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d6a336abd10814198f66e2a91ccd7336611f30334119ca8ce300536666fcf4" +checksum = "8f31f0d509d1c1ae9cada2f9539ff8f37933831fd5098879e482aa687d659853" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.72" +version = "0.9.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" +checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f" dependencies = [ "autocfg", "cc", @@ -1701,125 +1643,73 @@ dependencies = [ ] [[package]] -name = "owning_ref" -version = "0.3.3" +name = "parking_lot" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - 
"stable_deref_trait", + "lock_api", + "parking_lot_core", ] [[package]] -name = "parity-ws" -version = "0.11.1" +name = "parking_lot_core" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5983d3929ad50f12c3eb9a6743f19d691866ecd44da74c0a3308c3f8a56df0c6" +checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ - "byteorder", - "bytes 0.4.12", - "httparse", - "log 0.4.14", - "mio 0.6.23", - "mio-extras", - "rand 0.7.3", - "sha-1 0.8.2", - "slab", - "url 2.2.2", + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-sys", ] [[package]] -name = "parking_lot" -version = "0.4.8" +name = "parse-zoneinfo" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "149d8f5b97f3c1133e3cfcd8886449959e856b557ff281e292b733d7c69e005e" +checksum = "c705f256449c60da65e11ff6626e0c16a0a0b96aaa348de61376b249bc340f41" dependencies = [ - "owning_ref", - "parking_lot_core 0.2.14", + "regex", ] [[package]] -name = "parking_lot" -version = "0.11.2" +name = "paste" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.5", -] +checksum = "9423e2b32f7a043629287a536f21951e8c6a82482d0acb1eeebfc90bc2225b22" [[package]] -name = "parking_lot_core" -version = "0.2.14" +name = "pear" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa" -dependencies = [ - "libc", - "rand 0.4.6", - "smallvec 0.6.14", - "winapi 0.3.9", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - 
"redox_syscall", - "smallvec 1.7.0", - "winapi 0.3.9", -] - -[[package]] -name = "parse-zoneinfo" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c705f256449c60da65e11ff6626e0c16a0a0b96aaa348de61376b249bc340f41" -dependencies = [ - "regex", -] - -[[package]] -name = "paste" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0744126afe1a6dd7f394cb50a716dbe086cb06e255e53d8d0185d82828358fb5" - -[[package]] -name = "pear" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5320f212db967792b67cfe12bd469d08afd6318a249bd917d5c19bc92200ab8a" +checksum = "15e44241c5e4c868e3eaa78b7c1848cadd6344ed4f54d029832d32b415a58702" dependencies = [ + "inlinable_string", "pear_codegen", + "yansi", ] [[package]] name = "pear_codegen" -version = "0.1.4" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc1c836fdc3d1ef87c348b237b5b5c4dff922156fb2d968f57734f9669768ca" +checksum = "82a5ca643c2303ecb740d506539deba189e16f2754040a42901cd8105d0282d0" dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", - "syn 0.15.44", - "version_check 0.9.3", - "yansi", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn", ] [[package]] name = "pem" -version = "0.8.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4" dependencies = [ - "base64 0.13.0", - "once_cell", - "regex", + "base64", ] [[package]] @@ -1836,18 +1726,19 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pest" -version = "2.1.3" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" +checksum 
= "69486e2b8c2d2aeb9762db7b4e00b0331156393555cff467f4163ff06821eef8" dependencies = [ + "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.1.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0" +checksum = "b13570633aff33c6d22ce47dd566b10a3b9122c2fe9d8e7501895905be532b91" dependencies = [ "pest", "pest_generator", @@ -1855,100 +1746,62 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.1.3" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" +checksum = "b3c567e5702efdc79fb18859ea74c3eb36e14c43da7b8c1f098a4ed6514ec7a0" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "pest_meta" -version = "2.1.3" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d" +checksum = "5eb32be5ee3bbdafa8c7a18b0a8a8d962b66cfa2ceee4037f49267a50ee821fe" dependencies = [ - "maplit", + "once_cell", "pest", - "sha-1 0.8.2", -] - -[[package]] -name = "phf" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" -dependencies = [ - "phf_shared 0.8.0", + "sha-1", ] [[package]] name = "phf" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" -dependencies = [ - "phf_shared 0.10.0", -] - -[[package]] -name = "phf_codegen" -version = "0.8.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +checksum 
= "4724fa946c8d1e7cd881bd3dbee63ce32fc1e9e191e35786b3dc1320a3f68131" dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", + "phf_shared", ] [[package]] name = "phf_codegen" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" -dependencies = [ - "phf_generator 0.10.0", - "phf_shared 0.10.0", -] - -[[package]] -name = "phf_generator" -version = "0.8.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +checksum = "32ba0c43d7a1b6492b2924a62290cfd83987828af037b0743b38e6ab092aee58" dependencies = [ - "phf_shared 0.8.0", - "rand 0.7.3", + "phf_generator", + "phf_shared", ] [[package]] name = "phf_generator" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" -dependencies = [ - "phf_shared 0.10.0", - "rand 0.8.4", -] - -[[package]] -name = "phf_shared" -version = "0.8.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +checksum = "5b450720b6f75cfbfabc195814bd3765f337a4f9a83186f8537297cac12f6705" dependencies = [ - "siphasher", + "phf_shared", + "rand", ] [[package]] name = "phf_shared" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +checksum = "9dd5609d4b2df87167f908a32e1b146ce309c16cf35df76bc11f440b756048e4" dependencies = [ "siphasher", "uncased", @@ -1956,15 +1809,15 @@ dependencies = [ [[package]] name = "pico-args" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" [[package]] name = "pin-project-lite" -version = "0.2.7" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" [[package]] name = "pin-utils" @@ -1974,31 +1827,37 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.24" +version = "0.3.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" + +[[package]] +name = "polyval" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" +checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] [[package]] name = "ppv-lite86" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] name = "pq-sys" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ac25eee5a0582f45a67e837e350d784e7003bd29a5f460796772061ca49ffda" +checksum = "3b845d6d8ec554f972a2c5298aad68953fd64e7441e846075450b44656a016d1" dependencies = [ "vcpkg", ] -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - [[package]] name = 
"proc-macro-hack" version = "0.5.19" @@ -2007,20 +1866,24 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "0.4.30" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" dependencies = [ - "unicode-xid 0.1.0", + "unicode-ident", ] [[package]] -name = "proc-macro2" -version = "1.0.34" +name = "proc-macro2-diagnostics" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1" +checksum = "4bf29726d67464d49fa6224a1d07936a8c08bb3fba727c7493f6cf1616fdaada" dependencies = [ - "unicode-xid 0.2.2", + "proc-macro2", + "quote", + "syn", + "version_check", + "yansi", ] [[package]] @@ -2043,12 +1906,18 @@ dependencies = [ [[package]] name = "quanta" -version = "0.4.1" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d98dc777a7a39b76b1a26ae9d3f691f4c1bc0455090aa0b64dfa8cb7fc34c135" +checksum = "20afe714292d5e879d8b12740aa223c6a88f118af41870e8b6196e39a02238a8" dependencies = [ + "crossbeam-utils", "libc", - "winapi 0.3.9", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi", ] [[package]] @@ -2057,28 +1926,13 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -[[package]] -name = "quick-error" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" - [[package]] name = "quote" -version = "0.6.13" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ - "proc-macro2 0.4.30", -] - -[[package]] -name = "quote" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" -dependencies = [ - "proc-macro2 1.0.34", + "proc-macro2", ] [[package]] @@ -2089,62 +1943,24 @@ checksum = "3fee2dce59f7a43418e3382c766554c614e06a552d53a8f07ef499ea4b332c0f" [[package]] name = "r2d2" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545c5bc2b880973c9c10e4067418407a0ccaa3091781d1671d46eb35107cb26f" +checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ - "log 0.4.14", - "parking_lot 0.11.2", + "log", + "parking_lot", "scheduled-thread-pool", ] [[package]] name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi 0.3.9", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc 0.2.0", - "rand_pcg", -] - -[[package]] -name = "rand" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.3", - "rand_hc 0.3.1", -] - -[[package]] -name = "rand_chacha" 
-version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -2154,31 +1970,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", -] - -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -2187,70 +1979,83 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ - "getrandom 0.2.3", + "getrandom", ] [[package]] -name = "rand_hc" -version = "0.2.0" +name = "raw-cpuid" +version = "10.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +checksum = "2c49596760fce12ca21550ac21dc5a9617b2ea4b6e0aa7d8dab8ff2824fc2bba" dependencies = [ - "rand_core 0.5.1", + "bitflags", ] [[package]] -name = "rand_hc" -version = "0.3.1" +name = "redox_syscall" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "rand_core 0.6.3", + "bitflags", ] [[package]] -name = "rand_pcg" -version = "0.2.1" +name = "redox_users" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "rand_core 0.5.1", + "getrandom", + "redox_syscall", + "thiserror", ] [[package]] -name = "rdrand" -version = "0.4.0" +name = "ref-cast" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +checksum = "ed13bcd201494ab44900a96490291651d200730904221832b9547d24a87d332b" dependencies = [ - "rand_core 0.3.1", + "ref-cast-impl", ] [[package]] -name = "redox_syscall" -version = "0.2.10" +name = "ref-cast-impl" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +checksum = "5234cd6063258a5e32903b53b1b6ac043a0541c8adc1f610f67b0326c7a578fa" dependencies = [ - "bitflags", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "regex" -version = "1.5.4" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + [[package]] name = "regex-syntax" -version 
= "0.6.25" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "remove_dir_all" @@ -2258,32 +2063,33 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" dependencies = [ - "winapi 0.3.9", + "winapi", ] [[package]] name = "reqwest" -version = "0.11.8" +version = "0.11.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c4e0a76dc12a116108933f6301b95e83634e0c47b0afbed6abbaa0601e99258" +checksum = "b75aa69a3f06bbcc66ede33af2af253c6f7a86b1ca0033f60c580a27074fbf92" dependencies = [ "async-compression", - "base64 0.13.0", - "bytes 1.1.0", - "cookie 0.15.1", + "base64", + "bytes", + "cookie", "cookie_store", "encoding_rs", "futures-core", "futures-util", + "h2", "http", "http-body", - "hyper 0.14.16", + "hyper", "hyper-tls", "ipnet", "js-sys", "lazy_static", - "log 0.4.14", - "mime 0.3.16", + "log", + "mime", "native-tls", "percent-encoding 2.1.0", "pin-project-lite", @@ -2295,12 +2101,13 @@ dependencies = [ "tokio-native-tls", "tokio-socks", "tokio-util", + "tower-service", "trust-dns-resolver", "url 2.2.2", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "winreg 0.7.0", + "winreg 0.10.1", ] [[package]] @@ -2310,7 +2117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" dependencies = [ "hostname", - "quick-error 1.2.3", + "quick-error", ] [[package]] @@ -2322,20 +2129,21 @@ dependencies = [ "cc", "libc", "once_cell", - "spin", + "spin 0.5.2", "untrusted", "web-sys", - "winapi 0.3.9", + "winapi", ] [[package]] name = "rmp" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4f55e5fa1446c4d5dd1f5daeed2a4fe193071771a2636274d0d7a3b082aa7ad6" +checksum = "44519172358fd6d58656c86ab8e7fbc9e1490c3e8f14d35ed78ca0dd07403c9f" dependencies = [ "byteorder", "num-traits", + "paste", ] [[package]] @@ -2350,65 +2158,87 @@ dependencies = [ [[package]] name = "rocket" -version = "0.5.0-dev" -source = "git+https://github.com/SergioBenitez/Rocket?rev=263e39b5b429de1913ce7e3036575a7b4d88b6d7#263e39b5b429de1913ce7e3036575a7b4d88b6d7" +version = "0.5.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98ead083fce4a405feb349cf09abdf64471c6077f14e0ce59364aa90d4b99317" dependencies = [ + "async-stream", + "async-trait", + "atomic", "atty", "binascii", - "log 0.4.14", + "bytes", + "either", + "figment", + "futures", + "indexmap", + "log", "memchr", + "multer", "num_cpus", - "pear", + "parking_lot", + "pin-project-lite", + "rand", + "ref-cast", "rocket_codegen", "rocket_http", + "serde", + "serde_json", "state", - "time 0.2.27", - "toml", - "version_check 0.9.3", + "tempfile", + "time 0.3.12", + "tokio", + "tokio-stream", + "tokio-util", + "ubyte", + "version_check", "yansi", ] [[package]] name = "rocket_codegen" -version = "0.5.0-dev" -source = "git+https://github.com/SergioBenitez/Rocket?rev=263e39b5b429de1913ce7e3036575a7b4d88b6d7#263e39b5b429de1913ce7e3036575a7b4d88b6d7" +version = "0.5.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6aeb6bb9c61e9cd2c00d70ea267bf36f76a4cc615e5908b349c2f9d93999b47" dependencies = [ "devise", "glob", "indexmap", - "quote 1.0.10", + "proc-macro2", + "quote", "rocket_http", - "version_check 0.9.3", - "yansi", -] - -[[package]] -name = "rocket_contrib" -version = "0.5.0-dev" -source = "git+https://github.com/SergioBenitez/Rocket?rev=263e39b5b429de1913ce7e3036575a7b4d88b6d7#263e39b5b429de1913ce7e3036575a7b4d88b6d7" -dependencies = [ - "log 0.4.14", - "rocket", - "serde", - "serde_json", + "syn", + "unicode-xid", ] [[package]] name = 
"rocket_http" -version = "0.5.0-dev" -source = "git+https://github.com/SergioBenitez/Rocket?rev=263e39b5b429de1913ce7e3036575a7b4d88b6d7#263e39b5b429de1913ce7e3036575a7b4d88b6d7" +version = "0.5.0-rc.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ded65d127954de3c12471630bf4b81a2792f065984461e65b91d0fdaafc17a2" dependencies = [ - "cookie 0.14.4", - "hyper 0.10.16", - "hyper-sync-rustls", + "cookie", + "either", + "futures", + "http", + "hyper", "indexmap", + "log", + "memchr", "pear", - "percent-encoding 1.0.1", + "percent-encoding 2.1.0", + "pin-project-lite", + "ref-cast", "rustls", - "smallvec 1.7.0", + "rustls-pemfile", + "serde", + "smallvec", + "stable-pattern", "state", - "time 0.2.27", - "unicode-xid 0.2.2", + "time 0.3.12", + "tokio", + "tokio-rustls", + "uncased", ] [[package]] @@ -2418,38 +2248,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" [[package]] -name = "rustc_version" -version = "0.2.3" +name = "rustls" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" dependencies = [ - "semver", + "log", + "ring", + "sct", + "webpki", ] [[package]] -name = "rustls" -version = "0.17.0" +name = "rustls-pemfile" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1" +checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55" dependencies = [ - "base64 0.11.0", - "log 0.4.14", - "ring", - "sct", - "webpki", + "base64", ] [[package]] -name = "ryu" +name = "rustversion" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] -name = "safemem" -version = "0.3.3" +name = "ryu" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "same-file" @@ -2462,23 +2291,29 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "winapi 0.3.9", + "windows-sys", ] [[package]] name = "scheduled-thread-pool" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc6f74fd1204073fa02d5d5d68bec8021be4c38690b61264b2fdb48083d0e7d7" +checksum = "977a7519bff143a44f842fd07e80ad1329295bd71686457f18e496736f4bf9bf" dependencies = [ - "parking_lot 0.11.2", + "parking_lot", ] +[[package]] +name = "scoped-tls" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2" + [[package]] name = "scopeguard" version = "1.1.0" @@ -2487,9 +2322,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "sct" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ "ring", "untrusted", @@ -2497,9 +2332,9 @@ dependencies = [ [[package]] name = 
"security-framework" -version = "2.4.2" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" +checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" dependencies = [ "bitflags", "core-foundation", @@ -2510,47 +2345,23 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.4.2" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" +checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" dependencies = [ "core-foundation-sys", "libc", ] -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - [[package]] name = "serde" -version = "1.0.132" +version = "1.0.142" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008" +checksum = "e590c437916fb6b221e1d00df6e3294f3fccd70ca7e92541c475d6ed6ef5fee2" dependencies = [ "serde_derive", ] -[[package]] -name = "serde_bytes" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" -dependencies = [ - "serde", -] - [[package]] name = "serde_cbor" version = "0.11.2" @@ -2563,242 +2374,167 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.132" +version = "1.0.142" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276" +checksum = "34b5b8d809babe02f538c2cfec6f2c1ed10804c0e5a6a041a049a4f5588ccc2e" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "serde_json" -version = "1.0.73" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" +checksum = "38dd04e3c8279e75b31ef29dbdceebfe5ad89f4d0937213c53f7d49d01b3d5a7" dependencies = [ - "itoa 1.0.1", + "itoa", "ryu", "serde", ] [[package]] name = "serde_urlencoded" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 0.4.8", + "itoa", "ryu", "serde", ] [[package]] name = "sha-1" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - -[[package]] -name = "sha-1" -version = "0.9.8" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest", ] [[package]] name = "sha1" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" - -[[package]] -name = "sha2" -version = "0.9.8" +version = "0.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa" +checksum = "c77f4e7f65455545c2153c1253d25056825e77ee2533f0e41deb65a93a34852f" dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest", ] [[package]] -name = "simple_asn1" -version = "0.4.1" +name = "sha2" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" +checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" dependencies = [ - "chrono", - "num-bigint", - "num-traits", + "cfg-if", + "cpufeatures", + "digest", ] [[package]] -name = "siphasher" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b" - -[[package]] -name = "slab" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" - -[[package]] -name = "smallvec" -version = "0.6.14" +name = "sharded-slab" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" dependencies = [ - "maybe-uninit", + "lazy_static", ] [[package]] -name = "smallvec" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" - -[[package]] -name = "socket2" -version = "0.3.19" +name = "signal-hook-registry" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122e570113d28d773067fab24266b66753f6ea915758651696b6e35e49f88d6e" +checksum = 
"e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" dependencies = [ - "cfg-if 1.0.0", "libc", - "winapi 0.3.9", ] [[package]] -name = "socket2" -version = "0.4.2" +name = "simple_asn1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dc90fe6c7be1a323296982db1836d1ea9e47b6839496dde9a541bc496df3516" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ - "libc", - "winapi 0.3.9", + "num-bigint", + "num-traits", + "thiserror", + "time 0.3.12", ] [[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" +name = "siphasher" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" [[package]] -name = "standback" -version = "0.2.17" +name = "slab" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e113fb6f3de07a243d434a56ec6f186dfd51cb08448239fe7bcae73f87ff28ff" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" dependencies = [ - "version_check 0.9.3", + "autocfg", ] [[package]] -name = "state" -version = "0.4.2" +name = "smallvec" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3015a7d0a5fd5105c91c3710d42f9ccf0abfb287d62206484dcc67f9569a6483" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] -name = "stdweb" -version = "0.4.20" +name = "socket2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5" +checksum = 
"66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" dependencies = [ - "discard", - "rustc_version", - "stdweb-derive", - "stdweb-internal-macros", - "stdweb-internal-runtime", - "wasm-bindgen", + "libc", + "winapi", ] [[package]] -name = "stdweb-derive" -version = "0.5.3" +name = "spin" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" -dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "serde", - "serde_derive", - "syn 1.0.84", -] +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] -name = "stdweb-internal-macros" -version = "0.2.9" +name = "spin" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" -dependencies = [ - "base-x", - "proc-macro2 1.0.34", - "quote 1.0.10", - "serde", - "serde_derive", - "serde_json", - "sha1", - "syn 1.0.84", -] +checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" [[package]] -name = "stdweb-internal-runtime" -version = "0.1.5" +name = "stable-pattern" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0" +checksum = "4564168c00635f88eaed410d5efa8131afa8d8699a612c80c455a0ba05c21045" +dependencies = [ + "memchr", +] [[package]] -name = "string_cache" -version = "0.8.2" +name = "state" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "923f0f39b6267d37d23ce71ae7235602134b250ace715dd2c90421998ddac0c6" +checksum = "dbe866e1e51e8260c9eed836a042a5e7f6726bb2b411dffeaa712e19c388f23b" dependencies = [ - "lazy_static", - "new_debug_unreachable", - "parking_lot 0.11.2", - "phf_shared 0.8.0", - "precomputed-hash", - "serde", + "loom", ] [[package]] -name = "string_cache_codegen" 
-version = "0.5.1" +name = "strsim" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" -dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", - "proc-macro2 1.0.34", - "quote 1.0.10", -] +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "subtle" @@ -2808,81 +2544,69 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "0.15.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" -dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", - "unicode-xid 0.1.0", -] - -[[package]] -name = "syn" -version = "1.0.84" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecb2e6da8ee5eb9a61068762a32fa9619cc591ceb055b3687f4cd4051ec2e06b" +checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "unicode-xid 0.2.2", + "proc-macro2", + "quote", + "unicode-ident", ] [[package]] name = "syslog" -version = "4.0.1" +version = "6.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0641142b4081d3d44beffa4eefd7346a228cdf91ed70186db2ca2cef762d327" +checksum = "978044cc68150ad5e40083c9f6a725e6fd02d7ba1bcf691ec2ff0d66c0b41acc" dependencies = [ "error-chain", + "hostname", "libc", - "log 0.4.14", - "time 0.1.44", + "log", + "time 0.3.12", ] [[package]] name = "tempfile" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", + "fastrand", "libc", - "rand 0.8.4", 
"redox_syscall", "remove_dir_all", - "winapi 0.3.9", + "winapi", ] [[package]] -name = "tendril" -version = "0.4.2" +name = "thiserror" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9ef557cb397a4f0a5a3a628f06515f78563f2209e64d47055d9dc6052bf5e33" +checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994" dependencies = [ - "futf", - "mac", - "utf-8", + "thiserror-impl", ] [[package]] -name = "thiserror" -version = "1.0.30" +name = "thiserror-impl" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "thiserror-impl" -version = "1.0.30" +name = "thread_local" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "once_cell", ] [[package]] @@ -2896,58 +2620,38 @@ dependencies = [ [[package]] name = "time" -version = "0.1.44" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" dependencies = [ "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi 0.3.9", + "winapi", ] [[package]] name = "time" -version = "0.2.27" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4752a97f8eebd6854ff91f1c1824cd6160626ac4bd44287f7f4ea2035a02a242" +checksum = "74b7cc93fc23ba97fde84f7eea56c55d1ba183f495c6715defdfc7b9cb8c870f" dependencies 
= [ - "const_fn", + "itoa", + "js-sys", "libc", - "standback", - "stdweb", + "num_threads", "time-macros", - "version_check 0.9.3", - "winapi 0.3.9", ] [[package]] name = "time-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e9c6e26f12cb6d0dd7fc776bb67a706312e7299aed74c8dd5b17ebb27e2f1" -dependencies = [ - "proc-macro-hack", - "time-macros-impl", -] - -[[package]] -name = "time-macros-impl" -version = "0.1.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f" -dependencies = [ - "proc-macro-hack", - "proc-macro2 1.0.34", - "quote 1.0.10", - "standback", - "syn 1.0.84", -] +checksum = "42657b1a6f4d817cda8e7a0ace261fe0cc946cf3a80314390b22cc61ae080792" [[package]] name = "tinyvec" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] @@ -2960,17 +2664,34 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.15.0" +version = "1.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" +checksum = "7a8325f63a7d4774dd041e363b2409ed1c5cbbd0f867795e661df066b2b0a581" dependencies = [ - "bytes 1.1.0", + "autocfg", + "bytes", "libc", "memchr", - "mio 0.7.14", + "mio", "num_cpus", + "once_cell", + "parking_lot", "pin-project-lite", - "winapi 0.3.9", + "signal-hook-registry", + "socket2", + "tokio-macros", + "winapi", +] + +[[package]] +name = "tokio-macros" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -2983,6 +2704,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + [[package]] name = "tokio-socks" version = "0.5.1" @@ -2995,55 +2727,78 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + [[package]] name = "tokio-util" -version = "0.6.9" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ - "bytes 1.1.0", + "bytes", "futures-core", "futures-sink", - "log 0.4.14", "pin-project-lite", "tokio", + "tracing", ] [[package]] name = "toml" -version = "0.4.10" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" +checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" dependencies = [ "serde", ] [[package]] name = "totp-lite" -version = "1.0.3" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b18009e8be74bfb2e2cc59a63d078d95c042858a1ca1128a294e1f9ce225148b" +checksum = "5cc496875d9c8fe9a0ce19e3ee8e8808c60376831a439543f0aac71c9dd129fa" dependencies = [ - "digest 0.9.0", - "hmac 0.11.0", - "sha-1 0.9.8", + "digest", + "hmac", + "sha-1", "sha2", ] [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.29" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" dependencies = [ - "cfg-if 1.0.0", - "log 0.4.14", + "cfg-if", + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3051,38 +2806,62 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.18" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" +checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "tracing-core" -version = "0.1.21" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" dependencies = [ 
"lazy_static", + "log", + "tracing-core", ] [[package]] -name = "traitobject" -version = "0.1.0" +name = "tracing-subscriber" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079" +checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b" +dependencies = [ + "ansi_term", + "matchers", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] [[package]] name = "trust-dns-proto" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" dependencies = [ "async-trait", - "cfg-if 1.0.0", + "cfg-if", "data-encoding", "enum-as-inner", "futures-channel", @@ -3091,9 +2870,9 @@ dependencies = [ "idna 0.2.3", "ipnet", "lazy_static", - "log 0.4.14", - "rand 0.8.4", - "smallvec 1.7.0", + "log", + "rand", + "smallvec", "thiserror", "tinyvec", "tokio", @@ -3102,19 +2881,19 @@ dependencies = [ [[package]] name = "trust-dns-resolver" -version = "0.20.3" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ad17b608a64bd0735e67bde16b0636f8aa8591f831a25d18443ed00a699770" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "futures-util", "ipconfig", "lazy_static", - "log 0.4.14", + "log", "lru-cache", - "parking_lot 0.11.2", + "parking_lot", "resolv-conf", - "smallvec 1.7.0", + "smallvec", "thiserror", "tokio", "trust-dns-proto", @@ -3127,20 +2906,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] -name = "twoway" -version = "0.1.8" +name = "tungstenite" 
+version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" dependencies = [ - "memchr", + "base64", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand", + "sha-1", + "thiserror", + "url 2.2.2", + "utf-8", ] -[[package]] -name = "typeable" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" - [[package]] name = "typenum" version = "1.15.0" @@ -3148,87 +2931,66 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] -name = "u2f" -version = "0.2.0" +name = "ubyte" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2f285392366190c4d46823458f4543ac0f35174759c78e80c5baa39e1f7aa4f" +checksum = "a58e29f263341a29bb79e14ad7fda5f63b1c7e48929bad4c685d7876b1d04e94" dependencies = [ - "base64 0.11.0", - "byteorder", - "bytes 0.4.12", - "chrono", - "openssl", "serde", - "serde_derive", - "serde_json", - "time 0.1.44", ] [[package]] name = "ucd-trie" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c" [[package]] name = "uncased" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5baeed7327e25054889b9bd4f975f32e5f4c5d434042d59ab6cd4142c0a76ed0" -dependencies = [ - "version_check 0.9.3", -] - -[[package]] -name = "unicase" -version = "1.4.2" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" +checksum = "09b01702b0fd0b3fadcf98e098780badda8742d4f4a7676615cad90e8ac73622" dependencies = [ - "version_check 0.1.5", + "serde", + "version_check", ] [[package]] -name = "unicase" -version = "2.6.0" +name = "unicode-bidi" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" -dependencies = [ - "version_check 0.9.3", -] +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] -name = "unicode-bidi" -version = "0.3.7" +name = "unicode-ident" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" +checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" - [[package]] name = "unicode-xid" -version = "0.1.0" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] -name = "unicode-xid" -version = "0.2.2" +name = "universal-hash" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" +checksum = 
"9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" +dependencies = [ + "generic-array", + "subtle", +] [[package]] name = "untrusted" @@ -3268,63 +3030,69 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "uuid" -version = "0.8.2" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f" dependencies = [ - "getrandom 0.2.3", + "getrandom", ] +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "vaultwarden" version = "1.0.0" dependencies = [ "backtrace", - "bytes 1.1.0", - "chashmap", + "bytes", + "cached", "chrono", "chrono-tz", - "cookie 0.15.1", + "cookie", "cookie_store", + "ctrlc", + "dashmap", "data-encoding", "data-url", "diesel", "diesel_migrations", - "dotenv", + "dotenvy", "fern", + "futures", "governor", "handlebars", - "html5ever", - "idna 0.2.3", - "job_scheduler", + "html5gum", + "job_scheduler_ng", "jsonwebtoken", "lettre", "libsqlite3-sys", - "log 0.4.14", - "markup5ever_rcdom", - "multipart", + "log", + "mimalloc", "num-derive", "num-traits", "once_cell", "openssl", - "parity-ws", "paste", "percent-encoding 2.1.0", "pico-args", - "rand 0.8.4", + "rand", "regex", "reqwest", "ring", "rmpv", "rocket", - "rocket_contrib", "serde", "serde_json", "syslog", - "time 0.2.27", + "time 0.3.12", + "tokio", + "tokio-tungstenite", "totp-lite", "tracing", - "u2f", "url 2.2.2", "uuid", "webauthn-rs", @@ -3339,15 +3107,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" - -[[package]] -name = "version_check" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" @@ -3356,7 +3118,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", - "winapi 0.3.9", + "winapi", "winapi-util", ] @@ -3366,54 +3128,54 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" dependencies = [ - "log 0.4.14", + "log", "try-lock", ] [[package]] name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" +version = "0.10.2+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" [[package]] name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.78" +version = "0.2.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "632f73e236b219150ea279196e54e610f5dbafa5d61786303d4da54f84e47fce" +checksum = "fc7652e3f6c4706c8d9cd54832c4a4ccb9b5336e2c3bd154d5cccfbf1c1f5f7d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.78" +version = "0.2.82" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a317bf8f9fba2476b4b2c85ef4c4af8ff39c3c7f0cdfeed4f82c34a880aa837b" +checksum = "662cd44805586bd52971b9586b1df85cdbbd9112e4ef4d8f41559c334dc6ac3f" dependencies = [ "bumpalo", - "lazy_static", - "log 0.4.14", - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.28" +version = "0.4.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d7523cb1f2a4c96c1317ca690031b714a51cc14e05f712446691f413f5d39" +checksum = "fa76fb221a1f8acddf5b54ace85912606980ad661ac7a503b4570ffd3a624dad" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -3421,38 +3183,38 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.78" +version = "0.2.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56146e7c495528bf6587663bea13a8eb588d39b36b679d83972e1a2dbbdacf9" +checksum = "b260f13d3012071dfb1512849c033b1925038373aea48ced3012c09df952c602" dependencies = [ - "quote 1.0.10", + "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.78" +version = "0.2.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" +checksum = "5be8e654bdd9b79216c2929ab90721aa82faf65c48cdf08bdc4e7f51357b80da" dependencies = [ - "proc-macro2 1.0.34", - "quote 1.0.10", - "syn 1.0.84", + "proc-macro2", + "quote", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.78" +version = "0.2.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0237232789cf037d5480773fe568aac745bfe2afbc11a863e97901780a6b47cc" +checksum = 
"6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a" [[package]] name = "web-sys" -version = "0.3.55" +version = "0.3.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38eb105f1c59d9eaa6b5cdc92b859d85b926e82cb2e0945cd0c9259faa6fe9fb" +checksum = "ed055ab27f941423197eb86b2035720b1a3ce40504df082cac2ecc6ed73335a1" dependencies = [ "js-sys", "wasm-bindgen", @@ -3460,16 +3222,15 @@ dependencies = [ [[package]] name = "webauthn-rs" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "261310333d5941ec47038eed295ee0ed6f5cee36b7575dc6c4b63ee8e7fe590c" +checksum = "90b266eccb4b32595876f5c73ea443b0516da0b1df72ca07bc08ed9ba7f96ec1" dependencies = [ - "base64 0.13.0", - "nom 4.1.1", + "base64", + "nom", "openssl", - "rand 0.8.4", + "rand", "serde", - "serde_bytes", "serde_cbor", "serde_derive", "serde_json", @@ -3480,34 +3241,19 @@ dependencies = [ [[package]] name = "webpki" -version = "0.21.4" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" dependencies = [ "ring", "untrusted", ] -[[package]] -name = "webpki-roots" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8eff4b7516a57307f9349c64bf34caa34b940b66fed4b2fb3136cb7386e5739" -dependencies = [ - "webpki", -] - [[package]] name = "widestring" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" - -[[package]] -name = "winapi" -version = "0.2.8" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" +checksum = 
"17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" [[package]] name = "winapi" @@ -3519,12 +3265,6 @@ dependencies = [ "winapi-x86_64-pc-windows-gnu", ] -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" - [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" @@ -3537,7 +3277,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ - "winapi 0.3.9", + "winapi", ] [[package]] @@ -3547,64 +3287,127 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "winreg" -version = "0.6.2" +name = "windows" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2986deb581c4fe11b621998a5e53361efe6b48a151178d0cd9eeffa4dc6acc9" +checksum = "fbedf6db9096bc2364adce0ae0aa636dcd89f3c3f2cd67947062aaf0ca2a10ec" dependencies = [ - "winapi 0.3.9", + "windows_aarch64_msvc 0.32.0", + "windows_i686_gnu 0.32.0", + "windows_i686_msvc 0.32.0", + "windows_x86_64_gnu 0.32.0", + "windows_x86_64_msvc 0.32.0", ] [[package]] -name = "winreg" -version = "0.7.0" +name = "windows-sys" +version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "winapi 0.3.9", + "windows_aarch64_msvc 0.36.1", + "windows_i686_gnu 0.36.1", + "windows_i686_msvc 0.36.1", + "windows_x86_64_gnu 0.36.1", + "windows_x86_64_msvc 0.36.1", ] [[package]] -name = "ws2_32-sys" -version = "0.2.1" +name = "windows_aarch64_msvc" +version = "0.32.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8e92753b1c443191654ec532f14c199742964a061be25d77d7a96f09db20bf5" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" + +[[package]] +name = "windows_i686_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a711c68811799e017b6038e0922cb27a5e2f43a2ddb609fe0b6f3eeda9de615" + +[[package]] +name = "windows_i686_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" + +[[package]] +name = "windows_i686_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c11bb1a02615db74680b32a68e2d61f553cc24c4eb5b4ca10311740e44172" + +[[package]] +name = "windows_i686_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c912b12f7454c6620635bbff3450962753834be2a594819bd5e945af18ec64bc" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504a2476202769977a040c6364301a3f65d0cc9e3fb08600b2bda150a0488316" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" + +[[package]] +name = "winreg" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69" dependencies = [ - "winapi 0.2.8", - "winapi-build", + "winapi", ] [[package]] -name = "xml5ever" -version = "0.16.2" +name = "winreg" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9234163818fd8e2418fcde330655e757900d4236acd8cc70fef345ef91f6d865" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" dependencies = [ - "log 0.4.14", - "mac", - "markup5ever", - "time 0.1.44", + "winapi", ] [[package]] name = "yansi" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "yubico" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3c3f584739059f479ca4de114cbfe032315752abb3be60afb30db40a802169" +checksum = "173f75d2c4010429a2d74ae3a114a69930c59e2b1a4c97b1c75d259a4960d5fb" dependencies = [ - "base64 0.13.0", - "crypto-mac 0.10.1", + "base64", + "form_urlencoded", "futures", - "hmac 0.10.1", - "rand 0.8.4", + "hmac", + "rand", "reqwest", - "sha-1 0.9.8", + "sha1", "threadpool", - "url 1.7.2", ] diff --git a/Cargo.toml b/Cargo.toml index 7b9ef3a3..0ee42851 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "vaultwarden" version = "1.0.0" authors = ["Daniel García "] edition = "2021" -rust-version = "1.60" +rust-version = "1.60.0" resolver = "2" repository = "https://github.com/dani-garcia/vaultwarden" @@ -13,6 +13,7 @@ publish = false build = "build.rs" 
[features] +# default = ["sqlite"] # Empty to keep compatibility, prefer to set USE_SYSLOG=true enable_syslog = [] mysql = ["diesel/mysql", "diesel_migrations/mysql"] @@ -20,135 +21,138 @@ postgresql = ["diesel/postgres", "diesel_migrations/postgres"] sqlite = ["diesel/sqlite", "diesel_migrations/sqlite", "libsqlite3-sys"] # Enable to use a vendored and statically linked openssl vendored_openssl = ["openssl/vendored"] +# Enable MiMalloc memory allocator to replace the default malloc +# This can improve performance for Alpine builds +enable_mimalloc = ["mimalloc"] # Enable unstable features, requires nightly # Currently only used to enable rusts official ip support unstable = [] [target."cfg(not(windows))".dependencies] -syslog = "4.0.1" +# Logging +syslog = "6.0.1" # Needs to be v4 until fern is updated [dependencies] -# Web framework for nightly with a focus on ease-of-use, expressibility, and speed. -rocket = { version = "=0.5.0-dev", features = ["tls"], default-features = false } -rocket_contrib = "=0.5.0-dev" +# Logging +log = "0.4.17" +fern = { version = "0.6.1", features = ["syslog-6"] } +tracing = { version = "0.1.36", features = ["log"] } # Needed to have lettre and webauthn-rs trace logging to work -# HTTP client -reqwest = { version = "0.11.8", features = ["blocking", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } +backtrace = "0.3.66" # Logging panics to logfile instead stderr only -# Used for custom short lived cookie jar -cookie = "0.15.1" -cookie_store = "0.15.1" -bytes = "1.1.0" -url = "2.2.2" +# A `dotenv` implementation for Rust +dotenvy = { version = "0.15.1", default-features = false } + +# Lazy initialization +once_cell = "1.13.0" -# multipart/form-data support -multipart = { version = "0.18.0", features = ["server"], default-features = false } +# Numerical libraries +num-traits = "0.2.15" +num-derive = "0.3.3" -# WebSockets library -ws = { version = "0.11.1", package = "parity-ws" } +# Web framework +rocket = { version = 
"0.5.0-rc.2", features = ["tls", "json"], default-features = false } -# MessagePack library -rmpv = "1.0.0" +# WebSockets libraries +tokio-tungstenite = "0.17.2" +rmpv = "1.0.0" # MessagePack library +dashmap = "5.3.4" # Concurrent hashmap implementation -# Concurrent hashmap implementation -chashmap = "2.2.2" +# Async futures +futures = "0.3.21" +tokio = { version = "1.20.1", features = ["rt-multi-thread", "fs", "io-util", "parking_lot", "time"] } # A generic serialization/deserialization framework -serde = { version = "1.0.132", features = ["derive"] } -serde_json = "1.0.73" - -# Logging -log = "0.4.14" -fern = { version = "0.6.0", features = ["syslog-4"] } +serde = { version = "1.0.142", features = ["derive"] } +serde_json = "1.0.83" # A safe, extensible ORM and Query builder -diesel = { version = "1.4.8", features = [ "chrono", "r2d2"] } +diesel = { version = "1.4.8", features = ["chrono", "r2d2"] } diesel_migrations = "1.4.0" # Bundled SQLite libsqlite3-sys = { version = "0.22.2", features = ["bundled"], optional = true } # Crypto-related libraries -rand = "0.8.4" +rand = { version = "0.8.5", features = ["small_rng"] } ring = "0.16.20" # UUID generation -uuid = { version = "0.8.2", features = ["v4"] } +uuid = { version = "1.1.2", features = ["v4"] } # Date and time libraries -chrono = { version = "0.4.19", features = ["serde"] } -chrono-tz = "0.6.1" -time = "0.2.27" +chrono = { version = "0.4.20", features = ["clock", "serde"], default-features = false } +chrono-tz = "0.6.3" +time = "0.3.12" # Job scheduler -job_scheduler = "1.2.1" +job_scheduler_ng = "2.0.1" -# TOTP library -totp-lite = "1.0.3" - -# Data encoding library +# Data encoding library Hex/Base32/Base64 data-encoding = "2.3.2" # JWT library -jsonwebtoken = "7.2.0" +jsonwebtoken = "8.1.1" -# U2F library -u2f = "0.2.0" -webauthn-rs = "0.3.1" +# TOTP library +totp-lite = "2.0.0" # Yubico Library -yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false } +yubico = { version 
= "0.11.0", features = ["online-tokio"], default-features = false } -# A `dotenv` implementation for Rust -dotenv = { version = "0.15.0", default-features = false } - -# Lazy initialization -once_cell = "1.9.0" +# WebAuthn libraries +webauthn-rs = "0.3.2" -# Numerical libraries -num-traits = "0.2.14" -num-derive = "0.3.3" +# Handling of URL's for WebAuthn +url = "2.2.2" -# Email libraries -tracing = { version = "0.1.29", features = ["log"] } # Needed to have lettre trace logging used when SMTP_DEBUG is enabled. -lettre = { version = "0.10.0-rc.4", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } +# Email librariese-Base, Update crates and small change. +lettre = { version = "0.10.1", features = ["smtp-transport", "builder", "serde", "tokio1-native-tls", "hostname", "tracing", "tokio1"], default-features = false } +percent-encoding = "2.1.0" # URL encoding library used for URL's in the emails # Template library -handlebars = { version = "4.1.6", features = ["dir_source"] } +handlebars = { version = "4.3.3", features = ["dir_source"] } + +# HTTP client +reqwest = { version = "0.11.11", features = ["stream", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } # For favicon extraction from main website -html5ever = "0.25.1" -markup5ever_rcdom = "0.1.0" -regex = { version = "1.5.4", features = ["std", "perf", "unicode-perl"], default-features = false } +html5gum = "0.5.2" +regex = { version = "1.6.0", features = ["std", "perf", "unicode-perl"], default-features = false } data-url = "0.1.1" +bytes = "1.2.1" +cached = "0.38.0" -# Used by U2F, JWT and Postgres -openssl = "0.10.38" +# Used for custom short lived cookie jar during favicon extraction +cookie = "0.16.0" +cookie_store = "0.16.1" -# URL encoding library -percent-encoding = "2.1.0" -# Punycode conversion -idna = "0.2.3" +# Used by U2F, JWT and Postgres +openssl = "0.10.41" # CLI argument parsing -pico-args = "0.4.2" - -# Logging panics to 
logfile instead stderr only -backtrace = "0.3.63" +pico-args = "0.5.0" # Macro ident concatenation -paste = "1.0.6" -governor = "0.3.2" +paste = "1.0.8" +governor = "0.4.2" + +# Capture CTRL+C +ctrlc = { version = "3.2.2", features = ["termination"] } + +# Allow overriding the default memory allocator +# Mainly used for the musl builds, since the default musl malloc is very slow +mimalloc = { version = "0.1.29", features = ["secure"], default-features = false, optional = true } [patch.crates-io] -# Use newest ring -rocket = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429de1913ce7e3036575a7b4d88b6d7' } -rocket_contrib = { git = 'https://github.com/SergioBenitez/Rocket', rev = '263e39b5b429de1913ce7e3036575a7b4d88b6d7' } - -# The maintainer of the `job_scheduler` crate doesn't seem to have responded -# to any issues or PRs for almost a year (as of April 2021). This hopefully -# temporary fork updates Cargo.toml to use more up-to-date dependencies. -# In particular, `cron` has since implemented parsing of some common syntax -# that wasn't previously supported (https://github.com/zslayton/cron/pull/64). 
-job_scheduler = { git = 'https://github.com/jjlin/job_scheduler', rev = 'ee023418dbba2bfe1e30a5fd7d937f9e33739806' } +# Using a patched version of multer-rs (Used by Rocket) to fix attachment/send file uploads +# Issue: https://github.com/dani-garcia/vaultwarden/issues/2644 +# Patch: https://github.com/BlackDex/multer-rs/commit/73e83fa5eb183646cc56606e5d902acb30a45b3d +multer = { git = "https://github.com/BlackDex/multer-rs", rev = "73e83fa5eb183646cc56606e5d902acb30a45b3d" } + +# Strip debuginfo from the release builds +# Also enable thin LTO for some optimizations +[profile.release] +strip = "debuginfo" +lto = "thin" diff --git a/Rocket.toml b/Rocket.toml deleted file mode 100644 index e8409cb3..00000000 --- a/Rocket.toml +++ /dev/null @@ -1,2 +0,0 @@ -[global.limits] -json = 10485760 # 10 MiB diff --git a/docker/Dockerfile.buildx b/docker/Dockerfile.buildx index ed0d23b3..c250312c 100644 --- a/docker/Dockerfile.buildx +++ b/docker/Dockerfile.buildx @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1 # The cross-built images have the build arch (`amd64`) embedded in the image # manifest, rather than the target arch. For example: # diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 78bbc619..ac42cee8 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -3,39 +3,39 @@ # This file was generated using a Jinja2 template. # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. 
-{% set build_stage_base_image = "rust:1.58-buster" %} +{% set build_stage_base_image = "rust:1.61-bullseye" %} {% if "alpine" in target_file %} {% if "amd64" in target_file %} -{% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-nightly-2022-01-23" %} -{% set runtime_stage_base_image = "alpine:3.15" %} +{% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-stable-1.61.0" %} +{% set runtime_stage_base_image = "alpine:3.16" %} {% set package_arch_target = "x86_64-unknown-linux-musl" %} {% elif "armv7" in target_file %} -{% set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23" %} -{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.15" %} +{% set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-stable-1.61.0" %} +{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.16" %} {% set package_arch_target = "armv7-unknown-linux-musleabihf" %} {% elif "armv6" in target_file %} -{% set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-nightly-2022-01-23" %} -{% set runtime_stage_base_image = "balenalib/rpi-alpine:3.15" %} +{% set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-stable-1.61.0" %} +{% set runtime_stage_base_image = "balenalib/rpi-alpine:3.16" %} {% set package_arch_target = "arm-unknown-linux-musleabi" %} {% elif "arm64" in target_file %} -{% set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-nightly-2022-01-23" %} -{% set runtime_stage_base_image = "balenalib/aarch64-alpine:3.15" %} +{% set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-stable-1.61.0" %} +{% set runtime_stage_base_image = "balenalib/aarch64-alpine:3.16" %} {% set package_arch_target = "aarch64-unknown-linux-musl" %} {% endif %} {% elif "amd64" in target_file %} -{% set runtime_stage_base_image = "debian:buster-slim" %} +{% set runtime_stage_base_image = "debian:bullseye-slim" %} {% elif "arm64" in target_file %} -{% set runtime_stage_base_image = 
"balenalib/aarch64-debian:buster" %} +{% set runtime_stage_base_image = "balenalib/aarch64-debian:bullseye" %} {% set package_arch_name = "arm64" %} {% set package_arch_target = "aarch64-unknown-linux-gnu" %} {% set package_cross_compiler = "aarch64-linux-gnu" %} {% elif "armv6" in target_file %} -{% set runtime_stage_base_image = "balenalib/rpi-debian:buster" %} +{% set runtime_stage_base_image = "balenalib/rpi-debian:bullseye" %} {% set package_arch_name = "armel" %} {% set package_arch_target = "arm-unknown-linux-gnueabi" %} {% set package_cross_compiler = "arm-linux-gnueabi" %} {% elif "armv7" in target_file %} -{% set runtime_stage_base_image = "balenalib/armv7hf-debian:buster" %} +{% set runtime_stage_base_image = "balenalib/armv7hf-debian:bullseye" %} {% set package_arch_name = "armhf" %} {% set package_arch_target = "armv7-unknown-linux-gnueabihf" %} {% set package_cross_compiler = "arm-linux-gnueabihf" %} @@ -59,8 +59,8 @@ # https://docs.docker.com/develop/develop-images/multistage-build/ # https://whitfin.io/speeding-up-rust-docker-builds/ ####################### VAULT BUILD IMAGE ####################### -{% set vault_version = "2.25.1" %} -{% set vault_image_digest = "sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965" %} +{% set vault_version = "v2022.6.2" %} +{% set vault_image_digest = "sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70" %} # The web-vault digest specifies a particular web-vault build on Docker Hub. # Using the digest instead of the tag name provides better security, # as the digest of an image is immutable, whereas a tag name can later @@ -70,13 +70,13 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v{{ vault_version }} -# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" vaultwarden/web-vault:v{{ vault_version }} +# $ docker pull vaultwarden/web-vault:{{ vault_version }} +# $ docker image inspect --format "{{ '{{' }}.RepoDigests}}" vaultwarden/web-vault:{{ vault_version }} # [vaultwarden/web-vault@{{ vault_image_digest }}] # # - Conversely, to get the tag name from the digest: # $ docker image inspect --format "{{ '{{' }}.RepoTags}}" vaultwarden/web-vault@{{ vault_image_digest }} -# [vaultwarden/web-vault:v{{ vault_version }}] +# [vaultwarden/web-vault:{{ vault_version }}] # FROM vaultwarden/web-vault@{{ vault_image_digest }} as vault @@ -93,22 +93,15 @@ ENV DEBIAN_FRONTEND=noninteractive \ CARGO_HOME="/root/.cargo" \ USER="root" -{# {% if "alpine" not in target_file and "buildx" in target_file %} -# Debian based Buildx builds can use some special apt caching to speedup building. -# By default Debian based images have some rules to keep docker builds clean, we need to remove this. 
-# See: https://hub.docker.com/r/docker/dockerfile -RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache -{% endif %} #} # Create CARGO_HOME folder and don't download rust docs RUN {{ mount_rust_cache -}} mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal {% if "alpine" in target_file %} -ENV RUSTFLAGS='-C link-arg=-s' -{% if "armv7" in target_file %} -{#- https://gcc.gnu.org/onlinedocs/gcc/ARM-Options.html -#} -ENV CFLAGS_armv7_unknown_linux_musleabihf="-mfpu=vfpv3-d16" +{% if "armv6" in target_file %} +# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location +ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/{{ package_arch_target }}/lib/libatomic.a' {% endif %} {% elif "arm" in target_file %} # @@ -163,7 +156,12 @@ RUN {{ mount_rust_cache -}} rustup target add {{ package_arch_target }} {% endif %} # Configure the DB ARG as late as possible to not invalidate the cached layers above +{% if "alpine" in target_file %} +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc +{% else %} ARG DB=sqlite,mysql,postgresql +{% endif %} # Builds your dependencies and removes the # dummy project, except the target folder @@ -182,21 +180,15 @@ RUN touch src/main.rs # your actual source files being built # hadolint ignore=DL3059 RUN {{ mount_rust_cache -}} cargo build --features ${DB} --release{{ package_arch_target_param }} -{% if "alpine" in target_file %} -{% if "armv7" in target_file %} -# hadolint ignore=DL3059 -RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden -{% endif %} -{% endif %} ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built FROM {{ runtime_stage_base_image }} -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + 
ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 {%- if "alpine" in runtime_stage_base_image %} \ SSL_CERT_DIR=/etc/ssl/certs {% endif %} @@ -214,7 +206,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates {% else %} && apt-get update && apt-get install -y \ @@ -222,13 +213,20 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* {% endif %} +{% if "armv6" in target_file and "alpine" not in target_file %} +# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink. +# This symlink was there in the buster images, and for some reason this is needed. +# hadolint ignore=DL3059 +RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3 + +{% endif -%} + {% if "amd64" not in target_file %} # hadolint ignore=DL3059 RUN [ "cross-build-end" ] @@ -241,7 +239,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault {% if package_arch_target is defined %} COPY --from=build /app/target/{{ package_arch_target }}/release/vaultwarden . @@ -254,6 +251,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index d4f14620..5435cc8a 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -87,11 +87,11 @@ RUN cargo build --features ${DB} --release ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM debian:buster-slim +FROM debian:bullseye-slim -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # Create data folder and Install needed libraries @@ -101,7 +101,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -115,7 +114,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to 
the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/release/vaultwarden . @@ -124,6 +122,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index 6da8db8c..5e092305 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build +FROM blackdex/rust-musl:x86_64-musl-stable-1.61.0 as 
build @@ -44,7 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +57,8 @@ COPY ./build.rs ./build.rs RUN rustup target add x86_64-unknown-linux-musl # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +81,11 @@ RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM alpine:3.15 +FROM alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -96,7 +96,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates @@ -107,7 +106,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/x86_64-unknown-linux-musl/release/vaultwarden . @@ -116,6 +114,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile.buildx b/docker/amd64/Dockerfile.buildx index f93622ea..1bae9391 100644 --- a/docker/amd64/Dockerfile.buildx +++ b/docker/amd64/Dockerfile.buildx @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -87,11 +87,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. 
######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM debian:buster-slim +FROM debian:bullseye-slim -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # Create data folder and Install needed libraries @@ -101,7 +101,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -115,7 +114,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/release/vaultwarden . @@ -124,6 +122,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/amd64/Dockerfile.buildx.alpine b/docker/amd64/Dockerfile.buildx.alpine index 71cfbcb2..15f979b5 100644 --- a/docker/amd64/Dockerfile.buildx.alpine +++ b/docker/amd64/Dockerfile.buildx.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:x86_64-musl-nightly-2022-01-23 as build +FROM blackdex/rust-musl:x86_64-musl-stable-1.61.0 as build @@ -44,7 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +57,8 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add x86_64-unknown-linux-musl # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine 
builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +81,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM alpine:3.15 +FROM alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -96,7 +96,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates @@ -107,7 +106,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/x86_64-unknown-linux-musl/release/vaultwarden . @@ -116,6 +114,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index 327e5011..0ab21c5b 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/aarch64-debian:buster +FROM balenalib/aarch64-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,7 +123,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -139,7 +138,6 @@ EXPOSE 3012 # Copies the files from the context 
(Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/vaultwarden . @@ -148,6 +146,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile.alpine b/docker/arm64/Dockerfile.alpine index 59356051..cfe8b3b3 100644 --- a/docker/arm64/Dockerfile.alpine +++ b/docker/arm64/Dockerfile.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM 
blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build +FROM blackdex/rust-musl:aarch64-musl-stable-1.61.0 as build @@ -44,7 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +57,8 @@ COPY ./build.rs ./build.rs RUN rustup target add aarch64-unknown-linux-musl # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +81,11 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/aarch64-alpine:3.15 +FROM balenalib/aarch64-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -98,7 +98,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -111,7 +110,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden . @@ -120,6 +118,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile.buildx b/docker/arm64/Dockerfile.buildx index cd4a78fe..4c5d0474 100644 --- a/docker/arm64/Dockerfile.buildx +++ b/docker/arm64/Dockerfile.buildx @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. 
######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/aarch64-debian:buster +FROM balenalib/aarch64-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,7 +123,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -139,7 +138,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/aarch64-unknown-linux-gnu/release/vaultwarden . @@ -148,6 +146,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile.buildx.alpine b/docker/arm64/Dockerfile.buildx.alpine index 3eb79c11..787280d4 100644 --- a/docker/arm64/Dockerfile.buildx.alpine +++ b/docker/arm64/Dockerfile.buildx.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:aarch64-musl-nightly-2022-01-23 as build +FROM blackdex/rust-musl:aarch64-musl-stable-1.61.0 as build @@ -44,7 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +57,8 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add aarch64-unknown-linux-musl # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on 
Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +81,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/aarch64-alpine:3.15 +FROM balenalib/aarch64-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -98,7 +98,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -111,7 +110,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden . @@ -120,6 +118,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index 78217817..b3bbbf92 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/rpi-debian:buster +FROM balenalib/rpi-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,12 +123,16 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* +# In the Balena Bullseye images for armv6/rpi-debian 
there is a missing symlink. +# This symlink was there in the buster images, and for some reason this is needed. +# hadolint ignore=DL3059 +RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3 + # hadolint ignore=DL3059 RUN [ "cross-build-end" ] @@ -139,7 +143,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/vaultwarden . @@ -148,6 +151,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile.alpine b/docker/armv6/Dockerfile.alpine index 1fd7f376..e544fea4 100644 --- a/docker/armv6/Dockerfile.alpine +++ b/docker/armv6/Dockerfile.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build +FROM blackdex/rust-musl:arm-musleabi-stable-1.61.0 as build @@ -44,7 +44,8 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' +# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location +ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +59,8 @@ COPY ./build.rs ./build.rs RUN rustup target add arm-unknown-linux-musleabi # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve 
performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +83,11 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/rpi-alpine:3.15 +FROM balenalib/rpi-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -98,7 +100,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -111,7 +112,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden . @@ -120,6 +120,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile.buildx b/docker/armv6/Dockerfile.buildx index 98da897b..4fd3aef4 100644 --- a/docker/armv6/Dockerfile.buildx +++ b/docker/armv6/Dockerfile.buildx @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. 
######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/rpi-debian:buster +FROM balenalib/rpi-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,12 +123,16 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* +# In the Balena Bullseye images for armv6/rpi-debian there is a missing symlink. +# This symlink was there in the buster images, and for some reason this is needed. +# hadolint ignore=DL3059 +RUN ln -v -s /lib/ld-linux-armhf.so.3 /lib/ld-linux.so.3 + # hadolint ignore=DL3059 RUN [ "cross-build-end" ] @@ -139,7 +143,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/arm-unknown-linux-gnueabi/release/vaultwarden . @@ -148,6 +151,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile.buildx.alpine b/docker/armv6/Dockerfile.buildx.alpine index da032453..af4547c1 100644 --- a/docker/armv6/Dockerfile.buildx.alpine +++ b/docker/armv6/Dockerfile.buildx.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:arm-musleabi-nightly-2022-01-23 as build +FROM blackdex/rust-musl:arm-musleabi-stable-1.61.0 as build @@ -44,7 +44,8 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' +# To be able to build the armv6 image with mimalloc we need to specifically specify the libatomic.a file location +ENV RUSTFLAGS='-Clink-arg=/usr/local/musl/arm-unknown-linux-musleabi/lib/libatomic.a' # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -58,7 +59,8 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup 
target add arm-unknown-linux-musleabi # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -81,11 +83,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/rpi-alpine:3.15 +FROM balenalib/rpi-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -98,7 +100,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -111,7 +112,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden . @@ -120,6 +120,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index ed1c4726..11a7a70e 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabih ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-debian:buster +FROM balenalib/armv7hf-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,7 +123,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -139,7 +138,6 @@ EXPOSE 3012 # Copies the files from the context 
(Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/vaultwarden . @@ -148,6 +146,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index cf2e440f..a64dd871 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM 
blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build +FROM blackdex/rust-musl:armv7-musleabihf-stable-1.61.0 as build @@ -44,8 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' -ENV CFLAGS_armv7_unknown_linux_musleabihf="-mfpu=vfpv3-d16" # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -59,7 +57,8 @@ COPY ./build.rs ./build.rs RUN rustup target add armv7-unknown-linux-musleabihf # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -78,17 +77,15 @@ RUN touch src/main.rs # your actual source files being built # hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf -# hadolint ignore=DL3059 -RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-alpine:3.15 +FROM balenalib/armv7hf-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -101,7 +98,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -114,7 +110,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-musleabihf/release/vaultwarden . 
@@ -123,6 +118,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile.buildx b/docker/armv7/Dockerfile.buildx index df5ec242..c3820a9d 100644 --- a/docker/armv7/Dockerfile.buildx +++ b/docker/armv7/Dockerfile.buildx @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. # - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.58-buster as build +FROM rust:1.61-bullseye as build @@ -107,11 +107,11 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. 
######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-debian:buster +FROM balenalib/armv7hf-debian:bullseye -ENV ROCKET_ENV="staging" \ - ROCKET_PORT=80 \ - ROCKET_WORKERS=10 +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ + ROCKET_PORT=80 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -123,7 +123,6 @@ RUN mkdir /data \ openssl \ ca-certificates \ curl \ - dumb-init \ libmariadb-dev-compat \ libpq5 \ && apt-get clean \ @@ -139,7 +138,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-gnueabihf/release/vaultwarden . @@ -148,6 +146,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile.buildx.alpine b/docker/armv7/Dockerfile.buildx.alpine index d2093bee..3be205aa 100644 --- a/docker/armv7/Dockerfile.buildx.alpine +++ b/docker/armv7/Dockerfile.buildx.alpine @@ -16,18 +16,18 @@ # - From https://hub.docker.com/r/vaultwarden/web-vault/tags, # click the tag name to view the digest of the image it currently points to. 
# - From the command line: -# $ docker pull vaultwarden/web-vault:v2.25.1 -# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.1 -# [vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965] +# $ docker pull vaultwarden/web-vault:v2022.6.2 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2022.6.2 +# [vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70] # # - Conversely, to get the tag name from the digest: -# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 -# [vaultwarden/web-vault:v2.25.1] +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 +# [vaultwarden/web-vault:v2022.6.2] # -FROM vaultwarden/web-vault@sha256:4f9b7a6b0eaceb511cca8c6a5ed5aa92f527960b1b33d86fbbfd4e5795943965 as vault +FROM vaultwarden/web-vault@sha256:1dfda41cbddeac5bc59540261fff8defcac37170b5ba02d29c12fa1215498f70 as vault ########################## BUILD IMAGE ########################## -FROM blackdex/rust-musl:armv7-musleabihf-nightly-2022-01-23 as build +FROM blackdex/rust-musl:armv7-musleabihf-stable-1.61.0 as build @@ -44,8 +44,6 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -ENV RUSTFLAGS='-C link-arg=-s' -ENV CFLAGS_armv7_unknown_linux_musleabihf="-mfpu=vfpv3-d16" # Creates a dummy project used to grab dependencies RUN USER=root cargo new --bin /app @@ -59,7 +57,8 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add armv7-unknown-linux-musleabihf # Configure the DB ARG as late as possible to not invalidate the cached layers above -ARG 
DB=sqlite,mysql,postgresql +# Enable MiMalloc to improve performance on Alpine builds +ARG DB=sqlite,mysql,postgresql,enable_mimalloc # Builds your dependencies and removes the # dummy project, except the target folder @@ -78,17 +77,15 @@ RUN touch src/main.rs # your actual source files being built # hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf -# hadolint ignore=DL3059 -RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-alpine:3.15 +FROM balenalib/armv7hf-alpine:3.16 -ENV ROCKET_ENV="staging" \ +ENV ROCKET_PROFILE="release" \ + ROCKET_ADDRESS=0.0.0.0 \ ROCKET_PORT=80 \ - ROCKET_WORKERS=10 \ SSL_CERT_DIR=/etc/ssl/certs @@ -101,7 +98,6 @@ RUN mkdir /data \ openssl \ tzdata \ curl \ - dumb-init \ ca-certificates # hadolint ignore=DL3059 @@ -114,7 +110,6 @@ EXPOSE 3012 # Copies the files from the context (Rocket.toml file and web-vault) # and the binary from the "build" stage to the current stage WORKDIR / -COPY Rocket.toml . COPY --from=vault /web-vault ./web-vault COPY --from=build /app/target/armv7-unknown-linux-musleabihf/release/vaultwarden . @@ -123,6 +118,4 @@ COPY docker/start.sh /start.sh HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] -# Configures the startup! -ENTRYPOINT ["/usr/bin/dumb-init", "--"] CMD ["/start.sh"] diff --git a/docker/healthcheck.sh b/docker/healthcheck.sh index 8f9fe982..002a3c63 100755 --- a/docker/healthcheck.sh +++ b/docker/healthcheck.sh @@ -2,8 +2,8 @@ # Use the value of the corresponding env var (if present), # or a default value otherwise. 
-: ${DATA_FOLDER:="data"} -: ${ROCKET_PORT:="80"} +: "${DATA_FOLDER:="data"}" +: "${ROCKET_PORT:="80"}" CONFIG_FILE="${DATA_FOLDER}"/config.json diff --git a/docker/start.sh b/docker/start.sh index c3946c84..e9a932e4 100755 --- a/docker/start.sh +++ b/docker/start.sh @@ -9,15 +9,15 @@ fi if [ -d /etc/vaultwarden.d ]; then for f in /etc/vaultwarden.d/*.sh; do - if [ -r $f ]; then - . $f + if [ -r "${f}" ]; then + . "${f}" fi done elif [ -d /etc/bitwarden_rs.d ]; then echo "### You are using the old /etc/bitwarden_rs.d script directory, please migrate to /etc/vaultwarden.d ###" for f in /etc/bitwarden_rs.d/*.sh; do - if [ -r $f ]; then - . $f + if [ -r "${f}" ]; then + . "${f}" fi done fi diff --git a/migrations/mysql/2022-03-02-210038_update_devices_primary_key/down.sql b/migrations/mysql/2022-03-02-210038_update_devices_primary_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mysql/2022-03-02-210038_update_devices_primary_key/up.sql b/migrations/mysql/2022-03-02-210038_update_devices_primary_key/up.sql new file mode 100644 index 00000000..5eee7434 --- /dev/null +++ b/migrations/mysql/2022-03-02-210038_update_devices_primary_key/up.sql @@ -0,0 +1,4 @@ +-- First remove the previous primary key +ALTER TABLE devices DROP PRIMARY KEY; +-- Add a new combined one +ALTER TABLE devices ADD PRIMARY KEY (uuid, user_uuid); diff --git a/migrations/postgresql/2022-03-02-210038_update_devices_primary_key/down.sql b/migrations/postgresql/2022-03-02-210038_update_devices_primary_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/postgresql/2022-03-02-210038_update_devices_primary_key/up.sql b/migrations/postgresql/2022-03-02-210038_update_devices_primary_key/up.sql new file mode 100644 index 00000000..0dc9ea67 --- /dev/null +++ b/migrations/postgresql/2022-03-02-210038_update_devices_primary_key/up.sql @@ -0,0 +1,4 @@ +-- First remove the previous primary key +ALTER TABLE devices DROP CONSTRAINT devices_pkey; +-- Add 
a new combined one +ALTER TABLE devices ADD PRIMARY KEY (uuid, user_uuid); diff --git a/migrations/sqlite/2022-03-02-210038_update_devices_primary_key/down.sql b/migrations/sqlite/2022-03-02-210038_update_devices_primary_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/sqlite/2022-03-02-210038_update_devices_primary_key/up.sql b/migrations/sqlite/2022-03-02-210038_update_devices_primary_key/up.sql new file mode 100644 index 00000000..7bc5f0d6 --- /dev/null +++ b/migrations/sqlite/2022-03-02-210038_update_devices_primary_key/up.sql @@ -0,0 +1,23 @@ +-- Create new devices table with primary keys on both uuid and user_uuid +CREATE TABLE devices_new ( + uuid TEXT NOT NULL, + created_at DATETIME NOT NULL, + updated_at DATETIME NOT NULL, + user_uuid TEXT NOT NULL, + name TEXT NOT NULL, + atype INTEGER NOT NULL, + push_token TEXT, + refresh_token TEXT NOT NULL, + twofactor_remember TEXT, + PRIMARY KEY(uuid, user_uuid), + FOREIGN KEY(user_uuid) REFERENCES users(uuid) +); + +-- Transfer current data to new table +INSERT INTO devices_new SELECT * FROM devices; + +-- Drop the old table +DROP TABLE devices; + +-- Rename the new table to the original name +ALTER TABLE devices_new RENAME TO devices; diff --git a/rust-toolchain b/rust-toolchain index 4c62882b..91951fd8 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2022-01-23 +1.61.0 diff --git a/rustfmt.toml b/rustfmt.toml index 630b42b2..2867b141 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,7 +1,7 @@ -version = "Two" -edition = "2018" +# version = "Two" +edition = "2021" max_width = 120 newline_style = "Unix" use_small_heuristics = "Off" -struct_lit_single_line = false -overflow_delimited_expr = true +# struct_lit_single_line = false +# overflow_delimited_expr = true diff --git a/src/api/admin.rs b/src/api/admin.rs index 38d30c99..2f946fc5 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -3,13 +3,14 @@ use serde::de::DeserializeOwned; use serde_json::Value; 
use std::env; +use rocket::serde::json::Json; use rocket::{ - http::{Cookie, Cookies, SameSite, Status}, - request::{self, FlashMessage, Form, FromRequest, Outcome, Request}, - response::{content::Html, Flash, Redirect}, + form::Form, + http::{Cookie, CookieJar, SameSite, Status}, + request::{self, FlashMessage, FromRequest, Outcome, Request}, + response::{content::RawHtml as Html, Flash, Redirect}, Route, }; -use rocket_contrib::json::Json; use crate::{ api::{ApiResult, EmptyResult, JsonResult, NumberOrString}, @@ -24,6 +25,8 @@ use crate::{ CONFIG, VERSION, }; +use futures::{stream, stream::StreamExt}; + pub fn routes() -> Vec { if !CONFIG.disable_admin_token() && !CONFIG.is_admin_token_set() { return routes![admin_disabled]; @@ -76,6 +79,7 @@ fn admin_disabled() -> &'static str { const COOKIE_NAME: &str = "VW_ADMIN"; const ADMIN_PATH: &str = "/admin"; +const DT_FMT: &str = "%Y-%m-%d %H:%M:%S %Z"; const BASE_TEMPLATE: &str = "admin/base"; @@ -85,10 +89,11 @@ fn admin_path() -> String { struct Referer(Option); -impl<'a, 'r> FromRequest<'a, 'r> for Referer { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for Referer { type Error = (); - fn from_request(request: &'a Request<'r>) -> request::Outcome { + async fn from_request(request: &'r Request<'_>) -> request::Outcome { Outcome::Success(Referer(request.headers().get_one("Referer").map(str::to_string))) } } @@ -96,10 +101,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for Referer { #[derive(Debug)] struct IpHeader(Option); -impl<'a, 'r> FromRequest<'a, 'r> for IpHeader { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for IpHeader { type Error = (); - fn from_request(req: &'a Request<'r>) -> Outcome { + async fn from_request(req: &'r Request<'_>) -> Outcome { if req.headers().get_one(&CONFIG.ip_header()).is_some() { Outcome::Success(IpHeader(Some(CONFIG.ip_header()))) } else if req.headers().get_one("X-Client-IP").is_some() { @@ -136,9 +142,9 @@ fn admin_url(referer: Referer) -> String { } #[get("/", rank = 2)] -fn 
admin_login(flash: Option) -> ApiResult> { +fn admin_login(flash: Option>) -> ApiResult> { // If there is an error, show it - let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg())); + let msg = flash.map(|msg| format!("{}: {}", msg.kind(), msg.message())); let json = json!({ "page_content": "admin/login", "version": VERSION, @@ -159,7 +165,7 @@ struct LoginForm { #[post("/", data = "")] fn post_admin_login( data: Form, - mut cookies: Cookies, + cookies: &CookieJar<'_>, ip: ClientIp, referer: Referer, ) -> Result> { @@ -180,7 +186,7 @@ fn post_admin_login( let cookie = Cookie::build(COOKIE_NAME, jwt) .path(admin_path()) - .max_age(time::Duration::minutes(20)) + .max_age(rocket::time::Duration::minutes(20)) .same_site(SameSite::Strict) .http_only(true) .finish(); @@ -250,8 +256,8 @@ struct InviteData { email: String, } -fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult { - if let Some(user) = User::find_by_uuid(uuid, conn) { +async fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult { + if let Some(user) = User::find_by_uuid(uuid, conn).await { Ok(user) } else { err_code!("User doesn't exist", Status::NotFound.code); @@ -259,128 +265,135 @@ fn get_user_or_404(uuid: &str, conn: &DbConn) -> ApiResult { } #[post("/invite", data = "")] -fn invite_user(data: Json, _token: AdminToken, conn: DbConn) -> JsonResult { +async fn invite_user(data: Json, _token: AdminToken, conn: DbConn) -> JsonResult { let data: InviteData = data.into_inner(); let email = data.email.clone(); - if User::find_by_mail(&data.email, &conn).is_some() { + if User::find_by_mail(&data.email, &conn).await.is_some() { err_code!("User already exists", Status::Conflict.code) } let mut user = User::new(email); - // TODO: After try_blocks is stabilized, this can be made more readable - // See: https://github.com/rust-lang/rust/issues/31436 - (|| { + async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult { if CONFIG.mail_enabled() { - mail::send_invite(&user.email, 
&user.uuid, None, None, &CONFIG.invitation_org_name(), None)?; + mail::send_invite(&user.email, &user.uuid, None, None, &CONFIG.invitation_org_name(), None).await } else { let invitation = Invitation::new(user.email.clone()); - invitation.save(&conn)?; + invitation.save(conn).await } + } - user.save(&conn) - })() - .map_err(|e| e.with_code(Status::InternalServerError.code))?; + _generate_invite(&user, &conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?; + user.save(&conn).await.map_err(|e| e.with_code(Status::InternalServerError.code))?; - Ok(Json(user.to_json(&conn))) + Ok(Json(user.to_json(&conn).await)) } #[post("/test/smtp", data = "")] -fn test_smtp(data: Json, _token: AdminToken) -> EmptyResult { +async fn test_smtp(data: Json, _token: AdminToken) -> EmptyResult { let data: InviteData = data.into_inner(); if CONFIG.mail_enabled() { - mail::send_test(&data.email) + mail::send_test(&data.email).await } else { err!("Mail is not enabled") } } #[get("/logout")] -fn logout(mut cookies: Cookies, referer: Referer) -> Redirect { - cookies.remove(Cookie::named(COOKIE_NAME)); +fn logout(cookies: &CookieJar<'_>, referer: Referer) -> Redirect { + cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish()); Redirect::to(admin_url(referer)) } #[get("/users")] -fn get_users_json(_token: AdminToken, conn: DbConn) -> Json { - let users = User::get_all(&conn); - let users_json: Vec = users.iter().map(|u| u.to_json(&conn)).collect(); +async fn get_users_json(_token: AdminToken, conn: DbConn) -> Json { + let users_json = stream::iter(User::get_all(&conn).await) + .then(|u| async { + let u = u; // Move out this single variable + let mut usr = u.to_json(&conn).await; + usr["UserEnabled"] = json!(u.enabled); + usr["CreatedAt"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + usr + }) + .collect::>() + .await; Json(Value::Array(users_json)) } #[get("/users/overview")] -fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> 
{ - let users = User::get_all(&conn); - let dt_fmt = "%Y-%m-%d %H:%M:%S %Z"; - let users_json: Vec = users - .iter() - .map(|u| { - let mut usr = u.to_json(&conn); - usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn)); - usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn)); - usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn) as i32)); +async fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { + let users_json = stream::iter(User::get_all(&conn).await) + .then(|u| async { + let u = u; // Move out this single variable + let mut usr = u.to_json(&conn).await; + usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn).await); + usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn).await); + usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn).await as i32)); usr["user_enabled"] = json!(u.enabled); - usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt)); - usr["last_active"] = match u.last_active(&conn) { - Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)), + usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + usr["last_active"] = match u.last_active(&conn).await { + Some(dt) => json!(format_naive_datetime_local(&dt, DT_FMT)), None => json!("Never"), }; usr }) - .collect(); + .collect::>() + .await; let text = AdminTemplateData::with_data("admin/users", json!(users_json)).render()?; Ok(Html(text)) } #[get("/users/")] -fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult { - let user = get_user_or_404(&uuid, &conn)?; - - Ok(Json(user.to_json(&conn))) +async fn get_user_json(uuid: String, _token: AdminToken, conn: DbConn) -> JsonResult { + let u = get_user_or_404(&uuid, &conn).await?; + let mut usr = u.to_json(&conn).await; + usr["UserEnabled"] = json!(u.enabled); + usr["CreatedAt"] = 
json!(format_naive_datetime_local(&u.created_at, DT_FMT)); + Ok(Json(usr)) } #[post("/users//delete")] -fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let user = get_user_or_404(&uuid, &conn)?; - user.delete(&conn) +async fn delete_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let user = get_user_or_404(&uuid, &conn).await?; + user.delete(&conn).await } #[post("/users//deauth")] -fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let mut user = get_user_or_404(&uuid, &conn)?; - Device::delete_all_by_user(&user.uuid, &conn)?; +async fn deauth_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let mut user = get_user_or_404(&uuid, &conn).await?; + Device::delete_all_by_user(&user.uuid, &conn).await?; user.reset_security_stamp(); - user.save(&conn) + user.save(&conn).await } #[post("/users//disable")] -fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let mut user = get_user_or_404(&uuid, &conn)?; - Device::delete_all_by_user(&user.uuid, &conn)?; +async fn disable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let mut user = get_user_or_404(&uuid, &conn).await?; + Device::delete_all_by_user(&user.uuid, &conn).await?; user.reset_security_stamp(); user.enabled = false; - user.save(&conn) + user.save(&conn).await } #[post("/users//enable")] -fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let mut user = get_user_or_404(&uuid, &conn)?; +async fn enable_user(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let mut user = get_user_or_404(&uuid, &conn).await?; user.enabled = true; - user.save(&conn) + user.save(&conn).await } #[post("/users//remove-2fa")] -fn remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let mut user = get_user_or_404(&uuid, &conn)?; - TwoFactor::delete_all_by_user(&user.uuid, &conn)?; +async fn 
remove_2fa(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let mut user = get_user_or_404(&uuid, &conn).await?; + TwoFactor::delete_all_by_user(&user.uuid, &conn).await?; user.totp_recover = None; - user.save(&conn) + user.save(&conn).await } #[derive(Deserialize, Debug)] @@ -391,10 +404,10 @@ struct UserOrgTypeData { } #[post("/users/org_type", data = "")] -fn update_user_org_type(data: Json, _token: AdminToken, conn: DbConn) -> EmptyResult { +async fn update_user_org_type(data: Json, _token: AdminToken, conn: DbConn) -> EmptyResult { let data: UserOrgTypeData = data.into_inner(); - let mut user_to_edit = match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn) { + let mut user_to_edit = match UserOrganization::find_by_user_and_org(&data.user_uuid, &data.org_uuid, &conn).await { Some(user) => user, None => err!("The specified user isn't member of the organization"), }; @@ -406,45 +419,46 @@ fn update_user_org_type(data: Json, _token: AdminToken, conn: D if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner { // Removing owner permmission, check that there are at least another owner - let num_owners = UserOrganization::find_by_org_and_type(&data.org_uuid, UserOrgType::Owner as i32, &conn).len(); + let num_owners = + UserOrganization::find_by_org_and_type(&data.org_uuid, UserOrgType::Owner as i32, &conn).await.len(); if num_owners <= 1 { err!("Can't change the type of the last owner") } } - user_to_edit.atype = new_type as i32; - user_to_edit.save(&conn) + user_to_edit.atype = new_type; + user_to_edit.save(&conn).await } #[post("/users/update_revision")] -fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { - User::update_all_revisions(&conn) +async fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { + User::update_all_revisions(&conn).await } #[get("/organizations/overview")] -fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult> 
{ - let organizations = Organization::get_all(&conn); - let organizations_json: Vec = organizations - .iter() - .map(|o| { +async fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { + let organizations_json = stream::iter(Organization::get_all(&conn).await) + .then(|o| async { + let o = o; //Move out this single variable let mut org = o.to_json(); - org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn)); - org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn)); - org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn)); - org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn) as i32)); + org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn).await); + org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn).await); + org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn).await); + org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn).await as i32)); org }) - .collect(); + .collect::>() + .await; let text = AdminTemplateData::with_data("admin/organizations", json!(organizations_json)).render()?; Ok(Html(text)) } #[post("/organizations//delete")] -fn delete_organization(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { - let org = Organization::find_by_uuid(&uuid, &conn).map_res("Organization doesn't exist")?; - org.delete(&conn) +async fn delete_organization(uuid: String, _token: AdminToken, conn: DbConn) -> EmptyResult { + let org = Organization::find_by_uuid(&uuid, &conn).await.map_res("Organization doesn't exist")?; + org.delete(&conn).await } #[derive(Deserialize)] @@ -462,32 +476,74 @@ struct GitCommit { sha: String, } -fn get_github_api(url: &str) -> Result { +async fn get_github_api(url: &str) -> Result { let github_api = get_reqwest_client(); - Ok(github_api.get(url).send()?.error_for_status()?.json::()?) 
+ Ok(github_api.get(url).send().await?.error_for_status()?.json::().await?) } -fn has_http_access() -> bool { +async fn has_http_access() -> bool { let http_access = get_reqwest_client(); - match http_access.head("https://github.com/dani-garcia/vaultwarden").send() { + match http_access.head("https://github.com/dani-garcia/vaultwarden").send().await { Ok(r) => r.status().is_success(), _ => false, } } +use cached::proc_macro::cached; +/// Cache this function to prevent API call rate limit. Github only allows 60 requests per hour, and we use 3 here already. +/// It will cache this function for 300 seconds (5 minutes) which should prevent the exhaustion of the rate limit. +#[cached(time = 300, sync_writes = true)] +async fn get_release_info(has_http_access: bool, running_within_docker: bool) -> (String, String, String) { + // If the HTTP Check failed, do not even attempt to check for new versions since we were not able to connect with github.com anyway. + if has_http_access { + info!("Running get_release_info!!"); + ( + match get_github_api::("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest") + .await + { + Ok(r) => r.tag_name, + _ => "-".to_string(), + }, + match get_github_api::("https://api.github.com/repos/dani-garcia/vaultwarden/commits/main").await + { + Ok(mut c) => { + c.sha.truncate(8); + c.sha + } + _ => "-".to_string(), + }, + // Do not fetch the web-vault version when running within Docker. 
+ // The web-vault version is embedded within the container it self, and should not be updated manually + if running_within_docker { + "-".to_string() + } else { + match get_github_api::( + "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", + ) + .await + { + Ok(r) => r.tag_name.trim_start_matches('v').to_string(), + _ => "-".to_string(), + } + }, + ) + } else { + ("-".to_string(), "-".to_string(), "-".to_string()) + } +} + #[get("/diagnostics")] -fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult> { - use crate::util::read_file_string; +async fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult> { use chrono::prelude::*; use std::net::ToSocketAddrs; // Get current running versions let web_vault_version: WebVaultVersion = - match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) { + match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) { Ok(s) => serde_json::from_str(&s)?, - _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { + _ => match std::fs::read_to_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { Ok(s) => serde_json::from_str(&s)?, _ => WebVaultVersion { version: String::from("Version file missing"), @@ -497,7 +553,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // Execute some environment checks let running_within_docker = is_running_in_docker(); - let has_http_access = has_http_access(); + let has_http_access = has_http_access().await; let uses_proxy = env::var_os("HTTP_PROXY").is_some() || env::var_os("http_proxy").is_some() || env::var_os("HTTPS_PROXY").is_some() @@ -509,37 +565,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu _ => "Could not resolve domain name.".to_string(), }; - // If the HTTP Check failed, do not even attempt to check for new versions since we 
were not able to connect with github.com anyway. - // TODO: Maybe we need to cache this using a LazyStatic or something. Github only allows 60 requests per hour, and we use 3 here already. - let (latest_release, latest_commit, latest_web_build) = if has_http_access { - ( - match get_github_api::("https://api.github.com/repos/dani-garcia/vaultwarden/releases/latest") { - Ok(r) => r.tag_name, - _ => "-".to_string(), - }, - match get_github_api::("https://api.github.com/repos/dani-garcia/vaultwarden/commits/main") { - Ok(mut c) => { - c.sha.truncate(8); - c.sha - } - _ => "-".to_string(), - }, - // Do not fetch the web-vault version when running within Docker. - // The web-vault version is embedded within the container it self, and should not be updated manually - if running_within_docker { - "-".to_string() - } else { - match get_github_api::( - "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", - ) { - Ok(r) => r.tag_name.trim_start_matches('v').to_string(), - _ => "-".to_string(), - } - }, - ) - } else { - ("-".to_string(), "-".to_string(), "-".to_string()) - }; + let (latest_release, latest_commit, latest_web_build) = + get_release_info(has_http_access, running_within_docker).await; let ip_header_name = match &ip_header.0 { Some(h) => h, @@ -562,7 +589,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu "ip_header_config": &CONFIG.ip_header(), "uses_proxy": uses_proxy, "db_type": *DB_TYPE, - "db_version": get_sql_server_version(&conn), + "db_version": get_sql_server_version(&conn).await, "admin_url": format!("{}/diagnostics", admin_url(Referer(None))), "overrides": &CONFIG.get_overrides().join(", "), "server_time_local": Local::now().format("%Y-%m-%d %H:%M:%S %Z").to_string(), @@ -591,9 +618,9 @@ fn delete_config(_token: AdminToken) -> EmptyResult { } #[post("/config/backup_db")] -fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult { +async fn backup_db(_token: AdminToken, conn: DbConn) -> 
EmptyResult { if *CAN_BACKUP { - backup_database(&conn) + backup_database(&conn).await } else { err!("Can't back up current DB (Only SQLite supports this feature)"); } @@ -601,28 +628,29 @@ fn backup_db(_token: AdminToken, conn: DbConn) -> EmptyResult { pub struct AdminToken {} -impl<'a, 'r> FromRequest<'a, 'r> for AdminToken { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for AdminToken { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> request::Outcome { + async fn from_request(request: &'r Request<'_>) -> request::Outcome { if CONFIG.disable_admin_token() { Outcome::Success(AdminToken {}) } else { - let mut cookies = request.cookies(); + let cookies = request.cookies(); let access_token = match cookies.get(COOKIE_NAME) { Some(cookie) => cookie.value(), None => return Outcome::Forward(()), // If there is no cookie, redirect to login }; - let ip = match request.guard::() { + let ip = match ClientIp::from_request(request).await { Outcome::Success(ip) => ip.ip, _ => err_handler!("Error getting Client IP"), }; if decode_admin(access_token).is_err() { // Remove admin cookie - cookies.remove(Cookie::named(COOKIE_NAME)); + cookies.remove(Cookie::build(COOKIE_NAME, "").path(admin_path()).finish()); error!("Invalid or expired admin JWT. IP: {}.", ip); return Outcome::Forward(()); } diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index ead05478..35202698 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -1,5 +1,5 @@ use chrono::Utc; -use rocket_contrib::json::Json; +use rocket::serde::json::Json; use serde_json::Value; use crate::{ @@ -62,12 +62,43 @@ struct KeysData { PublicKey: String, } +/// Trims whitespace from password hints, and converts blank password hints to `None`. 
+fn clean_password_hint(password_hint: &Option) -> Option { + match password_hint { + None => None, + Some(h) => match h.trim() { + "" => None, + ht => Some(ht.to_string()), + }, + } +} + +fn enforce_password_hint_setting(password_hint: &Option) -> EmptyResult { + if password_hint.is_some() && !CONFIG.password_hints_allowed() { + err!("Password hints have been disabled by the administrator. Remove the hint and try again."); + } + Ok(()) +} + #[post("/accounts/register", data = "")] -fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { let data: RegisterData = data.into_inner().data; let email = data.Email.to_lowercase(); - let mut user = match User::find_by_mail(&email, &conn) { + // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) + // This also prevents issues with very long usernames causing to large JWT's. See #2419 + if let Some(ref name) = data.Name { + if name.len() > 50 { + err!("The field Name must be a string with a maximum length of 50."); + } + } + + // Check against the password hint setting here so if it fails, the user + // can retry without losing their invitation below. 
+ let password_hint = clean_password_hint(&data.MasterPasswordHint); + enforce_password_hint_setting(&password_hint)?; + + let mut user = match User::find_by_mail(&email, &conn).await { Some(user) => { if !user.password_hash.is_empty() { if CONFIG.is_signup_allowed(&email) { @@ -84,13 +115,13 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { } else { err!("Registration email does not match invite email") } - } else if Invitation::take(&email, &conn) { - for mut user_org in UserOrganization::find_invited_by_user(&user.uuid, &conn).iter_mut() { + } else if Invitation::take(&email, &conn).await { + for mut user_org in UserOrganization::find_invited_by_user(&user.uuid, &conn).await.iter_mut() { user_org.status = UserOrgStatus::Accepted as i32; - user_org.save(&conn)?; + user_org.save(&conn).await?; } user - } else if EmergencyAccess::find_invited_by_grantee_email(&email, &conn).is_some() { + } else if EmergencyAccess::find_invited_by_grantee_email(&email, &conn).await.is_some() { user } else if CONFIG.is_signup_allowed(&email) { err!("Account with this email already exists") @@ -102,7 +133,7 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { // Order is important here; the invitation check must come first // because the vaultwarden admin can invite anyone, regardless // of other signup restrictions. - if Invitation::take(&email, &conn) || CONFIG.is_signup_allowed(&email) { + if Invitation::take(&email, &conn).await || CONFIG.is_signup_allowed(&email) { User::new(email.clone()) } else { err!("Registration not allowed or user already exists") @@ -111,7 +142,7 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { }; // Make sure we don't leave a lingering invitation. 
- Invitation::take(&email, &conn); + Invitation::take(&email, &conn).await; if let Some(client_kdf_iter) = data.KdfIterations { user.client_kdf_iter = client_kdf_iter; @@ -123,16 +154,13 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { user.set_password(&data.MasterPasswordHash, None); user.akey = data.Key; + user.password_hint = password_hint; // Add extra fields if present if let Some(name) = data.Name { user.name = name; } - if let Some(hint) = data.MasterPasswordHint { - user.password_hint = Some(hint); - } - if let Some(keys) = data.Keys { user.private_key = Some(keys.EncryptedPrivateKey); user.public_key = Some(keys.PublicKey); @@ -140,22 +168,22 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { if CONFIG.mail_enabled() { if CONFIG.signups_verify() { - if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid) { + if let Err(e) = mail::send_welcome_must_verify(&user.email, &user.uuid).await { error!("Error sending welcome email: {:#?}", e); } user.last_verifying_at = Some(user.created_at); - } else if let Err(e) = mail::send_welcome(&user.email) { + } else if let Err(e) = mail::send_welcome(&user.email).await { error!("Error sending welcome email: {:#?}", e); } } - user.save(&conn) + user.save(&conn).await } #[get("/accounts/profile")] -fn profile(headers: Headers, conn: DbConn) -> Json { - Json(headers.user.to_json(&conn)) +async fn profile(headers: Headers, conn: DbConn) -> Json { + Json(headers.user.to_json(&conn).await) } #[derive(Deserialize, Debug)] @@ -168,28 +196,32 @@ struct ProfileData { } #[put("/accounts/profile", data = "")] -fn put_profile(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - post_profile(data, headers, conn) +async fn put_profile(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + post_profile(data, headers, conn).await } #[post("/accounts/profile", data = "")] -fn post_profile(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn 
post_profile(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: ProfileData = data.into_inner().data; - let mut user = headers.user; + // Check if the length of the username exceeds 50 characters (Same is Upstream Bitwarden) + // This also prevents issues with very long usernames causing to large JWT's. See #2419 + if data.Name.len() > 50 { + err!("The field Name must be a string with a maximum length of 50."); + } + let mut user = headers.user; user.name = data.Name; - user.password_hint = match data.MasterPasswordHint { - Some(ref h) if h.is_empty() => None, - _ => data.MasterPasswordHint, - }; - user.save(&conn)?; - Ok(Json(user.to_json(&conn))) + user.password_hint = clean_password_hint(&data.MasterPasswordHint); + enforce_password_hint_setting(&user.password_hint)?; + + user.save(&conn).await?; + Ok(Json(user.to_json(&conn).await)) } #[get("/users//public-key")] -fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonResult { - let user = match User::find_by_uuid(&uuid, &conn) { +async fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonResult { + let user = match User::find_by_uuid(&uuid, &conn).await { Some(user) => user, None => err!("User doesn't exist"), }; @@ -202,7 +234,7 @@ fn get_public_keys(uuid: String, _headers: Headers, conn: DbConn) -> JsonResult } #[post("/accounts/keys", data = "")] -fn post_keys(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn post_keys(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: KeysData = data.into_inner().data; let mut user = headers.user; @@ -210,7 +242,7 @@ fn post_keys(data: JsonUpcase, headers: Headers, conn: DbConn) -> Json user.private_key = Some(data.EncryptedPrivateKey); user.public_key = Some(data.PublicKey); - user.save(&conn)?; + user.save(&conn).await?; Ok(Json(json!({ "PrivateKey": user.private_key, @@ -228,7 +260,7 @@ struct ChangePassData { } #[post("/accounts/password", data = "")] -fn 
post_password(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_password(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: ChangePassData = data.into_inner().data; let mut user = headers.user; @@ -241,7 +273,7 @@ fn post_password(data: JsonUpcase, headers: Headers, conn: DbCon Some(vec![String::from("post_rotatekey"), String::from("get_contacts"), String::from("get_public_keys")]), ); user.akey = data.Key; - user.save(&conn) + user.save(&conn).await } #[derive(Deserialize)] @@ -256,7 +288,7 @@ struct ChangeKdfData { } #[post("/accounts/kdf", data = "")] -fn post_kdf(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_kdf(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: ChangeKdfData = data.into_inner().data; let mut user = headers.user; @@ -268,7 +300,7 @@ fn post_kdf(data: JsonUpcase, headers: Headers, conn: DbConn) -> user.client_kdf_type = data.Kdf; user.set_password(&data.NewMasterPasswordHash, None); user.akey = data.Key; - user.save(&conn) + user.save(&conn).await } #[derive(Deserialize)] @@ -291,7 +323,7 @@ struct KeyData { } #[post("/accounts/key", data = "")] -fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +async fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { let data: KeyData = data.into_inner().data; if !headers.user.check_valid_password(&data.MasterPasswordHash) { @@ -302,7 +334,7 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: // Update folder data for folder_data in data.Folders { - let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &conn) { + let mut saved_folder = match Folder::find_by_uuid(&folder_data.Id, &conn).await { Some(folder) => folder, None => err!("Folder doesn't exist"), }; @@ -312,14 +344,14 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: } 
saved_folder.name = folder_data.Name; - saved_folder.save(&conn)? + saved_folder.save(&conn).await? } // Update cipher data use super::ciphers::update_cipher_from_data; for cipher_data in data.Ciphers { - let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &conn) { + let mut saved_cipher = match Cipher::find_by_uuid(cipher_data.Id.as_ref().unwrap(), &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; @@ -330,7 +362,7 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: // Prevent triggering cipher updates via WebSockets by settings UpdateType::None // The user sessions are invalidated because all the ciphers were re-encrypted and thus triggering an update could cause issues. - update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None)? + update_cipher_from_data(&mut saved_cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await? } // Update user data @@ -340,11 +372,11 @@ fn post_rotatekey(data: JsonUpcase, headers: Headers, conn: DbConn, nt: user.private_key = Some(data.PrivateKey); user.reset_security_stamp(); - user.save(&conn) + user.save(&conn).await } #[post("/accounts/security-stamp", data = "")] -fn post_sstamp(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_sstamp(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: PasswordData = data.into_inner().data; let mut user = headers.user; @@ -352,9 +384,9 @@ fn post_sstamp(data: JsonUpcase, headers: Headers, conn: DbConn) - err!("Invalid password") } - Device::delete_all_by_user(&user.uuid, &conn)?; + Device::delete_all_by_user(&user.uuid, &conn).await?; user.reset_security_stamp(); - user.save(&conn) + user.save(&conn).await } #[derive(Deserialize)] @@ -365,7 +397,7 @@ struct EmailTokenData { } #[post("/accounts/email-token", data = "")] -fn post_email_token(data: JsonUpcase, headers: Headers, conn: DbConn) -> 
EmptyResult { +async fn post_email_token(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: EmailTokenData = data.into_inner().data; let mut user = headers.user; @@ -373,7 +405,7 @@ fn post_email_token(data: JsonUpcase, headers: Headers, conn: Db err!("Invalid password") } - if User::find_by_mail(&data.NewEmail, &conn).is_some() { + if User::find_by_mail(&data.NewEmail, &conn).await.is_some() { err!("Email already in use"); } @@ -381,17 +413,17 @@ fn post_email_token(data: JsonUpcase, headers: Headers, conn: Db err!("Email domain not allowed"); } - let token = crypto::generate_token(6)?; + let token = crypto::generate_email_token(6); if CONFIG.mail_enabled() { - if let Err(e) = mail::send_change_email(&data.NewEmail, &token) { + if let Err(e) = mail::send_change_email(&data.NewEmail, &token).await { error!("Error sending change-email email: {:#?}", e); } } user.email_new = Some(data.NewEmail); user.email_new_token = Some(token); - user.save(&conn) + user.save(&conn).await } #[derive(Deserialize)] @@ -406,7 +438,7 @@ struct ChangeEmailData { } #[post("/accounts/email", data = "")] -fn post_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn post_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: ChangeEmailData = data.into_inner().data; let mut user = headers.user; @@ -414,7 +446,7 @@ fn post_email(data: JsonUpcase, headers: Headers, conn: DbConn) err!("Invalid password") } - if User::find_by_mail(&data.NewEmail, &conn).is_some() { + if User::find_by_mail(&data.NewEmail, &conn).await.is_some() { err!("Email already in use"); } @@ -449,18 +481,18 @@ fn post_email(data: JsonUpcase, headers: Headers, conn: DbConn) user.set_password(&data.NewMasterPasswordHash, None); user.akey = data.Key; - user.save(&conn) + user.save(&conn).await } #[post("/accounts/verify-email")] -fn post_verify_email(headers: Headers) -> EmptyResult { +async fn post_verify_email(headers: Headers) -> 
EmptyResult { let user = headers.user; if !CONFIG.mail_enabled() { err!("Cannot verify email address"); } - if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) { + if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await { error!("Error sending verify_email email: {:#?}", e); } @@ -475,10 +507,10 @@ struct VerifyEmailTokenData { } #[post("/accounts/verify-email-token", data = "")] -fn post_verify_email_token(data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn post_verify_email_token(data: JsonUpcase, conn: DbConn) -> EmptyResult { let data: VerifyEmailTokenData = data.into_inner().data; - let mut user = match User::find_by_uuid(&data.UserId, &conn) { + let mut user = match User::find_by_uuid(&data.UserId, &conn).await { Some(user) => user, None => err!("User doesn't exist"), }; @@ -493,7 +525,7 @@ fn post_verify_email_token(data: JsonUpcase, conn: DbConn) user.verified_at = Some(Utc::now().naive_utc()); user.last_verifying_at = None; user.login_verify_count = 0; - if let Err(e) = user.save(&conn) { + if let Err(e) = user.save(&conn).await { error!("Error saving email verification: {:#?}", e); } @@ -507,14 +539,12 @@ struct DeleteRecoverData { } #[post("/accounts/delete-recover", data = "")] -fn post_delete_recover(data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn post_delete_recover(data: JsonUpcase, conn: DbConn) -> EmptyResult { let data: DeleteRecoverData = data.into_inner().data; - let user = User::find_by_mail(&data.Email, &conn); - if CONFIG.mail_enabled() { - if let Some(user) = user { - if let Err(e) = mail::send_delete_account(&user.email, &user.uuid) { + if let Some(user) = User::find_by_mail(&data.Email, &conn).await { + if let Err(e) = mail::send_delete_account(&user.email, &user.uuid).await { error!("Error sending delete account email: {:#?}", e); } } @@ -536,10 +566,10 @@ struct DeleteRecoverTokenData { } #[post("/accounts/delete-recover-token", data = "")] -fn post_delete_recover_token(data: JsonUpcase, 
conn: DbConn) -> EmptyResult { +async fn post_delete_recover_token(data: JsonUpcase, conn: DbConn) -> EmptyResult { let data: DeleteRecoverTokenData = data.into_inner().data; - let user = match User::find_by_uuid(&data.UserId, &conn) { + let user = match User::find_by_uuid(&data.UserId, &conn).await { Some(user) => user, None => err!("User doesn't exist"), }; @@ -551,16 +581,16 @@ fn post_delete_recover_token(data: JsonUpcase, conn: DbC if claims.sub != user.uuid { err!("Invalid claim"); } - user.delete(&conn) + user.delete(&conn).await } #[post("/accounts/delete", data = "")] -fn post_delete_account(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { - delete_account(data, headers, conn) +async fn post_delete_account(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { + delete_account(data, headers, conn).await } #[delete("/accounts", data = "")] -fn delete_account(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn delete_account(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: PasswordData = data.into_inner().data; let user = headers.user; @@ -568,7 +598,7 @@ fn delete_account(data: JsonUpcase, headers: Headers, conn: DbConn err!("Invalid password") } - user.delete(&conn) + user.delete(&conn).await } #[get("/accounts/revision-date")] @@ -584,7 +614,7 @@ struct PasswordHintData { } #[post("/accounts/password-hint", data = "")] -fn password_hint(data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn password_hint(data: JsonUpcase, conn: DbConn) -> EmptyResult { if !CONFIG.mail_enabled() && !CONFIG.show_password_hint() { err!("This server is not configured to provide password hints."); } @@ -594,19 +624,18 @@ fn password_hint(data: JsonUpcase, conn: DbConn) -> EmptyResul let data: PasswordHintData = data.into_inner().data; let email = &data.Email; - match User::find_by_mail(email, &conn) { + match User::find_by_mail(email, &conn).await { None => { // To prevent user enumeration, 
act as if the user exists. if CONFIG.mail_enabled() { // There is still a timing side channel here in that the code // paths that send mail take noticeably longer than ones that // don't. Add a randomized sleep to mitigate this somewhat. - use rand::{thread_rng, Rng}; - let mut rng = thread_rng(); - let base = 1000; + use rand::{rngs::SmallRng, Rng, SeedableRng}; + let mut rng = SmallRng::from_entropy(); let delta: i32 = 100; - let sleep_ms = (base + rng.gen_range(-delta..=delta)) as u64; - std::thread::sleep(std::time::Duration::from_millis(sleep_ms)); + let sleep_ms = (1_000 + rng.gen_range(-delta..=delta)) as u64; + tokio::time::sleep(tokio::time::Duration::from_millis(sleep_ms)).await; Ok(()) } else { err!(NO_HINT); @@ -615,7 +644,7 @@ fn password_hint(data: JsonUpcase, conn: DbConn) -> EmptyResul Some(user) => { let hint: Option = user.password_hint; if CONFIG.mail_enabled() { - mail::send_password_hint(email, hint)?; + mail::send_password_hint(email, hint).await?; Ok(()) } else if let Some(hint) = hint { err!(format!("Your password hint is: {}", hint)); @@ -628,15 +657,19 @@ fn password_hint(data: JsonUpcase, conn: DbConn) -> EmptyResul #[derive(Deserialize)] #[allow(non_snake_case)] -struct PreloginData { +pub struct PreloginData { Email: String, } #[post("/accounts/prelogin", data = "")] -fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { +async fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { + _prelogin(data, conn).await +} + +pub async fn _prelogin(data: JsonUpcase, conn: DbConn) -> Json { let data: PreloginData = data.into_inner().data; - let (kdf_type, kdf_iter) = match User::find_by_mail(&data.Email, &conn) { + let (kdf_type, kdf_iter) = match User::find_by_mail(&data.Email, &conn).await { Some(user) => (user.client_kdf_type, user.client_kdf_iter), None => (User::CLIENT_KDF_TYPE_DEFAULT, User::CLIENT_KDF_ITER_DEFAULT), }; @@ -666,7 +699,12 @@ fn verify_password(data: JsonUpcase, headers: Headers Ok(()) } -fn _api_key(data: JsonUpcase, rotate: 
bool, headers: Headers, conn: DbConn) -> JsonResult { +async fn _api_key( + data: JsonUpcase, + rotate: bool, + headers: Headers, + conn: DbConn, +) -> JsonResult { let data: SecretVerificationRequest = data.into_inner().data; let mut user = headers.user; @@ -676,7 +714,7 @@ fn _api_key(data: JsonUpcase, rotate: bool, headers: if rotate || user.api_key.is_none() { user.api_key = Some(crypto::generate_api_key()); - user.save(&conn).expect("Error saving API key"); + user.save(&conn).await.expect("Error saving API key"); } Ok(Json(json!({ @@ -686,11 +724,11 @@ fn _api_key(data: JsonUpcase, rotate: bool, headers: } #[post("/accounts/api-key", data = "")] -fn api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - _api_key(data, false, headers, conn) +async fn api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + _api_key(data, false, headers, conn).await } #[post("/accounts/rotate-api-key", data = "")] -fn rotate_api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - _api_key(data, true, headers, conn) +async fn rotate_api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + _api_key(data, true, headers, conn).await } diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index ff193a3e..b491424e 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -1,13 +1,15 @@ use std::collections::{HashMap, HashSet}; -use std::path::{Path, PathBuf}; use chrono::{NaiveDateTime, Utc}; -use rocket::{http::ContentType, request::Form, Data, Route}; -use rocket_contrib::json::Json; +use futures::{stream, stream::StreamExt}; +use rocket::fs::TempFile; +use rocket::serde::json::Json; +use rocket::{ + form::{Form, FromForm}, + Route, +}; use serde_json::Value; -use multipart::server::{save::SavedData, Multipart, SaveResult}; - use crate::{ api::{self, EmptyResult, JsonResult, JsonUpcase, Notify, PasswordData, UpdateType}, auth::Headers, @@ -16,6 +18,8 @@ use crate::{ CONFIG, }; +use 
super::folders::FolderData; + pub fn routes() -> Vec { // Note that many routes have an `admin` variant; this seems to be // because the stored procedure that upstream Bitwarden uses to determine @@ -79,10 +83,10 @@ pub fn routes() -> Vec { ] } -pub fn purge_trashed_ciphers(pool: DbPool) { +pub async fn purge_trashed_ciphers(pool: DbPool) { debug!("Purging trashed ciphers"); - if let Ok(conn) = pool.get() { - Cipher::purge_trash(&conn); + if let Ok(conn) = pool.get().await { + Cipher::purge_trash(&conn).await; } else { error!("Failed to get DB connection while purging trashed ciphers") } @@ -90,30 +94,44 @@ pub fn purge_trashed_ciphers(pool: DbPool) { #[derive(FromForm, Default)] struct SyncData { - #[form(field = "excludeDomains")] + #[field(name = "excludeDomains")] exclude_domains: bool, // Default: 'false' } #[get("/sync?")] -fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { - let user_json = headers.user.to_json(&conn); +async fn sync(data: SyncData, headers: Headers, conn: DbConn) -> Json { + let user_json = headers.user.to_json(&conn).await; - let folders = Folder::find_by_user(&headers.user.uuid, &conn); - let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); + // Get all ciphers which are visible by the user + let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await; + + let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &conn).await; + + // Lets generate the ciphers_json using all the gathered info + let ciphers_json: Vec = stream::iter(ciphers) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await + }) + .collect() + .await; - let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn); - let collections_json: Vec = - collections.iter().map(|c| c.to_json_details(&headers.user.uuid, &conn)).collect(); + let collections_json: Vec = 
stream::iter(Collection::find_by_user_uuid(&headers.user.uuid, &conn).await) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json_details(&headers.user.uuid, Some(&cipher_sync_data), &conn).await + }) + .collect() + .await; - let policies = OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn); - let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); + let folders_json: Vec = + Folder::find_by_user(&headers.user.uuid, &conn).await.iter().map(Folder::to_json).collect(); - let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - let ciphers_json: Vec = - ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); + let sends_json: Vec = + Send::find_by_user(&headers.user.uuid, &conn).await.iter().map(Send::to_json).collect(); - let sends = Send::find_by_user(&headers.user.uuid, &conn); - let sends_json: Vec = sends.iter().map(|s| s.to_json()).collect(); + let policies_json: Vec = + OrgPolicy::find_confirmed_by_user(&headers.user.uuid, &conn).await.iter().map(OrgPolicy::to_json).collect(); let domains_json = if data.exclude_domains { Value::Null @@ -135,11 +153,17 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { } #[get("/ciphers")] -fn get_ciphers(headers: Headers, conn: DbConn) -> Json { - let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn); - - let ciphers_json: Vec = - ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); +async fn get_ciphers(headers: Headers, conn: DbConn) -> Json { + let ciphers = Cipher::find_by_user_visible(&headers.user.uuid, &conn).await; + let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, CipherSyncType::User, &conn).await; + + let ciphers_json = stream::iter(ciphers) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await + }) + .collect::>() + .await; 
Json(json!({ "Data": ciphers_json, @@ -149,28 +173,28 @@ fn get_ciphers(headers: Headers, conn: DbConn) -> Json { } #[get("/ciphers/")] -fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { - let cipher = match Cipher::find_by_uuid(&uuid, &conn) { +async fn get_cipher(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { + let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_accessible_to_user(&headers.user.uuid, &conn) { + if !cipher.is_accessible_to_user(&headers.user.uuid, &conn).await { err!("Cipher is not owned by user") } - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) } #[get("/ciphers//admin")] -fn get_cipher_admin(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn get_cipher_admin(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { // TODO: Implement this correctly - get_cipher(uuid, headers, conn) + get_cipher(uuid, headers, conn).await } #[get("/ciphers//details")] -fn get_cipher_details(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { - get_cipher(uuid, headers, conn) +async fn get_cipher_details(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { + get_cipher(uuid, headers, conn).await } #[derive(Deserialize, Debug)] @@ -189,7 +213,7 @@ pub struct CipherData { Card = 3, Identity = 4 */ - pub Type: i32, // TODO: Change this to NumberOrString + pub Type: i32, pub Name: String, Notes: Option, Fields: Option, @@ -206,8 +230,9 @@ pub struct CipherData { PasswordHistory: Option, // These are used during key rotation + // 'Attachments' is unused, contains map of {id: filename} #[serde(rename = "Attachments")] - _Attachments: Option, // Unused, contains map of {id: filename} + _Attachments: Option, Attachments2: Option>, // The revision datetime (in ISO 8601 format) of the client's local 
copy @@ -228,15 +253,25 @@ pub struct Attachments2Data { /// Called when an org admin clones an org cipher. #[post("/ciphers/admin", data = "")] -fn post_ciphers_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - post_ciphers_create(data, headers, conn, nt) +async fn post_ciphers_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + post_ciphers_create(data, headers, conn, nt).await } /// Called when creating a new org-owned cipher, or cloning a cipher (whether /// user- or org-owned). When cloning a cipher to a user-owned cipher, /// `organizationId` is null. #[post("/ciphers/create", data = "")] -fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { +async fn post_ciphers_create( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { let mut data: ShareCipherData = data.into_inner().data; // Check if there are one more more collections selected when this cipher is part of an organization. @@ -248,11 +283,11 @@ fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn // This check is usually only needed in update_cipher_from_data(), but we // need it here as well to avoid creating an empty cipher in the call to // cipher.save() below. - enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &conn)?; + enforce_personal_ownership_policy(Some(&data.Cipher), &headers, &conn).await?; let mut cipher = Cipher::new(data.Cipher.Type, data.Cipher.Name.clone()); cipher.user_uuid = Some(headers.user.uuid.clone()); - cipher.save(&conn)?; + cipher.save(&conn).await?; // When cloning a cipher, the Bitwarden clients seem to set this field // based on the cipher being cloned (when creating a new cipher, it's set @@ -262,12 +297,12 @@ fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn // or otherwise), we can just ignore this field entirely. 
data.Cipher.LastKnownRevisionDate = None; - share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt) + share_cipher_by_uuid(&cipher.uuid, data, &headers, &conn, &nt).await } /// Called when creating a new user-owned cipher. #[post("/ciphers", data = "")] -fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { +async fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { let mut data: CipherData = data.into_inner().data; // The web/browser clients set this field to null as expected, but the @@ -277,9 +312,9 @@ fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt data.LastKnownRevisionDate = None; let mut cipher = Cipher::new(data.Type, data.Name.clone()); - update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate)?; + update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherCreate).await?; - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) } /// Enforces the personal ownership policy on user-owned ciphers, if applicable. @@ -289,27 +324,27 @@ fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt /// allowed to delete or share such ciphers to an org, however. 
/// /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership -fn enforce_personal_ownership_policy(data: Option<&CipherData>, headers: &Headers, conn: &DbConn) -> EmptyResult { +async fn enforce_personal_ownership_policy(data: Option<&CipherData>, headers: &Headers, conn: &DbConn) -> EmptyResult { if data.is_none() || data.unwrap().OrganizationId.is_none() { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; - if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { + if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn).await { err!("Due to an Enterprise Policy, you are restricted from saving items to your personal vault.") } } Ok(()) } -pub fn update_cipher_from_data( +pub async fn update_cipher_from_data( cipher: &mut Cipher, data: CipherData, headers: &Headers, shared_to_collection: bool, conn: &DbConn, - nt: &Notify, + nt: &Notify<'_>, ut: UpdateType, ) -> EmptyResult { - enforce_personal_ownership_policy(Some(&data), headers, conn)?; + enforce_personal_ownership_policy(Some(&data), headers, conn).await?; // Check that the client isn't updating an existing cipher with stale data. if let Some(dt) = data.LastKnownRevisionDate { @@ -328,12 +363,12 @@ pub fn update_cipher_from_data( } if let Some(org_id) = data.OrganizationId { - match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn) { + match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, conn).await { None => err!("You don't have permission to add item to organization"), Some(org_user) => { if shared_to_collection || org_user.has_full_access() - || cipher.is_write_accessible_to_user(&headers.user.uuid, conn) + || cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { cipher.organization_uuid = Some(org_id); // After some discussion in PR #1329 re-added the user_uuid = None again. 
@@ -352,7 +387,7 @@ pub fn update_cipher_from_data( } if let Some(ref folder_id) = data.FolderId { - match Folder::find_by_uuid(folder_id, conn) { + match Folder::find_by_uuid(folder_id, conn).await { Some(folder) => { if folder.user_uuid != headers.user.uuid { err!("Folder is not owned by user") @@ -365,7 +400,7 @@ pub fn update_cipher_from_data( // Modify attachments name and keys when rotating if let Some(attachments) = data.Attachments2 { for (id, attachment) in attachments { - let mut saved_att = match Attachment::find_by_id(&id, conn) { + let mut saved_att = match Attachment::find_by_id(&id, conn).await { Some(att) => att, None => err!("Attachment doesn't exist"), }; @@ -380,7 +415,7 @@ pub fn update_cipher_from_data( saved_att.akey = Some(attachment.Key); saved_att.file_name = attachment.FileName; - saved_att.save(conn)?; + saved_att.save(conn).await?; } } @@ -426,19 +461,17 @@ pub fn update_cipher_from_data( cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); cipher.reprompt = data.Reprompt; - cipher.save(conn)?; - cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn)?; - cipher.set_favorite(data.Favorite, &headers.user.uuid, conn)?; + cipher.save(conn).await?; + cipher.move_to_folder(data.FolderId, &headers.user.uuid, conn).await?; + cipher.set_favorite(data.Favorite, &headers.user.uuid, conn).await?; if ut != UpdateType::None { - nt.send_cipher_update(ut, cipher, &cipher.update_users_revision(conn)); + nt.send_cipher_update(ut, cipher, &cipher.update_users_revision(conn).await).await; } Ok(()) } -use super::folders::FolderData; - #[derive(Deserialize)] #[allow(non_snake_case)] struct ImportData { @@ -457,8 +490,13 @@ struct RelationsData { } #[post("/ciphers/import", data = "")] -fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - enforce_personal_ownership_policy(None, &headers, &conn)?; +async fn post_ciphers_import( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + 
nt: Notify<'_>, +) -> EmptyResult { + enforce_personal_ownership_policy(None, &headers, &conn).await?; let data: ImportData = data.into_inner().data; @@ -466,7 +504,7 @@ fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: DbC let mut folders: Vec<_> = Vec::new(); for folder in data.Folders.into_iter() { let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.Name); - new_folder.save(&conn)?; + new_folder.save(&conn).await?; folders.push(new_folder); } @@ -484,48 +522,60 @@ fn post_ciphers_import(data: JsonUpcase, headers: Headers, conn: DbC cipher_data.FolderId = folder_uuid; let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None)?; + update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await?; } let mut user = headers.user; - user.update_revision(&conn)?; - nt.send_user_update(UpdateType::Vault, &user); + user.update_revision(&conn).await?; + nt.send_user_update(UpdateType::Vault, &user).await; Ok(()) } /// Called when an org admin modifies an existing org cipher. 
#[put("/ciphers//admin", data = "")] -fn put_cipher_admin( +async fn put_cipher_admin( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { - put_cipher(uuid, data, headers, conn, nt) + put_cipher(uuid, data, headers, conn, nt).await } #[post("/ciphers//admin", data = "")] -fn post_cipher_admin( +async fn post_cipher_admin( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { - post_cipher(uuid, data, headers, conn, nt) + post_cipher(uuid, data, headers, conn, nt).await } #[post("/ciphers/", data = "")] -fn post_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - put_cipher(uuid, data, headers, conn, nt) +async fn post_cipher( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + put_cipher(uuid, data, headers, conn, nt).await } #[put("/ciphers/", data = "")] -fn put_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { +async fn put_cipher( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { let data: CipherData = data.into_inner().data; - let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) { + let mut cipher = match Cipher::find_by_uuid(&uuid, &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; @@ -535,13 +585,13 @@ fn put_cipher(uuid: String, data: JsonUpcase, headers: Headers, conn // cipher itself, so the user shouldn't need write access to change these. // Interestingly, upstream Bitwarden doesn't properly handle this either. 
- if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { err!("Cipher is not write accessible") } - update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate)?; + update_cipher_from_data(&mut cipher, data, &headers, false, &conn, &nt, UpdateType::CipherUpdate).await?; - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) } #[derive(Deserialize)] @@ -551,37 +601,37 @@ struct CollectionsAdminData { } #[put("/ciphers//collections", data = "")] -fn put_collections_update( +async fn put_collections_update( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ) -> EmptyResult { - post_collections_admin(uuid, data, headers, conn) + post_collections_admin(uuid, data, headers, conn).await } #[post("/ciphers//collections", data = "")] -fn post_collections_update( +async fn post_collections_update( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ) -> EmptyResult { - post_collections_admin(uuid, data, headers, conn) + post_collections_admin(uuid, data, headers, conn).await } #[put("/ciphers//collections-admin", data = "")] -fn put_collections_admin( +async fn put_collections_admin( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ) -> EmptyResult { - post_collections_admin(uuid, data, headers, conn) + post_collections_admin(uuid, data, headers, conn).await } #[post("/ciphers//collections-admin", data = "")] -fn post_collections_admin( +async fn post_collections_admin( uuid: String, data: JsonUpcase, headers: Headers, @@ -589,30 +639,30 @@ fn post_collections_admin( ) -> EmptyResult { let data: CollectionsAdminData = data.into_inner().data; - let cipher = match Cipher::find_by_uuid(&uuid, &conn) { + let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't 
exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { err!("Cipher is not write accessible") } let posted_collections: HashSet = data.CollectionIds.iter().cloned().collect(); let current_collections: HashSet = - cipher.get_collections(&headers.user.uuid, &conn).iter().cloned().collect(); + cipher.get_collections(&headers.user.uuid, &conn).await.iter().cloned().collect(); for collection in posted_collections.symmetric_difference(¤t_collections) { - match Collection::find_by_uuid(collection, &conn) { + match Collection::find_by_uuid(collection, &conn).await { None => err!("Invalid collection ID provided"), Some(collection) => { - if collection.is_writable_by_user(&headers.user.uuid, &conn) { + if collection.is_writable_by_user(&headers.user.uuid, &conn).await { if posted_collections.contains(&collection.uuid) { // Add to collection - CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn)?; + CollectionCipher::save(&cipher.uuid, &collection.uuid, &conn).await?; } else { // Remove from collection - CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn)?; + CollectionCipher::delete(&cipher.uuid, &collection.uuid, &conn).await?; } } else { err!("No rights to modify the collection") @@ -632,29 +682,29 @@ struct ShareCipherData { } #[post("/ciphers//share", data = "")] -fn post_cipher_share( +async fn post_cipher_share( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { let data: ShareCipherData = data.into_inner().data; - share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt) + share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt).await } #[put("/ciphers//share", data = "")] -fn put_cipher_share( +async fn put_cipher_share( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { let data: ShareCipherData = data.into_inner().data; - 
share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt) + share_cipher_by_uuid(&uuid, data, &headers, &conn, &nt).await } #[derive(Deserialize)] @@ -665,11 +715,11 @@ struct ShareSelectedCipherData { } #[put("/ciphers/share", data = "")] -fn put_cipher_share_selected( +async fn put_cipher_share_selected( data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { let mut data: ShareSelectedCipherData = data.into_inner().data; let mut cipher_ids: Vec = Vec::new(); @@ -696,7 +746,7 @@ fn put_cipher_share_selected( }; match shared_cipher_data.Cipher.Id.take() { - Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &conn, &nt)?, + Some(id) => share_cipher_by_uuid(&id, shared_cipher_data, &headers, &conn, &nt).await?, None => err!("Request missing ids field"), }; } @@ -704,16 +754,16 @@ fn put_cipher_share_selected( Ok(()) } -fn share_cipher_by_uuid( +async fn share_cipher_by_uuid( uuid: &str, data: ShareCipherData, headers: &Headers, conn: &DbConn, - nt: &Notify, + nt: &Notify<'_>, ) -> JsonResult { - let mut cipher = match Cipher::find_by_uuid(uuid, conn) { + let mut cipher = match Cipher::find_by_uuid(uuid, conn).await { Some(cipher) => { - if cipher.is_write_accessible_to_user(&headers.user.uuid, conn) { + if cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { cipher } else { err!("Cipher is not write accessible") @@ -730,11 +780,11 @@ fn share_cipher_by_uuid( None => {} Some(organization_uuid) => { for uuid in &data.CollectionIds { - match Collection::find_by_uuid_and_org(uuid, &organization_uuid, conn) { + match Collection::find_by_uuid_and_org(uuid, &organization_uuid, conn).await { None => err!("Invalid collection ID provided"), Some(collection) => { - if collection.is_writable_by_user(&headers.user.uuid, conn) { - CollectionCipher::save(&cipher.uuid, &collection.uuid, conn)?; + if collection.is_writable_by_user(&headers.user.uuid, conn).await { + CollectionCipher::save(&cipher.uuid, 
&collection.uuid, conn).await?; shared_to_collection = true; } else { err!("No rights to modify the collection") @@ -753,9 +803,10 @@ fn share_cipher_by_uuid( conn, nt, UpdateType::CipherUpdate, - )?; + ) + .await?; - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn))) + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await)) } /// v2 API for downloading an attachment. This just redirects the client to @@ -765,8 +816,8 @@ fn share_cipher_by_uuid( /// their object storage service. For self-hosted instances, it basically just /// redirects to the same location as before the v2 API. #[get("/ciphers//attachment/")] -fn get_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn) -> JsonResult { - match Attachment::find_by_id(&attachment_id, &conn) { +async fn get_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn) -> JsonResult { + match Attachment::find_by_id(&attachment_id, &conn).await { Some(attachment) if uuid == attachment.cipher_uuid => Ok(Json(attachment.to_json(&headers.host))), Some(_) => err!("Attachment doesn't belong to cipher"), None => err!("Attachment doesn't exist"), @@ -792,18 +843,18 @@ enum FileUploadType { /// For upstream's cloud-hosted service, it's an Azure object storage API. /// For self-hosted instances, it's another API on the local instance. 
#[post("/ciphers//attachment/v2", data = "")] -fn post_attachment_v2( +async fn post_attachment_v2( uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, ) -> JsonResult { - let cipher = match Cipher::find_by_uuid(&uuid, &conn) { + let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { err!("Cipher is not write accessible") } @@ -811,7 +862,7 @@ fn post_attachment_v2( let data: AttachmentRequestData = data.into_inner().data; let attachment = Attachment::new(attachment_id.clone(), cipher.uuid.clone(), data.FileName, data.FileSize, Some(data.Key)); - attachment.save(&conn).expect("Error saving attachment"); + attachment.save(&conn).await.expect("Error saving attachment"); let url = format!("/ciphers/{}/attachment/{}", cipher.uuid, attachment_id); let response_key = match data.AdminRequest { @@ -824,10 +875,16 @@ fn post_attachment_v2( "AttachmentId": attachment_id, "Url": url, "FileUploadType": FileUploadType::Direct as i32, - response_key: cipher.to_json(&headers.host, &headers.user.uuid, &conn), + response_key: cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await, }))) } +#[derive(FromForm)] +struct UploadData<'f> { + key: Option, + data: TempFile<'f>, +} + /// Saves the data content of an attachment to a file. This is common code /// shared between the v2 and legacy attachment APIs. /// @@ -836,38 +893,37 @@ fn post_attachment_v2( /// /// When used with the v2 API, post_attachment_v2() has already created the /// database record, which is passed in as `attachment`. 
-fn save_attachment( +async fn save_attachment( mut attachment: Option, cipher_uuid: String, - data: Data, - content_type: &ContentType, + data: Form>, headers: &Headers, - conn: &DbConn, - nt: Notify, -) -> Result { - let cipher = match Cipher::find_by_uuid(&cipher_uuid, conn) { + conn: DbConn, + nt: Notify<'_>, +) -> Result<(Cipher, DbConn), crate::error::Error> { + let cipher = match Cipher::find_by_uuid(&cipher_uuid, &conn).await { Some(cipher) => cipher, - None => err_discard!("Cipher doesn't exist", data), + None => err!("Cipher doesn't exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) { - err_discard!("Cipher is not write accessible", data) + if !cipher.is_write_accessible_to_user(&headers.user.uuid, &conn).await { + err!("Cipher is not write accessible") } // In the v2 API, the attachment record has already been created, // so the size limit needs to be adjusted to account for that. let size_adjust = match &attachment { - None => 0, // Legacy API - Some(a) => a.file_size as i64, // v2 API + None => 0, // Legacy API + Some(a) => i64::from(a.file_size), // v2 API }; let size_limit = if let Some(ref user_uuid) = cipher.user_uuid { match CONFIG.user_attachment_limit() { - Some(0) => err_discard!("Attachments are disabled", data), + Some(0) => err!("Attachments are disabled"), Some(limit_kb) => { - let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, conn) + size_adjust; + let left = (limit_kb * 1024) - Attachment::size_by_user(user_uuid, &conn).await + size_adjust; if left <= 0 { - err_discard!("Attachment storage limit reached! Delete some attachments to free up space", data) + err!("Attachment storage limit reached! 
Delete some attachments to free up space") } Some(left as u64) } @@ -875,130 +931,91 @@ fn save_attachment( } } else if let Some(ref org_uuid) = cipher.organization_uuid { match CONFIG.org_attachment_limit() { - Some(0) => err_discard!("Attachments are disabled", data), + Some(0) => err!("Attachments are disabled"), Some(limit_kb) => { - let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, conn) + size_adjust; + let left = (limit_kb * 1024) - Attachment::size_by_org(org_uuid, &conn).await + size_adjust; if left <= 0 { - err_discard!("Attachment storage limit reached! Delete some attachments to free up space", data) + err!("Attachment storage limit reached! Delete some attachments to free up space") } Some(left as u64) } None => None, } } else { - err_discard!("Cipher is neither owned by a user nor an organization", data); + err!("Cipher is neither owned by a user nor an organization"); }; - let mut params = content_type.params(); - let boundary_pair = params.next().expect("No boundary provided"); - let boundary = boundary_pair.1; + let mut data = data.into_inner(); + + // There seems to be a bug somewhere regarding uploading attachments using the Android Client (Maybe iOS too?) + // See: https://github.com/dani-garcia/vaultwarden/issues/2644 + // Since all other clients seem to match TempFile::File and not TempFile::Buffered lets catch this and return an error for now. + // We need to figure out how to solve this, but for now it's better to not accept these attachments since they will be broken. + if let TempFile::Buffered { + content: _, + } = &data.data + { + err!("Error reading attachment data. 
Please try an other client."); + } - let base_path = Path::new(&CONFIG.attachments_folder()).join(&cipher_uuid); - let mut path = PathBuf::new(); + if let Some(size_limit) = size_limit { + if data.data.len() > size_limit { + err!("Attachment storage limit exceeded with this file"); + } + } - let mut attachment_key = None; - let mut error = None; + let file_id = match &attachment { + Some(attachment) => attachment.id.clone(), // v2 API + None => crypto::generate_attachment_id(), // Legacy API + }; - Multipart::with_body(data.open(), boundary) - .foreach_entry(|mut field| { - match &*field.headers.name { - "key" => { - use std::io::Read; - let mut key_buffer = String::new(); - if field.data.read_to_string(&mut key_buffer).is_ok() { - attachment_key = Some(key_buffer); - } - } - "data" => { - // In the legacy API, this is the encrypted filename - // provided by the client, stored to the database as-is. - // In the v2 API, this value doesn't matter, as it was - // already provided and stored via an earlier API call. - let encrypted_filename = field.headers.filename; - - // This random ID is used as the name of the file on disk. - // In the legacy API, we need to generate this value here. - // In the v2 API, we use the value from post_attachment_v2(). 
- let file_id = match &attachment { - Some(attachment) => attachment.id.clone(), // v2 API - None => crypto::generate_attachment_id(), // Legacy API - }; - path = base_path.join(&file_id); - - let size = - match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { - SaveResult::Full(SavedData::File(_, size)) => size as i32, - SaveResult::Full(other) => { - error = Some(format!("Attachment is not a file: {:?}", other)); - return; - } - SaveResult::Partial(_, reason) => { - error = Some(format!("Attachment storage limit exceeded with this file: {:?}", reason)); - return; - } - SaveResult::Error(e) => { - error = Some(format!("Error: {:?}", e)); - return; - } - }; - - if let Some(attachment) = &mut attachment { - // v2 API - - // Check the actual size against the size initially provided by - // the client. Upstream allows +/- 1 MiB deviation from this - // size, but it's not clear when or why this is needed. - const LEEWAY: i32 = 1024 * 1024; // 1 MiB - let min_size = attachment.file_size - LEEWAY; - let max_size = attachment.file_size + LEEWAY; - - if min_size <= size && size <= max_size { - if size != attachment.file_size { - // Update the attachment with the actual file size. - attachment.file_size = size; - attachment.save(conn).expect("Error updating attachment"); - } - } else { - attachment.delete(conn).ok(); + let folder_path = tokio::fs::canonicalize(&CONFIG.attachments_folder()).await?.join(&cipher_uuid); + let file_path = folder_path.join(&file_id); + tokio::fs::create_dir_all(&folder_path).await?; + + let size = data.data.len() as i32; + if let Some(attachment) = &mut attachment { + // v2 API + + // Check the actual size against the size initially provided by + // the client. Upstream allows +/- 1 MiB deviation from this + // size, but it's not clear when or why this is needed. 
+ const LEEWAY: i32 = 1024 * 1024; // 1 MiB + let min_size = attachment.file_size - LEEWAY; + let max_size = attachment.file_size + LEEWAY; + + if min_size <= size && size <= max_size { + if size != attachment.file_size { + // Update the attachment with the actual file size. + attachment.file_size = size; + attachment.save(&conn).await.expect("Error updating attachment"); + } + } else { + attachment.delete(&conn).await.ok(); - let err_msg = "Attachment size mismatch".to_string(); - error!("{} (expected within [{}, {}], got {})", err_msg, min_size, max_size, size); - error = Some(err_msg); - } - } else { - // Legacy API + err!(format!("Attachment size mismatch (expected within [{}, {}], got {})", min_size, max_size, size)); + } + } else { + // Legacy API + let encrypted_filename = data.data.raw_name().map(|s| s.dangerous_unsafe_unsanitized_raw().to_string()); - if encrypted_filename.is_none() { - error = Some("No filename provided".to_string()); - return; - } - if attachment_key.is_none() { - error = Some("No attachment key provided".to_string()); - return; - } - let attachment = Attachment::new( - file_id, - cipher_uuid.clone(), - encrypted_filename.unwrap(), - size, - attachment_key.clone(), - ); - attachment.save(conn).expect("Error saving attachment"); - } - } - _ => error!("Invalid multipart name"), - } - }) - .expect("Error processing multipart data"); + if encrypted_filename.is_none() { + err!("No filename provided") + } + if data.key.is_none() { + err!("No attachment key provided") + } + let attachment = Attachment::new(file_id, cipher_uuid.clone(), encrypted_filename.unwrap(), size, data.key); + attachment.save(&conn).await.expect("Error saving attachment"); + } - if let Some(ref e) = error { - std::fs::remove_file(path).ok(); - err!(e); + if let Err(_err) = data.data.persist_to(&file_path).await { + data.data.move_copy_to(file_path).await? 
} - nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn)); + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(&conn).await).await; - Ok(cipher) + Ok((cipher, conn)) } /// v2 API for uploading the actual data content of an attachment. @@ -1006,192 +1023,219 @@ fn save_attachment( /// /ciphers//attachment/v2 route, which would otherwise conflict /// with this one. #[post("/ciphers//attachment/", format = "multipart/form-data", data = "", rank = 1)] -fn post_attachment_v2_data( +async fn post_attachment_v2_data( uuid: String, attachment_id: String, - data: Data, - content_type: &ContentType, + data: Form>, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - let attachment = match Attachment::find_by_id(&attachment_id, &conn) { + let attachment = match Attachment::find_by_id(&attachment_id, &conn).await { Some(attachment) if uuid == attachment.cipher_uuid => Some(attachment), Some(_) => err!("Attachment doesn't belong to cipher"), None => err!("Attachment doesn't exist"), }; - save_attachment(attachment, uuid, data, content_type, &headers, &conn, nt)?; + save_attachment(attachment, uuid, data, &headers, conn, nt).await?; Ok(()) } /// Legacy API for creating an attachment associated with a cipher. #[post("/ciphers//attachment", format = "multipart/form-data", data = "")] -fn post_attachment( +async fn post_attachment( uuid: String, - data: Data, - content_type: &ContentType, + data: Form>, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { // Setting this as None signifies to save_attachment() that it should create // the attachment database record as well as saving the data to disk. 
let attachment = None; - let cipher = save_attachment(attachment, uuid, data, content_type, &headers, &conn, nt)?; + let (cipher, conn) = save_attachment(attachment, uuid, data, &headers, conn, nt).await?; - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, &conn))) + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, &conn).await)) } #[post("/ciphers//attachment-admin", format = "multipart/form-data", data = "")] -fn post_attachment_admin( +async fn post_attachment_admin( uuid: String, - data: Data, - content_type: &ContentType, + data: Form>, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { - post_attachment(uuid, data, content_type, headers, conn, nt) + post_attachment(uuid, data, headers, conn, nt).await } #[post("/ciphers//attachment//share", format = "multipart/form-data", data = "")] -fn post_attachment_share( +async fn post_attachment_share( uuid: String, attachment_id: String, - data: Data, - content_type: &ContentType, + data: Form>, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> JsonResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt)?; - post_attachment(uuid, data, content_type, headers, conn, nt) + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await?; + post_attachment(uuid, data, headers, conn, nt).await } #[post("/ciphers//attachment//delete-admin")] -fn delete_attachment_post_admin( +async fn delete_attachment_post_admin( uuid: String, attachment_id: String, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - delete_attachment(uuid, attachment_id, headers, conn, nt) + delete_attachment(uuid, attachment_id, headers, conn, nt).await } #[post("/ciphers//attachment//delete")] -fn delete_attachment_post( +async fn delete_attachment_post( uuid: String, attachment_id: String, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - delete_attachment(uuid, 
attachment_id, headers, conn, nt) + delete_attachment(uuid, attachment_id, headers, conn, nt).await } #[delete("/ciphers//attachment/")] -fn delete_attachment(uuid: String, attachment_id: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt) +async fn delete_attachment( + uuid: String, + attachment_id: String, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await } #[delete("/ciphers//attachment//admin")] -fn delete_attachment_admin( +async fn delete_attachment_admin( uuid: String, attachment_id: String, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt) + _delete_cipher_attachment_by_id(&uuid, &attachment_id, &headers, &conn, &nt).await } #[post("/ciphers//delete")] -fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt) +async fn delete_cipher_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await } #[post("/ciphers//delete-admin")] -fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt) +async fn delete_cipher_post_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await } #[put("/ciphers//delete")] -fn delete_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt) +async fn delete_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + 
_delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt).await } #[put("/ciphers//delete-admin")] -fn delete_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt) +async fn delete_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, true, &nt).await } #[delete("/ciphers/")] -fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt) +async fn delete_cipher(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await } #[delete("/ciphers//admin")] -fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt) +async fn delete_cipher_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + _delete_cipher_by_uuid(&uuid, &headers, &conn, false, &nt).await } #[delete("/ciphers", data = "")] -fn delete_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_multiple_ciphers(data, headers, conn, false, nt) +async fn delete_cipher_selected( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { + _delete_multiple_ciphers(data, headers, conn, false, nt).await } #[post("/ciphers/delete", data = "")] -fn delete_cipher_selected_post(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_multiple_ciphers(data, headers, conn, false, nt) +async fn delete_cipher_selected_post( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { + _delete_multiple_ciphers(data, headers, conn, false, nt).await } #[put("/ciphers/delete", data = "")] 
-fn delete_cipher_selected_put(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - _delete_multiple_ciphers(data, headers, conn, true, nt) // soft delete +async fn delete_cipher_selected_put( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { + _delete_multiple_ciphers(data, headers, conn, true, nt).await // soft delete } #[delete("/ciphers/admin", data = "")] -fn delete_cipher_selected_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - delete_cipher_selected(data, headers, conn, nt) +async fn delete_cipher_selected_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { + delete_cipher_selected(data, headers, conn, nt).await } #[post("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_post_admin( +async fn delete_cipher_selected_post_admin( data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - delete_cipher_selected_post(data, headers, conn, nt) + delete_cipher_selected_post(data, headers, conn, nt).await } #[put("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_put_admin( +async fn delete_cipher_selected_put_admin( data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - delete_cipher_selected_put(data, headers, conn, nt) + delete_cipher_selected_put(data, headers, conn, nt).await } #[put("/ciphers//restore")] -fn restore_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - _restore_cipher_by_uuid(&uuid, &headers, &conn, &nt) +async fn restore_cipher_put(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + _restore_cipher_by_uuid(&uuid, &headers, &conn, &nt).await } #[put("/ciphers//restore-admin")] -fn restore_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - _restore_cipher_by_uuid(&uuid, 
&headers, &conn, &nt) +async fn restore_cipher_put_admin(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + _restore_cipher_by_uuid(&uuid, &headers, &conn, &nt).await } #[put("/ciphers/restore", data = "")] -fn restore_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - _restore_multiple_ciphers(data, &headers, &conn, &nt) +async fn restore_cipher_selected( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + _restore_multiple_ciphers(data, &headers, &conn, &nt).await } #[derive(Deserialize)] @@ -1202,12 +1246,17 @@ struct MoveCipherData { } #[post("/ciphers/move", data = "")] -fn move_cipher_selected(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +async fn move_cipher_selected( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> EmptyResult { let data = data.into_inner().data; let user_uuid = headers.user.uuid; if let Some(ref folder_id) = data.FolderId { - match Folder::find_by_uuid(folder_id, &conn) { + match Folder::find_by_uuid(folder_id, &conn).await { Some(folder) => { if folder.user_uuid != user_uuid { err!("Folder is not owned by user") @@ -1218,47 +1267,47 @@ fn move_cipher_selected(data: JsonUpcase, headers: Headers, conn } for uuid in data.Ids { - let cipher = match Cipher::find_by_uuid(&uuid, &conn) { + let cipher = match Cipher::find_by_uuid(&uuid, &conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_accessible_to_user(&user_uuid, &conn) { + if !cipher.is_accessible_to_user(&user_uuid, &conn).await { err!("Cipher is not accessible by user") } // Move cipher - cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &conn)?; + cipher.move_to_folder(data.FolderId.clone(), &user_uuid, &conn).await?; - nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &[user_uuid.clone()]); + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, 
&[user_uuid.clone()]).await; } Ok(()) } #[put("/ciphers/move", data = "")] -fn move_cipher_selected_put( +async fn move_cipher_selected_put( data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { - move_cipher_selected(data, headers, conn, nt) + move_cipher_selected(data, headers, conn, nt).await } #[derive(FromForm)] struct OrganizationId { - #[form(field = "organizationId")] + #[field(name = "organizationId")] org_id: String, } #[post("/ciphers/purge?", data = "")] -fn delete_all( - organization: Option>, +async fn delete_all( + organization: Option, data: JsonUpcase, headers: Headers, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { let data: PasswordData = data.into_inner().data; let password_hash = data.MasterPasswordHash; @@ -1272,12 +1321,12 @@ fn delete_all( match organization { Some(org_data) => { // Organization ID in query params, purging organization vault - match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &conn) { + match UserOrganization::find_by_user_and_org(&user.uuid, &org_data.org_id, &conn).await { None => err!("You don't have permission to purge the organization vault"), Some(user_org) => { if user_org.atype == UserOrgType::Owner { - Cipher::delete_all_by_organization(&org_data.org_id, &conn)?; - nt.send_user_update(UpdateType::Vault, &user); + Cipher::delete_all_by_organization(&org_data.org_id, &conn).await?; + nt.send_user_update(UpdateType::Vault, &user).await; Ok(()) } else { err!("You don't have permission to purge the organization vault"); @@ -1288,50 +1337,56 @@ fn delete_all( None => { // No organization ID in query params, purging user vault // Delete ciphers and their attachments - for cipher in Cipher::find_owned_by_user(&user.uuid, &conn) { - cipher.delete(&conn)?; + for cipher in Cipher::find_owned_by_user(&user.uuid, &conn).await { + cipher.delete(&conn).await?; } // Delete folders - for f in Folder::find_by_user(&user.uuid, &conn) { - 
f.delete(&conn)?; + for f in Folder::find_by_user(&user.uuid, &conn).await { + f.delete(&conn).await?; } - user.update_revision(&conn)?; - nt.send_user_update(UpdateType::Vault, &user); + user.update_revision(&conn).await?; + nt.send_user_update(UpdateType::Vault, &user).await; Ok(()) } } } -fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_delete: bool, nt: &Notify) -> EmptyResult { - let mut cipher = match Cipher::find_by_uuid(uuid, conn) { +async fn _delete_cipher_by_uuid( + uuid: &str, + headers: &Headers, + conn: &DbConn, + soft_delete: bool, + nt: &Notify<'_>, +) -> EmptyResult { + let mut cipher = match Cipher::find_by_uuid(uuid, conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { err!("Cipher can't be deleted by user") } if soft_delete { cipher.deleted_at = Some(Utc::now().naive_utc()); - cipher.save(conn)?; - nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn)); + cipher.save(conn).await?; + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await).await; } else { - cipher.delete(conn)?; - nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(conn)); + cipher.delete(conn).await?; + nt.send_cipher_update(UpdateType::CipherDelete, &cipher, &cipher.update_users_revision(conn).await).await; } Ok(()) } -fn _delete_multiple_ciphers( +async fn _delete_multiple_ciphers( data: JsonUpcase, headers: Headers, conn: DbConn, soft_delete: bool, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { let data: Value = data.into_inner().data; @@ -1344,7 +1399,7 @@ fn _delete_multiple_ciphers( }; for uuid in uuids { - if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, &conn, soft_delete, &nt) { + if let error @ Err(_) = _delete_cipher_by_uuid(uuid, &headers, 
&conn, soft_delete, &nt).await { return error; }; } @@ -1352,24 +1407,29 @@ fn _delete_multiple_ciphers( Ok(()) } -fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify) -> JsonResult { - let mut cipher = match Cipher::find_by_uuid(uuid, conn) { +async fn _restore_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, nt: &Notify<'_>) -> JsonResult { + let mut cipher = match Cipher::find_by_uuid(uuid, conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { err!("Cipher can't be restored by user") } cipher.deleted_at = None; - cipher.save(conn)?; + cipher.save(conn).await?; - nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn)); - Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, conn))) + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await).await; + Ok(Json(cipher.to_json(&headers.host, &headers.user.uuid, None, conn).await)) } -fn _restore_multiple_ciphers(data: JsonUpcase, headers: &Headers, conn: &DbConn, nt: &Notify) -> JsonResult { +async fn _restore_multiple_ciphers( + data: JsonUpcase, + headers: &Headers, + conn: &DbConn, + nt: &Notify<'_>, +) -> JsonResult { let data: Value = data.into_inner().data; let uuids = match data.get("Ids") { @@ -1382,7 +1442,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase, headers: &Headers, conn: & let mut ciphers: Vec = Vec::new(); for uuid in uuids { - match _restore_cipher_by_uuid(uuid, headers, conn, nt) { + match _restore_cipher_by_uuid(uuid, headers, conn, nt).await { Ok(json) => ciphers.push(json.into_inner()), err => return err, } @@ -1395,14 +1455,14 @@ fn _restore_multiple_ciphers(data: JsonUpcase, headers: &Headers, conn: & }))) } -fn _delete_cipher_attachment_by_id( +async fn _delete_cipher_attachment_by_id( uuid: &str, 
attachment_id: &str, headers: &Headers, conn: &DbConn, - nt: &Notify, + nt: &Notify<'_>, ) -> EmptyResult { - let attachment = match Attachment::find_by_id(attachment_id, conn) { + let attachment = match Attachment::find_by_id(attachment_id, conn).await { Some(attachment) => attachment, None => err!("Attachment doesn't exist"), }; @@ -1411,17 +1471,94 @@ fn _delete_cipher_attachment_by_id( err!("Attachment from other cipher") } - let cipher = match Cipher::find_by_uuid(uuid, conn) { + let cipher = match Cipher::find_by_uuid(uuid, conn).await { Some(cipher) => cipher, None => err!("Cipher doesn't exist"), }; - if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn) { + if !cipher.is_write_accessible_to_user(&headers.user.uuid, conn).await { err!("Cipher cannot be deleted by user") } // Delete attachment - attachment.delete(conn)?; - nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn)); + attachment.delete(conn).await?; + nt.send_cipher_update(UpdateType::CipherUpdate, &cipher, &cipher.update_users_revision(conn).await).await; Ok(()) } + +/// This will hold all the necessary data to improve a full sync of all the ciphers +/// It can be used during the `Cipher::to_json()` call. +/// It will prevent the so called N+1 SQL issue by running just a few queries which will hold all the data needed. +/// This will not improve the speed of a single cipher.to_json() call that much, so better not to use it for those calls. +pub struct CipherSyncData { + pub cipher_attachments: HashMap>, + pub cipher_folders: HashMap, + pub cipher_favorites: HashSet, + pub cipher_collections: HashMap>, + pub user_organizations: HashMap, + pub user_collections: HashMap, +} + +pub enum CipherSyncType { + User, + Organization, +} + +impl CipherSyncData { + pub async fn new(user_uuid: &str, ciphers: &Vec, sync_type: CipherSyncType, conn: &DbConn) -> Self { + // Generate a list of Cipher UUID's to be used during a query filter with an eq_any. 
+ let cipher_uuids = stream::iter(ciphers).map(|c| c.uuid.clone()).collect::>().await; + + let mut cipher_folders: HashMap = HashMap::new(); + let mut cipher_favorites: HashSet = HashSet::new(); + match sync_type { + // User Sync supports Folders and Favorites + CipherSyncType::User => { + // Generate a HashMap with the Cipher UUID as key and the Folder UUID as value + cipher_folders = stream::iter(FolderCipher::find_by_user(user_uuid, conn).await).collect().await; + + // Generate a HashSet of all the Cipher UUID's which are marked as favorite + cipher_favorites = + stream::iter(Favorite::get_all_cipher_uuid_by_user(user_uuid, conn).await).collect().await; + } + // Organization Sync does not support Folders and Favorites. + // If these are set, it will cause issues in the web-vault. + CipherSyncType::Organization => {} + } + + // Generate a list of Cipher UUID's containing a Vec with one or more Attachment records + let mut cipher_attachments: HashMap> = HashMap::new(); + for attachment in Attachment::find_all_by_ciphers(&cipher_uuids, conn).await { + cipher_attachments.entry(attachment.cipher_uuid.clone()).or_default().push(attachment); + } + + // Generate a HashMap with the Cipher UUID as key and one or more Collection UUID's + let mut cipher_collections: HashMap> = HashMap::new(); + for (cipher, collection) in Cipher::get_collections_with_cipher_by_user(user_uuid, conn).await { + cipher_collections.entry(cipher).or_default().push(collection); + } + + // Generate a HashMap with the Organization UUID as key and the UserOrganization record + let user_organizations: HashMap = + stream::iter(UserOrganization::find_by_user(user_uuid, conn).await) + .map(|uo| (uo.org_uuid.clone(), uo)) + .collect() + .await; + + // Generate a HashMap with the User_Collections UUID as key and the CollectionUser record + let user_collections: HashMap = + stream::iter(CollectionUser::find_by_user(user_uuid, conn).await) + .map(|uc| (uc.collection_uuid.clone(), uc)) + .collect() + .await; + 
+ Self { + cipher_attachments, + cipher_folders, + cipher_favorites, + cipher_collections, + user_organizations, + user_collections, + } + } +} diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index 8ad1fdd1..d01b599b 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -1,16 +1,21 @@ use chrono::{Duration, Utc}; +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use serde_json::Value; use std::borrow::Borrow; use crate::{ - api::{EmptyResult, JsonResult, JsonUpcase, NumberOrString}, + api::{ + core::{CipherSyncData, CipherSyncType}, + EmptyResult, JsonResult, JsonUpcase, NumberOrString, + }, auth::{decode_emergency_access_invite, Headers}, db::{models::*, DbConn, DbPool}, mail, CONFIG, }; +use futures::{stream, stream::StreamExt}; + pub fn routes() -> Vec { routes![ get_contacts, @@ -36,13 +41,17 @@ pub fn routes() -> Vec { // region get #[get("/emergency-access/trusted")] -fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult { +async fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; - let emergency_access_list = EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn); - - let emergency_access_list_json: Vec = - emergency_access_list.iter().map(|e| e.to_json_grantee_details(&conn)).collect(); + let emergency_access_list_json = + stream::iter(EmergencyAccess::find_all_by_grantor_uuid(&headers.user.uuid, &conn).await) + .then(|e| async { + let e = e; // Move out this single variable + e.to_json_grantee_details(&conn).await + }) + .collect::>() + .await; Ok(Json(json!({ "Data": emergency_access_list_json, @@ -52,13 +61,17 @@ fn get_contacts(headers: Headers, conn: DbConn) -> JsonResult { } #[get("/emergency-access/granted")] -fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult { +async fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; - let 
emergency_access_list = EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn); - - let emergency_access_list_json: Vec = - emergency_access_list.iter().map(|e| e.to_json_grantor_details(&conn)).collect(); + let emergency_access_list_json = + stream::iter(EmergencyAccess::find_all_by_grantee_uuid(&headers.user.uuid, &conn).await) + .then(|e| async { + let e = e; // Move out this single variable + e.to_json_grantor_details(&conn).await + }) + .collect::>() + .await; Ok(Json(json!({ "Data": emergency_access_list_json, @@ -68,11 +81,11 @@ fn get_grantees(headers: Headers, conn: DbConn) -> JsonResult { } #[get("/emergency-access/")] -fn get_emergency_access(emer_id: String, conn: DbConn) -> JsonResult { +async fn get_emergency_access(emer_id: String, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; - match EmergencyAccess::find_by_uuid(&emer_id, &conn) { - Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&conn))), + match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { + Some(emergency_access) => Ok(Json(emergency_access.to_json_grantee_details(&conn).await)), None => err!("Emergency access not valid."), } } @@ -90,17 +103,25 @@ struct EmergencyAccessUpdateData { } #[put("/emergency-access/", data = "")] -fn put_emergency_access(emer_id: String, data: JsonUpcase, conn: DbConn) -> JsonResult { - post_emergency_access(emer_id, data, conn) +async fn put_emergency_access( + emer_id: String, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { + post_emergency_access(emer_id, data, conn).await } #[post("/emergency-access/", data = "")] -fn post_emergency_access(emer_id: String, data: JsonUpcase, conn: DbConn) -> JsonResult { +async fn post_emergency_access( + emer_id: String, + data: JsonUpcase, + conn: DbConn, +) -> JsonResult { check_emergency_access_allowed()?; let data: EmergencyAccessUpdateData = data.into_inner().data; - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + 
let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emergency_access) => emergency_access, None => err!("Emergency access not valid."), }; @@ -114,7 +135,7 @@ fn post_emergency_access(emer_id: String, data: JsonUpcase")] -fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { +async fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { check_emergency_access_allowed()?; let grantor_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => { if emer.grantor_uuid != grantor_user.uuid && emer.grantee_uuid != Some(grantor_user.uuid) { err!("Emergency access not valid.") @@ -137,13 +158,13 @@ fn delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> E } None => err!("Emergency access not valid."), }; - emergency_access.delete(&conn)?; + emergency_access.delete(&conn).await?; Ok(()) } #[post("/emergency-access//delete")] -fn post_delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { - delete_emergency_access(emer_id, headers, conn) +async fn post_delete_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { + delete_emergency_access(emer_id, headers, conn).await } // endregion @@ -159,7 +180,7 @@ struct EmergencyAccessInviteData { } #[post("/emergency-access/invite", data = "")] -fn send_invite(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn send_invite(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { check_emergency_access_allowed()?; let data: EmergencyAccessInviteData = data.into_inner().data; @@ -180,7 +201,7 @@ fn send_invite(data: JsonUpcase, headers: Headers, co err!("You can not set yourself as an emergency contact.") } - let grantee_user = match User::find_by_mail(&email, 
&conn) { + let grantee_user = match User::find_by_mail(&email, &conn).await { None => { if !CONFIG.invitations_allowed() { err!(format!("Grantee user does not exist: {}", email)) @@ -192,11 +213,11 @@ fn send_invite(data: JsonUpcase, headers: Headers, co if !CONFIG.mail_enabled() { let invitation = Invitation::new(email.clone()); - invitation.save(&conn)?; + invitation.save(&conn).await?; } let mut user = User::new(email.clone()); - user.save(&conn)?; + user.save(&conn).await?; user } Some(user) => user, @@ -208,6 +229,7 @@ fn send_invite(data: JsonUpcase, headers: Headers, co &grantee_user.email, &conn, ) + .await .is_some() { err!(format!("Grantee user already invited: {}", email)) @@ -220,7 +242,7 @@ fn send_invite(data: JsonUpcase, headers: Headers, co new_type, wait_time_days, ); - new_emergency_access.save(&conn)?; + new_emergency_access.save(&conn).await?; if CONFIG.mail_enabled() { mail::send_emergency_access_invite( @@ -229,12 +251,13 @@ fn send_invite(data: JsonUpcase, headers: Headers, co Some(new_emergency_access.uuid), Some(grantor_user.name.clone()), Some(grantor_user.email), - )?; + ) + .await?; } else { // Automatically mark user as accepted if no email invites - match User::find_by_mail(&email, &conn) { + match User::find_by_mail(&email, &conn).await { Some(user) => { - match accept_invite_process(user.uuid, new_emergency_access.uuid, Some(email), conn.borrow()) { + match accept_invite_process(user.uuid, new_emergency_access.uuid, Some(email), conn.borrow()).await { Ok(v) => (v), Err(e) => err!(e.to_string()), } @@ -247,10 +270,10 @@ fn send_invite(data: JsonUpcase, headers: Headers, co } #[post("/emergency-access//reinvite")] -fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { +async fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult { check_emergency_access_allowed()?; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match 
EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -268,7 +291,7 @@ fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult None => err!("Email not valid."), }; - let grantee_user = match User::find_by_mail(&email, &conn) { + let grantee_user = match User::find_by_mail(&email, &conn).await { Some(user) => user, None => err!("Grantee user not found."), }; @@ -282,15 +305,18 @@ fn resend_invite(emer_id: String, headers: Headers, conn: DbConn) -> EmptyResult Some(emergency_access.uuid), Some(grantor_user.name.clone()), Some(grantor_user.email), - )?; + ) + .await?; } else { - if Invitation::find_by_mail(&email, &conn).is_none() { + if Invitation::find_by_mail(&email, &conn).await.is_none() { let invitation = Invitation::new(email); - invitation.save(&conn)?; + invitation.save(&conn).await?; } // Automatically mark user as accepted if no email invites - match accept_invite_process(grantee_user.uuid, emergency_access.uuid, emergency_access.email, conn.borrow()) { + match accept_invite_process(grantee_user.uuid, emergency_access.uuid, emergency_access.email, conn.borrow()) + .await + { Ok(v) => (v), Err(e) => err!(e.to_string()), } @@ -306,28 +332,28 @@ struct AcceptData { } #[post("/emergency-access//accept", data = "")] -fn accept_invite(emer_id: String, data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn accept_invite(emer_id: String, data: JsonUpcase, conn: DbConn) -> EmptyResult { check_emergency_access_allowed()?; let data: AcceptData = data.into_inner().data; let token = &data.Token; let claims = decode_emergency_access_invite(token)?; - let grantee_user = match User::find_by_mail(&claims.email, &conn) { + let grantee_user = match User::find_by_mail(&claims.email, &conn).await { Some(user) => { - Invitation::take(&claims.email, &conn); + Invitation::take(&claims.email, &conn).await; user } None => err!("Invited user not found"), }; - let emergency_access = 
match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; // get grantor user to send Accepted email - let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn) { + let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; @@ -336,13 +362,13 @@ fn accept_invite(emer_id: String, data: JsonUpcase, conn: DbConn) -> && (claims.grantor_name.is_some() && grantor_user.name == claims.grantor_name.unwrap()) && (claims.grantor_email.is_some() && grantor_user.email == claims.grantor_email.unwrap()) { - match accept_invite_process(grantee_user.uuid.clone(), emer_id, Some(grantee_user.email.clone()), &conn) { + match accept_invite_process(grantee_user.uuid.clone(), emer_id, Some(grantee_user.email.clone()), &conn).await { Ok(v) => (v), Err(e) => err!(e.to_string()), } if CONFIG.mail_enabled() { - mail::send_emergency_access_invite_accepted(&grantor_user.email, &grantee_user.email)?; + mail::send_emergency_access_invite_accepted(&grantor_user.email, &grantee_user.email).await?; } Ok(()) @@ -351,8 +377,13 @@ fn accept_invite(emer_id: String, data: JsonUpcase, conn: DbConn) -> } } -fn accept_invite_process(grantee_uuid: String, emer_id: String, email: Option, conn: &DbConn) -> EmptyResult { - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, conn) { +async fn accept_invite_process( + grantee_uuid: String, + emer_id: String, + email: Option, + conn: &DbConn, +) -> EmptyResult { + let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -369,7 +400,7 @@ fn accept_invite_process(grantee_uuid: String, emer_id: String, email: Option/confirm", data = "")] -fn confirm_emergency_access( +async fn 
confirm_emergency_access( emer_id: String, data: JsonUpcase, headers: Headers, @@ -391,7 +422,7 @@ fn confirm_emergency_access( let data: ConfirmData = data.into_inner().data; let key = data.Key; - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -402,13 +433,13 @@ fn confirm_emergency_access( err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &conn) { + let grantor_user = match User::find_by_uuid(&confirming_user.uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { - let grantee_user = match User::find_by_uuid(grantee_uuid, &conn) { + let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { Some(user) => user, None => err!("Grantee user not found."), }; @@ -417,10 +448,10 @@ fn confirm_emergency_access( emergency_access.key_encrypted = Some(key); emergency_access.email = None; - emergency_access.save(&conn)?; + emergency_access.save(&conn).await?; if CONFIG.mail_enabled() { - mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name)?; + mail::send_emergency_access_invite_confirmed(&grantee_user.email, &grantor_user.name).await?; } Ok(Json(emergency_access.to_json())) } else { @@ -433,11 +464,11 @@ fn confirm_emergency_access( // region access emergency access #[post("/emergency-access//initiate")] -fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; let initiating_user = headers.user; - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let mut emergency_access = match 
EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -448,7 +479,7 @@ fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn) { + let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; @@ -458,7 +489,7 @@ fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> emergency_access.updated_at = now; emergency_access.recovery_initiated_at = Some(now); emergency_access.last_notification_at = Some(now); - emergency_access.save(&conn)?; + emergency_access.save(&conn).await?; if CONFIG.mail_enabled() { mail::send_emergency_access_recovery_initiated( @@ -466,17 +497,18 @@ fn initiate_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> &initiating_user.name, emergency_access.get_type_as_str(), &emergency_access.wait_time_days.clone().to_string(), - )?; + ) + .await?; } Ok(Json(emergency_access.to_json())) } #[post("/emergency-access//approve")] -fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; let approving_user = headers.user; - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -487,22 +519,22 @@ fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&approving_user.uuid, &conn) { + let grantor_user = match User::find_by_uuid(&approving_user.uuid, 
&conn).await { Some(user) => user, None => err!("Grantor user not found."), }; if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { - let grantee_user = match User::find_by_uuid(grantee_uuid, &conn) { + let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { Some(user) => user, None => err!("Grantee user not found."), }; emergency_access.status = EmergencyAccessStatus::RecoveryApproved as i32; - emergency_access.save(&conn)?; + emergency_access.save(&conn).await?; if CONFIG.mail_enabled() { - mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name)?; + mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name).await?; } Ok(Json(emergency_access.to_json())) } else { @@ -511,11 +543,11 @@ fn approve_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> } #[post("/emergency-access//reject")] -fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; let rejecting_user = headers.user; - let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let mut emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -527,22 +559,22 @@ fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> J err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&rejecting_user.uuid, &conn) { + let grantor_user = match User::find_by_uuid(&rejecting_user.uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; if let Some(grantee_uuid) = emergency_access.grantee_uuid.as_ref() { - let grantee_user = match User::find_by_uuid(grantee_uuid, &conn) { + let grantee_user = match User::find_by_uuid(grantee_uuid, &conn).await { Some(user) 
=> user, None => err!("Grantee user not found."), }; emergency_access.status = EmergencyAccessStatus::Confirmed as i32; - emergency_access.save(&conn)?; + emergency_access.save(&conn).await?; if CONFIG.mail_enabled() { - mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name)?; + mail::send_emergency_access_recovery_rejected(&grantee_user.email, &grantor_user.name).await?; } Ok(Json(emergency_access.to_json())) } else { @@ -555,12 +587,12 @@ fn reject_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> J // region action #[post("/emergency-access//view")] -fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; let requesting_user = headers.user; let host = headers.host; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -569,10 +601,17 @@ fn view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> Jso err!("Emergency access not valid.") } - let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn); + let ciphers = Cipher::find_owned_by_user(&emergency_access.grantor_uuid, &conn).await; + let cipher_sync_data = + CipherSyncData::new(&emergency_access.grantor_uuid, &ciphers, CipherSyncType::User, &conn).await; - let ciphers_json: Vec = - ciphers.iter().map(|c| c.to_json(&host, &emergency_access.grantor_uuid, &conn)).collect(); + let ciphers_json = stream::iter(ciphers) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json(&host, &emergency_access.grantor_uuid, Some(&cipher_sync_data), &conn).await + }) + .collect::>() + .await; Ok(Json(json!({ "Ciphers": ciphers_json, @@ -582,11 +621,11 @@ fn 
view_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> Jso } #[post("/emergency-access//takeover")] -fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { check_emergency_access_allowed()?; let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -595,7 +634,7 @@ fn takeover_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn) { + let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; @@ -616,7 +655,7 @@ struct EmergencyAccessPasswordData { } #[post("/emergency-access//password", data = "")] -fn password_emergency_access( +async fn password_emergency_access( emer_id: String, data: JsonUpcase, headers: Headers, @@ -629,7 +668,7 @@ fn password_emergency_access( let key = data.Key; let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -638,7 +677,7 @@ fn password_emergency_access( err!("Emergency access not valid.") } - let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn) { + let mut grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; @@ -646,18 +685,15 @@ fn password_emergency_access( // change grantor_user password 
grantor_user.set_password(new_master_password_hash, None); grantor_user.akey = key; - grantor_user.save(&conn)?; + grantor_user.save(&conn).await?; // Disable TwoFactor providers since they will otherwise block logins - TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn)?; - - // Removing owner, check that there are at least another owner - let user_org_grantor = UserOrganization::find_any_state_by_user(&grantor_user.uuid, &conn); + TwoFactor::delete_all_by_user(&grantor_user.uuid, &conn).await?; // Remove grantor from all organisations unless Owner - for user_org in user_org_grantor { + for user_org in UserOrganization::find_any_state_by_user(&grantor_user.uuid, &conn).await { if user_org.atype != UserOrgType::Owner as i32 { - user_org.delete(&conn)?; + user_org.delete(&conn).await?; } } Ok(()) @@ -666,9 +702,9 @@ fn password_emergency_access( // endregion #[get("/emergency-access//policies")] -fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { +async fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> JsonResult { let requesting_user = headers.user; - let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn) { + let emergency_access = match EmergencyAccess::find_by_uuid(&emer_id, &conn).await { Some(emer) => emer, None => err!("Emergency access not valid."), }; @@ -677,13 +713,13 @@ fn policies_emergency_access(emer_id: String, headers: Headers, conn: DbConn) -> err!("Emergency access not valid.") } - let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn) { + let grantor_user = match User::find_by_uuid(&emergency_access.grantor_uuid, &conn).await { Some(user) => user, None => err!("Grantor user not found."), }; let policies = OrgPolicy::find_confirmed_by_user(&grantor_user.uuid, &conn); - let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); + let policies_json: Vec = 
policies.await.iter().map(OrgPolicy::to_json).collect(); Ok(Json(json!({ "Data": policies_json, @@ -709,14 +745,14 @@ fn check_emergency_access_allowed() -> EmptyResult { Ok(()) } -pub fn emergency_request_timeout_job(pool: DbPool) { +pub async fn emergency_request_timeout_job(pool: DbPool) { debug!("Start emergency_request_timeout_job"); if !CONFIG.emergency_access_allowed() { return; } - if let Ok(conn) = pool.get() { - let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn); + if let Ok(conn) = pool.get().await { + let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await; if emergency_access_list.is_empty() { debug!("No emergency request timeout to approve"); @@ -725,18 +761,20 @@ pub fn emergency_request_timeout_job(pool: DbPool) { for mut emer in emergency_access_list { if emer.recovery_initiated_at.is_some() && Utc::now().naive_utc() - >= emer.recovery_initiated_at.unwrap() + Duration::days(emer.wait_time_days as i64) + >= emer.recovery_initiated_at.unwrap() + Duration::days(i64::from(emer.wait_time_days)) { emer.status = EmergencyAccessStatus::RecoveryApproved as i32; - emer.save(&conn).expect("Cannot save emergency access on job"); + emer.save(&conn).await.expect("Cannot save emergency access on job"); if CONFIG.mail_enabled() { // get grantor user to send Accepted email - let grantor_user = User::find_by_uuid(&emer.grantor_uuid, &conn).expect("Grantor user not found."); + let grantor_user = + User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found."); // get grantee user to send Accepted email let grantee_user = User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn) + .await .expect("Grantee user not found."); mail::send_emergency_access_recovery_timed_out( @@ -744,9 +782,11 @@ pub fn emergency_request_timeout_job(pool: DbPool) { &grantee_user.name.clone(), emer.get_type_as_str(), ) + .await .expect("Error on sending email"); 
mail::send_emergency_access_recovery_approved(&grantee_user.email, &grantor_user.name.clone()) + .await .expect("Error on sending email"); } } @@ -756,14 +796,14 @@ pub fn emergency_request_timeout_job(pool: DbPool) { } } -pub fn emergency_notification_reminder_job(pool: DbPool) { +pub async fn emergency_notification_reminder_job(pool: DbPool) { debug!("Start emergency_notification_reminder_job"); if !CONFIG.emergency_access_allowed() { return; } - if let Ok(conn) = pool.get() { - let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn); + if let Ok(conn) = pool.get().await { + let emergency_access_list = EmergencyAccess::find_all_recoveries(&conn).await; if emergency_access_list.is_empty() { debug!("No emergency request reminder notification to send"); @@ -772,20 +812,22 @@ pub fn emergency_notification_reminder_job(pool: DbPool) { for mut emer in emergency_access_list { if (emer.recovery_initiated_at.is_some() && Utc::now().naive_utc() - >= emer.recovery_initiated_at.unwrap() + Duration::days((emer.wait_time_days as i64) - 1)) + >= emer.recovery_initiated_at.unwrap() + Duration::days((i64::from(emer.wait_time_days)) - 1)) && (emer.last_notification_at.is_none() || (emer.last_notification_at.is_some() && Utc::now().naive_utc() >= emer.last_notification_at.unwrap() + Duration::days(1))) { - emer.save(&conn).expect("Cannot save emergency access on job"); + emer.save(&conn).await.expect("Cannot save emergency access on job"); if CONFIG.mail_enabled() { // get grantor user to send Accepted email - let grantor_user = User::find_by_uuid(&emer.grantor_uuid, &conn).expect("Grantor user not found."); + let grantor_user = + User::find_by_uuid(&emer.grantor_uuid, &conn).await.expect("Grantor user not found."); // get grantee user to send Accepted email let grantee_user = User::find_by_uuid(&emer.grantee_uuid.clone().expect("Grantee user invalid."), &conn) + .await .expect("Grantee user not found."); mail::send_emergency_access_recovery_reminder( @@ -794,6 
+836,7 @@ pub fn emergency_notification_reminder_job(pool: DbPool) { emer.get_type_as_str(), &emer.wait_time_days.to_string(), // TODO(jjlin): This should be the number of days left. ) + .await .expect("Error on sending email"); } } diff --git a/src/api/core/folders.rs b/src/api/core/folders.rs index 57ec7f18..a5997983 100644 --- a/src/api/core/folders.rs +++ b/src/api/core/folders.rs @@ -1,4 +1,4 @@ -use rocket_contrib::json::Json; +use rocket::serde::json::Json; use serde_json::Value; use crate::{ @@ -12,9 +12,8 @@ pub fn routes() -> Vec { } #[get("/folders")] -fn get_folders(headers: Headers, conn: DbConn) -> Json { - let folders = Folder::find_by_user(&headers.user.uuid, &conn); - +async fn get_folders(headers: Headers, conn: DbConn) -> Json { + let folders = Folder::find_by_user(&headers.user.uuid, &conn).await; let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); Json(json!({ @@ -25,8 +24,8 @@ fn get_folders(headers: Headers, conn: DbConn) -> Json { } #[get("/folders/")] -fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { - let folder = match Folder::find_by_uuid(&uuid, &conn) { +async fn get_folder(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { + let folder = match Folder::find_by_uuid(&uuid, &conn).await { Some(folder) => folder, _ => err!("Invalid folder"), }; @@ -45,27 +44,39 @@ pub struct FolderData { } #[post("/folders", data = "")] -fn post_folders(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { +async fn post_folders(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { let data: FolderData = data.into_inner().data; let mut folder = Folder::new(headers.user.uuid, data.Name); - folder.save(&conn)?; - nt.send_folder_update(UpdateType::FolderCreate, &folder); + folder.save(&conn).await?; + nt.send_folder_update(UpdateType::FolderCreate, &folder).await; Ok(Json(folder.to_json())) } #[post("/folders/", data = "")] -fn post_folder(uuid: 
String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - put_folder(uuid, data, headers, conn, nt) +async fn post_folder( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + put_folder(uuid, data, headers, conn, nt).await } #[put("/folders/", data = "")] -fn put_folder(uuid: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { +async fn put_folder( + uuid: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { let data: FolderData = data.into_inner().data; - let mut folder = match Folder::find_by_uuid(&uuid, &conn) { + let mut folder = match Folder::find_by_uuid(&uuid, &conn).await { Some(folder) => folder, _ => err!("Invalid folder"), }; @@ -76,20 +87,20 @@ fn put_folder(uuid: String, data: JsonUpcase, headers: Headers, conn folder.name = data.Name; - folder.save(&conn)?; - nt.send_folder_update(UpdateType::FolderUpdate, &folder); + folder.save(&conn).await?; + nt.send_folder_update(UpdateType::FolderUpdate, &folder).await; Ok(Json(folder.to_json())) } #[post("/folders//delete")] -fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - delete_folder(uuid, headers, conn, nt) +async fn delete_folder_post(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + delete_folder(uuid, headers, conn, nt).await } #[delete("/folders/")] -fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - let folder = match Folder::find_by_uuid(&uuid, &conn) { +async fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + let folder = match Folder::find_by_uuid(&uuid, &conn).await { Some(folder) => folder, _ => err!("Invalid folder"), }; @@ -99,8 +110,8 @@ fn delete_folder(uuid: String, headers: Headers, conn: DbConn, nt: Notify) -> Em } // Delete the actual folder entry - 
folder.delete(&conn)?; + folder.delete(&conn).await?; - nt.send_folder_update(UpdateType::FolderDelete, &folder); + nt.send_folder_update(UpdateType::FolderDelete, &folder).await; Ok(()) } diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 77e8780d..c54ebeb7 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -1,4 +1,4 @@ -mod accounts; +pub mod accounts; mod ciphers; mod emergency_access; mod folders; @@ -7,13 +7,16 @@ mod sends; pub mod two_factor; pub use ciphers::purge_trashed_ciphers; +pub use ciphers::{CipherSyncData, CipherSyncType}; pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job}; pub use sends::purge_sends; pub use two_factor::send_incomplete_2fa_notifications; pub fn routes() -> Vec { - let mut mod_routes = - routes![clear_device_token, put_device_token, get_eq_domains, post_eq_domains, put_eq_domains, hibp_breach,]; + let mut device_token_routes = routes![clear_device_token, put_device_token]; + let mut eq_domains_routes = routes![get_eq_domains, post_eq_domains, put_eq_domains]; + let mut hibp_routes = routes![hibp_breach]; + let mut meta_routes = routes![alive, now, version]; let mut routes = Vec::new(); routes.append(&mut accounts::routes()); @@ -23,7 +26,10 @@ pub fn routes() -> Vec { routes.append(&mut organizations::routes()); routes.append(&mut two_factor::routes()); routes.append(&mut sends::routes()); - routes.append(&mut mod_routes); + routes.append(&mut device_token_routes); + routes.append(&mut eq_domains_routes); + routes.append(&mut hibp_routes); + routes.append(&mut meta_routes); routes } @@ -31,8 +37,8 @@ pub fn routes() -> Vec { // // Move this somewhere else // +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use serde_json::Value; use crate::{ @@ -121,7 +127,7 @@ struct EquivDomainData { } #[post("/settings/domains", data = "")] -fn post_eq_domains(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn 
post_eq_domains(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EquivDomainData = data.into_inner().data; let excluded_globals = data.ExcludedGlobalEquivalentDomains.unwrap_or_default(); @@ -133,18 +139,18 @@ fn post_eq_domains(data: JsonUpcase, headers: Headers, conn: Db user.excluded_globals = to_string(&excluded_globals).unwrap_or_else(|_| "[]".to_string()); user.equivalent_domains = to_string(&equivalent_domains).unwrap_or_else(|_| "[]".to_string()); - user.save(&conn)?; + user.save(&conn).await?; Ok(Json(json!({}))) } #[put("/settings/domains", data = "")] -fn put_eq_domains(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - post_eq_domains(data, headers, conn) +async fn put_eq_domains(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + post_eq_domains(data, headers, conn).await } #[get("/hibp/breach?")] -fn hibp_breach(username: String) -> JsonResult { +async fn hibp_breach(username: String) -> JsonResult { let url = format!( "https://haveibeenpwned.com/api/v3/breachedaccount/{}?truncateResponse=false&includeUnverified=false", username @@ -153,14 +159,14 @@ fn hibp_breach(username: String) -> JsonResult { if let Some(api_key) = crate::CONFIG.hibp_api_key() { let hibp_client = get_reqwest_client(); - let res = hibp_client.get(&url).header("hibp-api-key", api_key).send()?; + let res = hibp_client.get(&url).header("hibp-api-key", api_key).send().await?; // If we get a 404, return a 404, it means no breached accounts if res.status() == 404 { return Err(Error::empty().with_code(404)); } - let value: Value = res.error_for_status()?.json()?; + let value: Value = res.error_for_status()?.json().await?; Ok(Json(value)) } else { Ok(Json(json!([{ @@ -178,3 +184,19 @@ fn hibp_breach(username: String) -> JsonResult { }]))) } } + +// We use DbConn here to let the alive healthcheck also verify the database connection. 
+#[get("/alive")] +fn alive(_conn: DbConn) -> Json { + now() +} + +#[get("/now")] +pub fn now() -> Json { + Json(crate::util::format_date(&chrono::Utc::now().naive_utc())) +} + +#[get("/version")] +fn version() -> Json<&'static str> { + Json(crate::VERSION.unwrap_or_default()) +} diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index fa79c39c..e968ffbd 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -1,15 +1,20 @@ use num_traits::FromPrimitive; -use rocket::{request::Form, Route}; -use rocket_contrib::json::Json; +use rocket::serde::json::Json; +use rocket::Route; use serde_json::Value; use crate::{ - api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType}, + api::{ + core::{CipherSyncData, CipherSyncType}, + EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType, + }, auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders}, db::{models::*, DbConn}, mail, CONFIG, }; +use futures::{stream, stream::StreamExt}; + pub fn routes() -> Vec { routes![ get_organization, @@ -98,11 +103,11 @@ struct OrgBulkIds { } #[post("/organizations", data = "")] -fn create_organization(headers: Headers, data: JsonUpcase, conn: DbConn) -> JsonResult { +async fn create_organization(headers: Headers, data: JsonUpcase, conn: DbConn) -> JsonResult { if !CONFIG.is_org_creation_allowed(&headers.user.email) { err!("User not allowed to create organizations") } - if OrgPolicy::is_applicable_to_user(&headers.user.uuid, OrgPolicyType::SingleOrg, &conn) { + if OrgPolicy::is_applicable_to_user(&headers.user.uuid, OrgPolicyType::SingleOrg, &conn).await { err!( "You may not create an organization. You belong to an organization which has a policy that prohibits you from being a member of any other organization." 
) @@ -125,15 +130,15 @@ fn create_organization(headers: Headers, data: JsonUpcase, conn: DbConn user_org.atype = UserOrgType::Owner as i32; user_org.status = UserOrgStatus::Confirmed as i32; - org.save(&conn)?; - user_org.save(&conn)?; - collection.save(&conn)?; + org.save(&conn).await?; + user_org.save(&conn).await?; + collection.save(&conn).await?; Ok(Json(org.to_json())) } #[delete("/organizations/", data = "")] -fn delete_organization( +async fn delete_organization( org_id: String, data: JsonUpcase, headers: OwnerHeaders, @@ -146,61 +151,61 @@ fn delete_organization( err!("Invalid password") } - match Organization::find_by_uuid(&org_id, &conn) { + match Organization::find_by_uuid(&org_id, &conn).await { None => err!("Organization not found"), - Some(org) => org.delete(&conn), + Some(org) => org.delete(&conn).await, } } #[post("/organizations//delete", data = "")] -fn post_delete_organization( +async fn post_delete_organization( org_id: String, data: JsonUpcase, headers: OwnerHeaders, conn: DbConn, ) -> EmptyResult { - delete_organization(org_id, data, headers, conn) + delete_organization(org_id, data, headers, conn).await } #[post("/organizations//leave")] -fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyResult { - match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { +async fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyResult { + match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn).await { None => err!("User not part of organization"), Some(user_org) => { if user_org.atype == UserOrgType::Owner { let num_owners = - UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len(); + UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).await.len(); if num_owners <= 1 { err!("The last owner can't leave") } } - user_org.delete(&conn) + user_org.delete(&conn).await } } } #[get("/organizations/")] 
-fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> JsonResult { - match Organization::find_by_uuid(&org_id, &conn) { +async fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> JsonResult { + match Organization::find_by_uuid(&org_id, &conn).await { Some(organization) => Ok(Json(organization.to_json())), None => err!("Can't find organization details"), } } #[put("/organizations/", data = "")] -fn put_organization( +async fn put_organization( org_id: String, headers: OwnerHeaders, data: JsonUpcase, conn: DbConn, ) -> JsonResult { - post_organization(org_id, headers, data, conn) + post_organization(org_id, headers, data, conn).await } #[post("/organizations/", data = "")] -fn post_organization( +async fn post_organization( org_id: String, _headers: OwnerHeaders, data: JsonUpcase, @@ -208,7 +213,7 @@ fn post_organization( ) -> JsonResult { let data: OrganizationUpdateData = data.into_inner().data; - let mut org = match Organization::find_by_uuid(&org_id, &conn) { + let mut org = match Organization::find_by_uuid(&org_id, &conn).await { Some(organization) => organization, None => err!("Can't find organization details"), }; @@ -216,16 +221,16 @@ fn post_organization( org.name = data.Name; org.billing_email = data.BillingEmail; - org.save(&conn)?; + org.save(&conn).await?; Ok(Json(org.to_json())) } // GET /api/collections?writeOnly=false #[get("/collections")] -fn get_user_collections(headers: Headers, conn: DbConn) -> Json { +async fn get_user_collections(headers: Headers, conn: DbConn) -> Json { Json(json!({ "Data": - Collection::find_by_user_uuid(&headers.user.uuid, &conn) + Collection::find_by_user_uuid(&headers.user.uuid, &conn).await .iter() .map(Collection::to_json) .collect::(), @@ -235,10 +240,10 @@ fn get_user_collections(headers: Headers, conn: DbConn) -> Json { } #[get("/organizations//collections")] -fn get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json { +async fn 
get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json { Json(json!({ "Data": - Collection::find_by_organization(&org_id, &conn) + Collection::find_by_organization(&org_id, &conn).await .iter() .map(Collection::to_json) .collect::(), @@ -248,7 +253,7 @@ fn get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbCo } #[post("/organizations//collections", data = "")] -fn post_organization_collections( +async fn post_organization_collections( org_id: String, headers: ManagerHeadersLoose, data: JsonUpcase, @@ -256,43 +261,43 @@ fn post_organization_collections( ) -> JsonResult { let data: NewCollectionData = data.into_inner().data; - let org = match Organization::find_by_uuid(&org_id, &conn) { + let org = match Organization::find_by_uuid(&org_id, &conn).await { Some(organization) => organization, None => err!("Can't find organization details"), }; // Get the user_organization record so that we can check if the user has access to all collections. - let user_org = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { + let user_org = match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn).await { Some(u) => u, None => err!("User is not part of organization"), }; let collection = Collection::new(org.uuid, data.Name); - collection.save(&conn)?; + collection.save(&conn).await?; // If the user doesn't have access to all collections, only in case of a Manger, // then we need to save the creating user uuid (Manager) to the users_collection table. // Else the user will not have access to his own created collection. 
if !user_org.access_all { - CollectionUser::save(&headers.user.uuid, &collection.uuid, false, false, &conn)?; + CollectionUser::save(&headers.user.uuid, &collection.uuid, false, false, &conn).await?; } Ok(Json(collection.to_json())) } #[put("/organizations//collections/", data = "")] -fn put_organization_collection_update( +async fn put_organization_collection_update( org_id: String, col_id: String, headers: ManagerHeaders, data: JsonUpcase, conn: DbConn, ) -> JsonResult { - post_organization_collection_update(org_id, col_id, headers, data, conn) + post_organization_collection_update(org_id, col_id, headers, data, conn).await } #[post("/organizations//collections/", data = "")] -fn post_organization_collection_update( +async fn post_organization_collection_update( org_id: String, col_id: String, _headers: ManagerHeaders, @@ -301,12 +306,12 @@ fn post_organization_collection_update( ) -> JsonResult { let data: NewCollectionData = data.into_inner().data; - let org = match Organization::find_by_uuid(&org_id, &conn) { + let org = match Organization::find_by_uuid(&org_id, &conn).await { Some(organization) => organization, None => err!("Can't find organization details"), }; - let mut collection = match Collection::find_by_uuid(&col_id, &conn) { + let mut collection = match Collection::find_by_uuid(&col_id, &conn).await { Some(collection) => collection, None => err!("Collection not found"), }; @@ -316,20 +321,20 @@ fn post_organization_collection_update( } collection.name = data.Name; - collection.save(&conn)?; + collection.save(&conn).await?; Ok(Json(collection.to_json())) } #[delete("/organizations//collections//user/")] -fn delete_organization_collection_user( +async fn delete_organization_collection_user( org_id: String, col_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn, ) -> EmptyResult { - let collection = match Collection::find_by_uuid(&col_id, &conn) { + let collection = match Collection::find_by_uuid(&col_id, &conn).await { None => 
err!("Collection not found"), Some(collection) => { if collection.org_uuid == org_id { @@ -340,40 +345,40 @@ fn delete_organization_collection_user( } }; - match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { + match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn).await { None => err!("User not found in organization"), Some(user_org) => { - match CollectionUser::find_by_collection_and_user(&collection.uuid, &user_org.user_uuid, &conn) { + match CollectionUser::find_by_collection_and_user(&collection.uuid, &user_org.user_uuid, &conn).await { None => err!("User not assigned to collection"), - Some(col_user) => col_user.delete(&conn), + Some(col_user) => col_user.delete(&conn).await, } } } } #[post("/organizations//collections//delete-user/")] -fn post_organization_collection_delete_user( +async fn post_organization_collection_delete_user( org_id: String, col_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn, ) -> EmptyResult { - delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn) + delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn).await } #[delete("/organizations//collections/")] -fn delete_organization_collection( +async fn delete_organization_collection( org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn, ) -> EmptyResult { - match Collection::find_by_uuid(&col_id, &conn) { + match Collection::find_by_uuid(&col_id, &conn).await { None => err!("Collection not found"), Some(collection) => { if collection.org_uuid == org_id { - collection.delete(&conn) + collection.delete(&conn).await } else { err!("Collection and Organization id do not match") } @@ -389,19 +394,24 @@ struct DeleteCollectionData { } #[post("/organizations//collections//delete", data = "<_data>")] -fn post_organization_collection_delete( +async fn post_organization_collection_delete( org_id: String, col_id: String, headers: ManagerHeaders, _data: JsonUpcase, 
conn: DbConn, ) -> EmptyResult { - delete_organization_collection(org_id, col_id, headers, conn) + delete_organization_collection(org_id, col_id, headers, conn).await } #[get("/organizations//collections//details")] -fn get_org_collection_detail(org_id: String, coll_id: String, headers: ManagerHeaders, conn: DbConn) -> JsonResult { - match Collection::find_by_uuid_and_user(&coll_id, &headers.user.uuid, &conn) { +async fn get_org_collection_detail( + org_id: String, + coll_id: String, + headers: ManagerHeaders, + conn: DbConn, +) -> JsonResult { + match Collection::find_by_uuid_and_user(&coll_id, &headers.user.uuid, &conn).await { None => err!("Collection not found"), Some(collection) => { if collection.org_uuid != org_id { @@ -414,28 +424,29 @@ fn get_org_collection_detail(org_id: String, coll_id: String, headers: ManagerHe } #[get("/organizations//collections//users")] -fn get_collection_users(org_id: String, coll_id: String, _headers: ManagerHeaders, conn: DbConn) -> JsonResult { +async fn get_collection_users(org_id: String, coll_id: String, _headers: ManagerHeaders, conn: DbConn) -> JsonResult { // Get org and collection, check that collection is from org - let collection = match Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn) { + let collection = match Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn).await { None => err!("Collection not found in Organization"), Some(collection) => collection, }; - // Get the users from collection - let user_list: Vec = CollectionUser::find_by_collection(&collection.uuid, &conn) - .iter() - .map(|col_user| { + let user_list = stream::iter(CollectionUser::find_by_collection(&collection.uuid, &conn).await) + .then(|col_user| async { + let col_user = col_user; // Move out this single variable UserOrganization::find_by_user_and_org(&col_user.user_uuid, &org_id, &conn) + .await .unwrap() - .to_json_user_access_restrictions(col_user) + .to_json_user_access_restrictions(&col_user) }) - .collect(); + .collect::>() 
+ .await; Ok(Json(json!(user_list))) } #[put("/organizations//collections//users", data = "")] -fn put_collection_users( +async fn put_collection_users( org_id: String, coll_id: String, data: JsonUpcaseVec, @@ -443,16 +454,16 @@ fn put_collection_users( conn: DbConn, ) -> EmptyResult { // Get org and collection, check that collection is from org - if Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn).is_none() { + if Collection::find_by_uuid_and_org(&coll_id, &org_id, &conn).await.is_none() { err!("Collection not found in Organization") } // Delete all the user-collections - CollectionUser::delete_all_by_collection(&coll_id, &conn)?; + CollectionUser::delete_all_by_collection(&coll_id, &conn).await?; // And then add all the received ones (except if the user has access_all) for d in data.iter().map(|d| &d.data) { - let user = match UserOrganization::find_by_uuid(&d.Id, &conn) { + let user = match UserOrganization::find_by_uuid(&d.Id, &conn).await { Some(u) => u, None => err!("User is not part of organization"), }; @@ -461,7 +472,7 @@ fn put_collection_users( continue; } - CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?; + CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn).await?; } Ok(()) @@ -469,15 +480,22 @@ fn put_collection_users( #[derive(FromForm)] struct OrgIdData { - #[form(field = "organizationId")] + #[field(name = "organizationId")] organization_id: String, } #[get("/ciphers/organization-details?")] -fn get_org_details(data: Form, headers: Headers, conn: DbConn) -> Json { - let ciphers = Cipher::find_by_org(&data.organization_id, &conn); - let ciphers_json: Vec = - ciphers.iter().map(|c| c.to_json(&headers.host, &headers.user.uuid, &conn)).collect(); +async fn get_org_details(data: OrgIdData, headers: Headers, conn: DbConn) -> Json { + let ciphers = Cipher::find_by_org(&data.organization_id, &conn).await; + let cipher_sync_data = CipherSyncData::new(&headers.user.uuid, &ciphers, 
CipherSyncType::Organization, &conn).await; + + let ciphers_json = stream::iter(ciphers) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json(&headers.host, &headers.user.uuid, Some(&cipher_sync_data), &conn).await + }) + .collect::>() + .await; Json(json!({ "Data": ciphers_json, @@ -487,9 +505,14 @@ fn get_org_details(data: Form, headers: Headers, conn: DbConn) -> Jso } #[get("/organizations//users")] -fn get_org_users(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json { - let users = UserOrganization::find_by_org(&org_id, &conn); - let users_json: Vec = users.iter().map(|c| c.to_json_user_details(&conn)).collect(); +async fn get_org_users(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json { + let users_json = stream::iter(UserOrganization::find_by_org(&org_id, &conn).await) + .then(|u| async { + let u = u; // Move out this single variable + u.to_json_user_details(&conn).await + }) + .collect::>() + .await; Json(json!({ "Data": users_json, @@ -499,10 +522,15 @@ fn get_org_users(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> } #[post("/organizations//keys", data = "")] -fn post_org_keys(org_id: String, data: JsonUpcase, _headers: AdminHeaders, conn: DbConn) -> JsonResult { +async fn post_org_keys( + org_id: String, + data: JsonUpcase, + _headers: AdminHeaders, + conn: DbConn, +) -> JsonResult { let data: OrgKeyData = data.into_inner().data; - let mut org = match Organization::find_by_uuid(&org_id, &conn) { + let mut org = match Organization::find_by_uuid(&org_id, &conn).await { Some(organization) => { if organization.private_key.is_some() && organization.public_key.is_some() { err!("Organization Keys already exist") @@ -515,7 +543,7 @@ fn post_org_keys(org_id: String, data: JsonUpcase, _headers: AdminHe org.private_key = Some(data.EncryptedPrivateKey); org.public_key = Some(data.PublicKey); - org.save(&conn)?; + org.save(&conn).await?; Ok(Json(json!({ "Object": "organizationKeys", @@ 
-542,7 +570,7 @@ struct InviteData { } #[post("/organizations//users/invite", data = "")] -fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> EmptyResult { +async fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> EmptyResult { let data: InviteData = data.into_inner().data; let new_type = match UserOrgType::from_str(&data.Type.into_string()) { @@ -561,7 +589,7 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade } else { UserOrgStatus::Accepted as i32 // Automatically mark user as accepted if no email invites }; - let user = match User::find_by_mail(&email, &conn) { + let user = match User::find_by_mail(&email, &conn).await { None => { if !CONFIG.invitations_allowed() { err!(format!("User does not exist: {}", email)) @@ -573,16 +601,16 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade if !CONFIG.mail_enabled() { let invitation = Invitation::new(email.clone()); - invitation.save(&conn)?; + invitation.save(&conn).await?; } let mut user = User::new(email.clone()); - user.save(&conn)?; + user.save(&conn).await?; user_org_status = UserOrgStatus::Invited as i32; user } Some(user) => { - if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).is_some() { + if UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).await.is_some() { err!(format!("User already in organization: {}", email)) } else { user @@ -599,19 +627,20 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade // If no accessAll, add the collections received if !access_all { for col in data.Collections.iter().flatten() { - match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { + match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn).await { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?; + 
CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn) + .await?; } } } } - new_user.save(&conn)?; + new_user.save(&conn).await?; if CONFIG.mail_enabled() { - let org_name = match Organization::find_by_uuid(&org_id, &conn) { + let org_name = match Organization::find_by_uuid(&org_id, &conn).await { Some(org) => org.name, None => err!("Error looking up organization"), }; @@ -623,7 +652,8 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade Some(new_user.uuid), &org_name, Some(headers.user.email.clone()), - )?; + ) + .await?; } } @@ -631,7 +661,7 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade } #[post("/organizations//users/reinvite", data = "")] -fn bulk_reinvite_user( +async fn bulk_reinvite_user( org_id: String, data: JsonUpcase, headers: AdminHeaders, @@ -641,7 +671,7 @@ fn bulk_reinvite_user( let mut bulk_response = Vec::new(); for org_user_id in data.Ids { - let err_msg = match _reinvite_user(&org_id, &org_user_id, &headers.user.email, &conn) { + let err_msg = match _reinvite_user(&org_id, &org_user_id, &headers.user.email, &conn).await { Ok(_) => String::from(""), Err(e) => format!("{:?}", e), }; @@ -663,11 +693,11 @@ fn bulk_reinvite_user( } #[post("/organizations//users//reinvite")] -fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { - _reinvite_user(&org_id, &user_org, &headers.user.email, &conn) +async fn reinvite_user(org_id: String, user_org: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { + _reinvite_user(&org_id, &user_org, &headers.user.email, &conn).await } -fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &DbConn) -> EmptyResult { +async fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &DbConn) -> EmptyResult { if !CONFIG.invitations_allowed() { err!("Invitations are not allowed.") } @@ -676,7 +706,7 @@ fn _reinvite_user(org_id: &str, user_org: 
&str, invited_by_email: &str, conn: &D err!("SMTP is not configured.") } - let user_org = match UserOrganization::find_by_uuid(user_org, conn) { + let user_org = match UserOrganization::find_by_uuid(user_org, conn).await { Some(user_org) => user_org, None => err!("The user hasn't been invited to the organization."), }; @@ -685,12 +715,12 @@ fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &D err!("The user is already accepted or confirmed to the organization") } - let user = match User::find_by_uuid(&user_org.user_uuid, conn) { + let user = match User::find_by_uuid(&user_org.user_uuid, conn).await { Some(user) => user, None => err!("User not found."), }; - let org_name = match Organization::find_by_uuid(org_id, conn) { + let org_name = match Organization::find_by_uuid(org_id, conn).await { Some(org) => org.name, None => err!("Error looking up organization."), }; @@ -703,10 +733,11 @@ fn _reinvite_user(org_id: &str, user_org: &str, invited_by_email: &str, conn: &D Some(user_org.uuid), &org_name, Some(invited_by_email.to_string()), - )?; + ) + .await?; } else { let invitation = Invitation::new(user.email); - invitation.save(conn)?; + invitation.save(conn).await?; } Ok(()) @@ -719,18 +750,23 @@ struct AcceptData { } #[post("/organizations/<_org_id>/users/<_org_user_id>/accept", data = "")] -fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn accept_invite( + _org_id: String, + _org_user_id: String, + data: JsonUpcase, + conn: DbConn, +) -> EmptyResult { // The web-vault passes org_id and org_user_id in the URL, but we are just reading them from the JWT instead let data: AcceptData = data.into_inner().data; let token = &data.Token; let claims = decode_invite(token)?; - match User::find_by_mail(&claims.email, &conn) { + match User::find_by_mail(&claims.email, &conn).await { Some(_) => { - Invitation::take(&claims.email, &conn); + Invitation::take(&claims.email, &conn).await; if 
let (Some(user_org), Some(org)) = (&claims.user_org_id, &claims.org_id) { - let mut user_org = match UserOrganization::find_by_uuid_and_org(user_org, org, &conn) { + let mut user_org = match UserOrganization::find_by_uuid_and_org(user_org, org, &conn).await { Some(user_org) => user_org, None => err!("Error accepting the invitation"), }; @@ -739,11 +775,11 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase p.enabled, None => false, }; @@ -754,12 +790,15 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase p.enabled, None => false, }; if single_org_policy_enabled && user_org.atype < UserOrgType::Admin { let is_member_of_another_org = UserOrganization::find_any_state_by_user(&user_org.user_uuid, &conn) + .await .into_iter() .filter(|uo| uo.org_uuid != user_org.org_uuid) .count() @@ -770,14 +809,14 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase err!("Invited user not found"), @@ -786,17 +825,17 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase org.name, None => err!("Organization not found."), }; }; if let Some(invited_by_email) = &claims.invited_by_email { // User was invited to an organization, so they must be confirmed manually after acceptance - mail::send_invite_accepted(&claims.email, invited_by_email, &org_name)?; + mail::send_invite_accepted(&claims.email, invited_by_email, &org_name).await?; } else { // User was invited from /admin, so they are automatically confirmed - mail::send_invite_confirmed(&claims.email, &org_name)?; + mail::send_invite_confirmed(&claims.email, &org_name).await?; } } @@ -804,7 +843,12 @@ fn accept_invite(_org_id: String, _org_user_id: String, data: JsonUpcase/users/confirm", data = "")] -fn bulk_confirm_invite(org_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> Json { +async fn bulk_confirm_invite( + org_id: String, + data: JsonUpcase, + headers: AdminHeaders, + conn: DbConn, +) -> Json { let data = 
data.into_inner().data; let mut bulk_response = Vec::new(); @@ -813,7 +857,7 @@ fn bulk_confirm_invite(org_id: String, data: JsonUpcase, headers: AdminHe for invite in keys { let org_user_id = invite["Id"].as_str().unwrap_or_default(); let user_key = invite["Key"].as_str().unwrap_or_default(); - let err_msg = match _confirm_invite(&org_id, org_user_id, user_key, &headers, &conn) { + let err_msg = match _confirm_invite(&org_id, org_user_id, user_key, &headers, &conn).await { Ok(_) => String::from(""), Err(e) => format!("{:?}", e), }; @@ -838,7 +882,7 @@ fn bulk_confirm_invite(org_id: String, data: JsonUpcase, headers: AdminHe } #[post("/organizations//users//confirm", data = "")] -fn confirm_invite( +async fn confirm_invite( org_id: String, org_user_id: String, data: JsonUpcase, @@ -847,15 +891,21 @@ fn confirm_invite( ) -> EmptyResult { let data = data.into_inner().data; let user_key = data["Key"].as_str().unwrap_or_default(); - _confirm_invite(&org_id, &org_user_id, user_key, &headers, &conn) + _confirm_invite(&org_id, &org_user_id, user_key, &headers, &conn).await } -fn _confirm_invite(org_id: &str, org_user_id: &str, key: &str, headers: &AdminHeaders, conn: &DbConn) -> EmptyResult { +async fn _confirm_invite( + org_id: &str, + org_user_id: &str, + key: &str, + headers: &AdminHeaders, + conn: &DbConn, +) -> EmptyResult { if key.is_empty() || org_user_id.is_empty() { err!("Key or UserId is not set, unable to process request"); } - let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn) { + let mut user_to_confirm = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await { Some(user) => user, None => err!("The specified user isn't a member of the organization"), }; @@ -872,28 +922,28 @@ fn _confirm_invite(org_id: &str, org_user_id: &str, key: &str, headers: &AdminHe user_to_confirm.akey = key.to_string(); if CONFIG.mail_enabled() { - let org_name = match Organization::find_by_uuid(org_id, conn) { + let 
org_name = match Organization::find_by_uuid(org_id, conn).await { Some(org) => org.name, None => err!("Error looking up organization."), }; - let address = match User::find_by_uuid(&user_to_confirm.user_uuid, conn) { + let address = match User::find_by_uuid(&user_to_confirm.user_uuid, conn).await { Some(user) => user.email, None => err!("Error looking up user."), }; - mail::send_invite_confirmed(&address, &org_name)?; + mail::send_invite_confirmed(&address, &org_name).await?; } - user_to_confirm.save(conn) + user_to_confirm.save(conn).await } #[get("/organizations//users/")] -fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult { - let user = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { +async fn get_user(org_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> JsonResult { + let user = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn).await { Some(user) => user, None => err!("The specified user isn't a member of the organization"), }; - Ok(Json(user.to_json_details(&conn))) + Ok(Json(user.to_json_details(&conn).await)) } #[derive(Deserialize)] @@ -905,18 +955,18 @@ struct EditUserData { } #[put("/organizations//users/", data = "", rank = 1)] -fn put_organization_user( +async fn put_organization_user( org_id: String, org_user_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn, ) -> EmptyResult { - edit_user(org_id, org_user_id, data, headers, conn) + edit_user(org_id, org_user_id, data, headers, conn).await } #[post("/organizations//users/", data = "", rank = 1)] -fn edit_user( +async fn edit_user( org_id: String, org_user_id: String, data: JsonUpcase, @@ -930,7 +980,7 @@ fn edit_user( None => err!("Invalid type"), }; - let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn) { + let mut user_to_edit = match UserOrganization::find_by_uuid_and_org(&org_user_id, &org_id, &conn).await { 
Some(user) => user, None => err!("The specified user isn't member of the organization"), }; @@ -948,7 +998,7 @@ fn edit_user( if user_to_edit.atype == UserOrgType::Owner && new_type != UserOrgType::Owner { // Removing owner permmission, check that there are at least another owner - let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).len(); + let num_owners = UserOrganization::find_by_org_and_type(&org_id, UserOrgType::Owner as i32, &conn).await.len(); if num_owners <= 1 { err!("Can't delete the last owner") @@ -959,14 +1009,14 @@ fn edit_user( user_to_edit.atype = new_type as i32; // Delete all the odd collections - for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &user_to_edit.user_uuid, &conn) { - c.delete(&conn)?; + for c in CollectionUser::find_by_organization_and_user_uuid(&org_id, &user_to_edit.user_uuid, &conn).await { + c.delete(&conn).await?; } // If no accessAll, add the collections received if !data.AccessAll { for col in data.Collections.iter().flatten() { - match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { + match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn).await { None => err!("Collection not found in Organization"), Some(collection) => { CollectionUser::save( @@ -975,22 +1025,28 @@ fn edit_user( col.ReadOnly, col.HidePasswords, &conn, - )?; + ) + .await?; } } } } - user_to_edit.save(&conn) + user_to_edit.save(&conn).await } #[delete("/organizations//users", data = "")] -fn bulk_delete_user(org_id: String, data: JsonUpcase, headers: AdminHeaders, conn: DbConn) -> Json { +async fn bulk_delete_user( + org_id: String, + data: JsonUpcase, + headers: AdminHeaders, + conn: DbConn, +) -> Json { let data: OrgBulkIds = data.into_inner().data; let mut bulk_response = Vec::new(); for org_user_id in data.Ids { - let err_msg = match _delete_user(&org_id, &org_user_id, &headers, &conn) { + let err_msg = match _delete_user(&org_id, &org_user_id, &headers, &conn).await { 
Ok(_) => String::from(""), Err(e) => format!("{:?}", e), }; @@ -1012,12 +1068,12 @@ fn bulk_delete_user(org_id: String, data: JsonUpcase, headers: Admin } #[delete("/organizations//users/")] -fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { - _delete_user(&org_id, &org_user_id, &headers, &conn) +async fn delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { + _delete_user(&org_id, &org_user_id, &headers, &conn).await } -fn _delete_user(org_id: &str, org_user_id: &str, headers: &AdminHeaders, conn: &DbConn) -> EmptyResult { - let user_to_delete = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn) { +async fn _delete_user(org_id: &str, org_user_id: &str, headers: &AdminHeaders, conn: &DbConn) -> EmptyResult { + let user_to_delete = match UserOrganization::find_by_uuid_and_org(org_user_id, org_id, conn).await { Some(user) => user, None => err!("User to delete isn't member of the organization"), }; @@ -1028,23 +1084,28 @@ fn _delete_user(org_id: &str, org_user_id: &str, headers: &AdminHeaders, conn: & if user_to_delete.atype == UserOrgType::Owner { // Removing owner, check that there are at least another owner - let num_owners = UserOrganization::find_by_org_and_type(org_id, UserOrgType::Owner as i32, conn).len(); + let num_owners = UserOrganization::find_by_org_and_type(org_id, UserOrgType::Owner as i32, conn).await.len(); if num_owners <= 1 { err!("Can't delete the last owner") } } - user_to_delete.delete(conn) + user_to_delete.delete(conn).await } #[post("/organizations//users//delete")] -fn post_delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { - delete_user(org_id, org_user_id, headers, conn) +async fn post_delete_user(org_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult { + delete_user(org_id, org_user_id, headers, conn).await } 
#[post("/organizations//users/public-keys", data = "")] -fn bulk_public_keys(org_id: String, data: JsonUpcase, _headers: AdminHeaders, conn: DbConn) -> Json { +async fn bulk_public_keys( + org_id: String, + data: JsonUpcase, + _headers: AdminHeaders, + conn: DbConn, +) -> Json { let data: OrgBulkIds = data.into_inner().data; let mut bulk_response = Vec::new(); @@ -1052,8 +1113,8 @@ fn bulk_public_keys(org_id: String, data: JsonUpcase, _headers: Admi // If the user does not exists, just ignore it, and do not return any information regarding that UserOrg UUID. // The web-vault will then ignore that user for the folowing steps. for user_org_id in data.Ids { - match UserOrganization::find_by_uuid_and_org(&user_org_id, &org_id, &conn) { - Some(user_org) => match User::find_by_uuid(&user_org.user_uuid, &conn) { + match UserOrganization::find_by_uuid_and_org(&user_org_id, &org_id, &conn).await { + Some(user_org) => match User::find_by_uuid(&user_org.user_uuid, &conn).await { Some(user) => bulk_response.push(json!( { "Object": "organizationUserPublicKeyResponseModel", @@ -1096,29 +1157,27 @@ struct RelationsData { } #[post("/ciphers/import-organization?", data = "")] -fn post_org_import( - query: Form, +async fn post_org_import( + query: OrgIdData, data: JsonUpcase, headers: AdminHeaders, conn: DbConn, - nt: Notify, + nt: Notify<'_>, ) -> EmptyResult { let data: ImportData = data.into_inner().data; - let org_id = query.into_inner().organization_id; + let org_id = query.organization_id; - // Read and create the collections - let collections: Vec<_> = data - .Collections - .into_iter() - .map(|coll| { + let collections = stream::iter(data.Collections) + .then(|coll| async { let collection = Collection::new(org_id.clone(), coll.Name); - if collection.save(&conn).is_err() { + if collection.save(&conn).await.is_err() { err!("Failed to create Collection"); } Ok(collection) }) - .collect(); + .collect::>() + .await; // Read the relations between collections and ciphers let mut 
relations = Vec::new(); @@ -1128,17 +1187,14 @@ fn post_org_import( let headers: Headers = headers.into(); - // Read and create the ciphers - let ciphers: Vec<_> = data - .Ciphers - .into_iter() - .map(|cipher_data| { + let ciphers = stream::iter(data.Ciphers) + .then(|cipher_data| async { let mut cipher = Cipher::new(cipher_data.Type, cipher_data.Name.clone()); - update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::CipherCreate) - .ok(); + update_cipher_from_data(&mut cipher, cipher_data, &headers, false, &conn, &nt, UpdateType::None).await.ok(); cipher }) - .collect(); + .collect::>() + .await; // Assign the collections for (cipher_index, coll_index) in relations { @@ -1149,16 +1205,16 @@ fn post_org_import( Err(_) => err!("Failed to assign to collection"), }; - CollectionCipher::save(cipher_id, coll_id, &conn)?; + CollectionCipher::save(cipher_id, coll_id, &conn).await?; } let mut user = headers.user; - user.update_revision(&conn) + user.update_revision(&conn).await } #[get("/organizations//policies")] -fn list_policies(org_id: String, _headers: AdminHeaders, conn: DbConn) -> Json { - let policies = OrgPolicy::find_by_org(&org_id, &conn); +async fn list_policies(org_id: String, _headers: AdminHeaders, conn: DbConn) -> Json { + let policies = OrgPolicy::find_by_org(&org_id, &conn).await; let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); Json(json!({ @@ -1169,7 +1225,7 @@ fn list_policies(org_id: String, _headers: AdminHeaders, conn: DbConn) -> Json/policies/token?")] -fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResult { +async fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResult { let invite = crate::auth::decode_invite(&token)?; let invite_org_id = match invite.org_id { @@ -1182,7 +1238,7 @@ fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResul } // TODO: We receive the invite token as ?token=<>, validate it 
contains the org id - let policies = OrgPolicy::find_by_org(&org_id, &conn); + let policies = OrgPolicy::find_by_org(&org_id, &conn).await; let policies_json: Vec = policies.iter().map(OrgPolicy::to_json).collect(); Ok(Json(json!({ @@ -1193,13 +1249,13 @@ fn list_policies_token(org_id: String, token: String, conn: DbConn) -> JsonResul } #[get("/organizations//policies/")] -fn get_policy(org_id: String, pol_type: i32, _headers: AdminHeaders, conn: DbConn) -> JsonResult { +async fn get_policy(org_id: String, pol_type: i32, _headers: AdminHeaders, conn: DbConn) -> JsonResult { let pol_type_enum = match OrgPolicyType::from_i32(pol_type) { Some(pt) => pt, None => err!("Invalid or unsupported policy type"), }; - let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) { + let policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn).await { Some(p) => p, None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()), }; @@ -1216,7 +1272,7 @@ struct PolicyData { } #[put("/organizations//policies/", data = "")] -fn put_policy( +async fn put_policy( org_id: String, pol_type: i32, data: Json, @@ -1232,10 +1288,8 @@ fn put_policy( // If enabling the TwoFactorAuthentication policy, remove this org's members that do have 2FA if pol_type_enum == OrgPolicyType::TwoFactorAuthentication && data.enabled { - let org_members = UserOrganization::find_by_org(&org_id, &conn); - - for member in org_members.into_iter() { - let user_twofactor_disabled = TwoFactor::find_by_user(&member.user_uuid, &conn).is_empty(); + for member in UserOrganization::find_by_org(&org_id, &conn).await.into_iter() { + let user_twofactor_disabled = TwoFactor::find_by_user(&member.user_uuid, &conn).await.is_empty(); // Policy only applies to non-Owner/non-Admin members who have accepted joining the org if user_twofactor_disabled @@ -1243,24 +1297,23 @@ fn put_policy( && member.status != UserOrgStatus::Invited as i32 { if CONFIG.mail_enabled() { - let org = 
Organization::find_by_uuid(&member.org_uuid, &conn).unwrap(); - let user = User::find_by_uuid(&member.user_uuid, &conn).unwrap(); + let org = Organization::find_by_uuid(&member.org_uuid, &conn).await.unwrap(); + let user = User::find_by_uuid(&member.user_uuid, &conn).await.unwrap(); - mail::send_2fa_removed_from_org(&user.email, &org.name)?; + mail::send_2fa_removed_from_org(&user.email, &org.name).await?; } - member.delete(&conn)?; + member.delete(&conn).await?; } } } // If enabling the SingleOrg policy, remove this org's members that are members of other orgs if pol_type_enum == OrgPolicyType::SingleOrg && data.enabled { - let org_members = UserOrganization::find_by_org(&org_id, &conn); - - for member in org_members.into_iter() { + for member in UserOrganization::find_by_org(&org_id, &conn).await.into_iter() { // Policy only applies to non-Owner/non-Admin members who have accepted joining the org if member.atype < UserOrgType::Admin && member.status != UserOrgStatus::Invited as i32 { let is_member_of_another_org = UserOrganization::find_any_state_by_user(&member.user_uuid, &conn) + .await .into_iter() // Other UserOrganization's where they have accepted being a member of .filter(|uo| uo.uuid != member.uuid && uo.status != UserOrgStatus::Invited as i32) @@ -1269,25 +1322,25 @@ fn put_policy( if is_member_of_another_org { if CONFIG.mail_enabled() { - let org = Organization::find_by_uuid(&member.org_uuid, &conn).unwrap(); - let user = User::find_by_uuid(&member.user_uuid, &conn).unwrap(); + let org = Organization::find_by_uuid(&member.org_uuid, &conn).await.unwrap(); + let user = User::find_by_uuid(&member.user_uuid, &conn).await.unwrap(); - mail::send_single_org_removed_from_org(&user.email, &org.name)?; + mail::send_single_org_removed_from_org(&user.email, &org.name).await?; } - member.delete(&conn)?; + member.delete(&conn).await?; } } } } - let mut policy = match OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn) { + let mut policy = match 
OrgPolicy::find_by_org_and_type(&org_id, pol_type, &conn).await { Some(p) => p, None => OrgPolicy::new(org_id, pol_type_enum, "{}".to_string()), }; policy.enabled = data.enabled; policy.data = serde_json::to_string(&data.data)?; - policy.save(&conn)?; + policy.save(&conn).await?; Ok(Json(policy.to_json())) } @@ -1360,7 +1413,7 @@ struct OrgImportData { } #[post("/organizations//import", data = "")] -fn import(org_id: String, data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn import(org_id: String, data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data = data.into_inner().data; // TODO: Currently we aren't storing the externalId's anywhere, so we also don't have a way @@ -1369,7 +1422,7 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // as opposed to upstream which only removes auto-imported users. // User needs to be admin or owner to use the Directry Connector - match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) { + match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn).await { Some(user_org) if user_org.atype >= UserOrgType::Admin => { /* Okay, nothing to do */ } Some(_) => err!("User has insufficient permissions to use Directory Connector"), None => err!("User not part of organization"), @@ -1378,13 +1431,13 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con for user_data in &data.Users { if user_data.Deleted { // If user is marked for deletion and it exists, delete it - if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn) { - user_org.delete(&conn)?; + if let Some(user_org) = UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).await { + user_org.delete(&conn).await?; } // If user is not part of the organization, but it exists - } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() { - if let Some(user) = 
User::find_by_mail(&user_data.Email, &conn) { + } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).await.is_none() { + if let Some(user) = User::find_by_mail(&user_data.Email, &conn).await { let user_org_status = if CONFIG.mail_enabled() { UserOrgStatus::Invited as i32 } else { @@ -1396,10 +1449,10 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con new_org_user.atype = UserOrgType::User as i32; new_org_user.status = user_org_status; - new_org_user.save(&conn)?; + new_org_user.save(&conn).await?; if CONFIG.mail_enabled() { - let org_name = match Organization::find_by_uuid(&org_id, &conn) { + let org_name = match Organization::find_by_uuid(&org_id, &conn).await { Some(org) => org.name, None => err!("Error looking up organization"), }; @@ -1411,7 +1464,8 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con Some(new_org_user.uuid), &org_name, Some(headers.user.email.clone()), - )?; + ) + .await?; } } } @@ -1419,10 +1473,10 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) if data.OverwriteExisting { - for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) { - if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { + for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn).await { + if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).await.map(|u| u.email) { if !data.Users.iter().any(|u| u.Email == user_email) { - user_org.delete(&conn)?; + user_org.delete(&conn).await?; } } } diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index 72437f15..4f3291dc 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -1,14 +1,15 @@ -use std::{io::Read, path::Path}; +use std::path::Path; use 
chrono::{DateTime, Duration, Utc}; -use multipart::server::{save::SavedData, Multipart, SaveResult}; -use rocket::{http::ContentType, response::NamedFile, Data}; -use rocket_contrib::json::Json; +use rocket::form::Form; +use rocket::fs::NamedFile; +use rocket::fs::TempFile; +use rocket::serde::json::Json; use serde_json::Value; use crate::{ api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, UpdateType}, - auth::{Headers, Host}, + auth::{ClientIp, Headers, Host}, db::{models::*, DbConn, DbPool}, util::SafeString, CONFIG, @@ -31,10 +32,10 @@ pub fn routes() -> Vec { ] } -pub fn purge_sends(pool: DbPool) { +pub async fn purge_sends(pool: DbPool) { debug!("Purging sends"); - if let Ok(conn) = pool.get() { - Send::purge(&conn); + if let Ok(conn) = pool.get().await { + Send::purge(&conn).await; } else { error!("Failed to get DB connection while purging sends") } @@ -67,10 +68,10 @@ struct SendData { /// /// There is also a Vaultwarden-specific `sends_allowed` config setting that /// controls this policy globally. -fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult { +async fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::DisableSend; - if !CONFIG.sends_allowed() || OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { + if !CONFIG.sends_allowed() || OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn).await { err!("Due to an Enterprise Policy, you are only able to delete an existing Send.") } Ok(()) @@ -82,10 +83,10 @@ fn enforce_disable_send_policy(headers: &Headers, conn: &DbConn) -> EmptyResult /// but is allowed to remove this option from an existing Send. 
/// /// Ref: https://bitwarden.com/help/article/policies/#send-options -fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &DbConn) -> EmptyResult { +async fn enforce_disable_hide_email_policy(data: &SendData, headers: &Headers, conn: &DbConn) -> EmptyResult { let user_uuid = &headers.user.uuid; let hide_email = data.HideEmail.unwrap_or(false); - if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn) { + if hide_email && OrgPolicy::is_hide_email_disabled(user_uuid, conn).await { err!( "Due to an Enterprise Policy, you are not allowed to hide your email address \ from recipients when creating or editing a Send." @@ -134,9 +135,9 @@ fn create_send(data: SendData, user_uuid: String) -> ApiResult { } #[get("/sends")] -fn get_sends(headers: Headers, conn: DbConn) -> Json { +async fn get_sends(headers: Headers, conn: DbConn) -> Json { let sends = Send::find_by_user(&headers.user.uuid, &conn); - let sends_json: Vec = sends.iter().map(|s| s.to_json()).collect(); + let sends_json: Vec = sends.await.iter().map(|s| s.to_json()).collect(); Json(json!({ "Data": sends_json, @@ -146,8 +147,8 @@ fn get_sends(headers: Headers, conn: DbConn) -> Json { } #[get("/sends/")] -fn get_send(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { - let send = match Send::find_by_uuid(&uuid, &conn) { +async fn get_send(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { + let send = match Send::find_by_uuid(&uuid, &conn).await { Some(send) => send, None => err!("Send not found"), }; @@ -160,42 +161,40 @@ fn get_send(uuid: String, headers: Headers, conn: DbConn) -> JsonResult { } #[post("/sends", data = "")] -fn post_send(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - enforce_disable_send_policy(&headers, &conn)?; +async fn post_send(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + enforce_disable_send_policy(&headers, &conn).await?; let data: SendData = 
data.into_inner().data; - enforce_disable_hide_email_policy(&data, &headers, &conn)?; + enforce_disable_hide_email_policy(&data, &headers, &conn).await?; if data.Type == SendType::File as i32 { err!("File sends should use /api/sends/file") } let mut send = create_send(data, headers.user.uuid)?; - send.save(&conn)?; - nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn)); + send.save(&conn).await?; + nt.send_send_update(UpdateType::SyncSendCreate, &send, &send.update_users_revision(&conn).await).await; Ok(Json(send.to_json())) } -#[post("/sends/file", format = "multipart/form-data", data = "")] -fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - enforce_disable_send_policy(&headers, &conn)?; - - let boundary = content_type.params().next().expect("No boundary provided").1; +#[derive(FromForm)] +struct UploadData<'f> { + model: Json>, + data: TempFile<'f>, +} - let mut mpart = Multipart::with_body(data.open(), boundary); +#[post("/sends/file", format = "multipart/form-data", data = "")] +async fn post_send_file(data: Form>, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + enforce_disable_send_policy(&headers, &conn).await?; - // First entry is the SendData JSON - let mut model_entry = match mpart.read_entry()? 
{ - Some(e) if &*e.headers.name == "model" => e, - Some(_) => err!("Invalid entry name"), - None => err!("No model entry present"), - }; + let UploadData { + model, + mut data, + } = data.into_inner(); + let model = model.into_inner().data; - let mut buf = String::new(); - model_entry.data.read_to_string(&mut buf)?; - let data = serde_json::from_str::>(&buf)?; - enforce_disable_hide_email_policy(&data.data, &headers, &conn)?; + enforce_disable_hide_email_policy(&model, &headers, &conn).await?; // Get the file length and add an extra 5% to avoid issues const SIZE_525_MB: u64 = 550_502_400; @@ -203,7 +202,7 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn let size_limit = match CONFIG.user_attachment_limit() { Some(0) => err!("File uploads are disabled"), Some(limit_kb) => { - let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &conn); + let left = (limit_kb * 1024) - Attachment::size_by_user(&headers.user.uuid, &conn).await; if left <= 0 { err!("Attachment storage limit reached! Delete some attachments to free up space") } @@ -212,51 +211,47 @@ fn post_send_file(data: Data, content_type: &ContentType, headers: Headers, conn None => SIZE_525_MB, }; - // Create the Send - let mut send = create_send(data.data, headers.user.uuid)?; - let file_id = crate::crypto::generate_send_id(); - + let mut send = create_send(model, headers.user.uuid)?; if send.atype != SendType::File as i32 { err!("Send content is not a file"); } - let file_path = Path::new(&CONFIG.sends_folder()).join(&send.uuid).join(&file_id); + // There seems to be a bug somewhere regarding uploading attachments using the Android Client (Maybe iOS too?) + // See: https://github.com/dani-garcia/vaultwarden/issues/2644 + // Since all other clients seem to match TempFile::File and not TempFile::Buffered lets catch this and return an error for now. 
+ // We need to figure out how to solve this, but for now it's better to not accept these attachments since they will be broken. + if let TempFile::Buffered { + content: _, + } = &data + { + err!("Error reading send file data. Please try an other client."); + } - // Read the data entry and save the file - let mut data_entry = match mpart.read_entry()? { - Some(e) if &*e.headers.name == "data" => e, - Some(_) => err!("Invalid entry name"), - None => err!("No model entry present"), - }; + let size = data.len(); + if size > size_limit { + err!("Attachment storage limit exceeded with this file"); + } - let size = match data_entry.data.save().memory_threshold(0).size_limit(size_limit).with_path(&file_path) { - SaveResult::Full(SavedData::File(_, size)) => size as i32, - SaveResult::Full(other) => { - std::fs::remove_file(&file_path).ok(); - err!(format!("Attachment is not a file: {:?}", other)); - } - SaveResult::Partial(_, reason) => { - std::fs::remove_file(&file_path).ok(); - err!(format!("Attachment storage limit exceeded with this file: {:?}", reason)); - } - SaveResult::Error(e) => { - std::fs::remove_file(&file_path).ok(); - err!(format!("Error: {:?}", e)); - } - }; + let file_id = crate::crypto::generate_send_id(); + let folder_path = tokio::fs::canonicalize(&CONFIG.sends_folder()).await?.join(&send.uuid); + let file_path = folder_path.join(&file_id); + tokio::fs::create_dir_all(&folder_path).await?; + + if let Err(_err) = data.persist_to(&file_path).await { + data.move_copy_to(file_path).await? 
+ } - // Set ID and sizes let mut data_value: Value = serde_json::from_str(&send.data)?; if let Some(o) = data_value.as_object_mut() { o.insert(String::from("Id"), Value::String(file_id)); o.insert(String::from("Size"), Value::Number(size.into())); - o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size))); + o.insert(String::from("SizeName"), Value::String(crate::util::get_display_size(size as i32))); } send.data = serde_json::to_string(&data_value)?; // Save the changes in the database - send.save(&conn)?; - nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn)); + send.save(&conn).await?; + nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn).await).await; Ok(Json(send.to_json())) } @@ -268,8 +263,8 @@ pub struct SendAccessData { } #[post("/sends/access/", data = "")] -fn post_access(access_id: String, data: JsonUpcase, conn: DbConn) -> JsonResult { - let mut send = match Send::find_by_access_id(&access_id, &conn) { +async fn post_access(access_id: String, data: JsonUpcase, conn: DbConn, ip: ClientIp) -> JsonResult { + let mut send = match Send::find_by_access_id(&access_id, &conn).await { Some(s) => s, None => err_code!(SEND_INACCESSIBLE_MSG, 404), }; @@ -297,8 +292,8 @@ fn post_access(access_id: String, data: JsonUpcase, conn: DbConn if send.password_hash.is_some() { match data.into_inner().data.Password { Some(ref p) if send.check_password(p) => { /* Nothing to do here */ } - Some(_) => err!("Invalid password."), - None => err_code!("Password not provided", 401), + Some(_) => err!("Invalid password", format!("IP: {}.", ip.ip)), + None => err_code!("Password not provided", format!("IP: {}.", ip.ip), 401), } } @@ -307,20 +302,20 @@ fn post_access(access_id: String, data: JsonUpcase, conn: DbConn send.access_count += 1; } - send.save(&conn)?; + send.save(&conn).await?; - Ok(Json(send.to_json_access(&conn))) + Ok(Json(send.to_json_access(&conn).await)) } 
#[post("/sends//access/file/", data = "")] -fn post_access_file( +async fn post_access_file( send_id: String, file_id: String, data: JsonUpcase, host: Host, conn: DbConn, ) -> JsonResult { - let mut send = match Send::find_by_uuid(&send_id, &conn) { + let mut send = match Send::find_by_uuid(&send_id, &conn).await { Some(s) => s, None => err_code!(SEND_INACCESSIBLE_MSG, 404), }; @@ -355,7 +350,7 @@ fn post_access_file( send.access_count += 1; - send.save(&conn)?; + send.save(&conn).await?; let token_claims = crate::auth::generate_send_claims(&send_id, &file_id); let token = crate::auth::encode_jwt(&token_claims); @@ -367,23 +362,29 @@ fn post_access_file( } #[get("/sends//?")] -fn download_send(send_id: SafeString, file_id: SafeString, t: String) -> Option { +async fn download_send(send_id: SafeString, file_id: SafeString, t: String) -> Option { if let Ok(claims) = crate::auth::decode_send(&t) { if claims.sub == format!("{}/{}", send_id, file_id) { - return NamedFile::open(Path::new(&CONFIG.sends_folder()).join(send_id).join(file_id)).ok(); + return NamedFile::open(Path::new(&CONFIG.sends_folder()).join(send_id).join(file_id)).await.ok(); } } None } #[put("/sends/", data = "")] -fn put_send(id: String, data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - enforce_disable_send_policy(&headers, &conn)?; +async fn put_send( + id: String, + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify<'_>, +) -> JsonResult { + enforce_disable_send_policy(&headers, &conn).await?; let data: SendData = data.into_inner().data; - enforce_disable_hide_email_policy(&data, &headers, &conn)?; + enforce_disable_hide_email_policy(&data, &headers, &conn).await?; - let mut send = match Send::find_by_uuid(&id, &conn) { + let mut send = match Send::find_by_uuid(&id, &conn).await { Some(s) => s, None => err!("Send not found"), }; @@ -430,15 +431,15 @@ fn put_send(id: String, data: JsonUpcase, headers: Headers, conn: DbCo 
send.set_password(Some(&password)); } - send.save(&conn)?; - nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn)); + send.save(&conn).await?; + nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn).await).await; Ok(Json(send.to_json())) } #[delete("/sends/")] -fn delete_send(id: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { - let send = match Send::find_by_uuid(&id, &conn) { +async fn delete_send(id: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> EmptyResult { + let send = match Send::find_by_uuid(&id, &conn).await { Some(s) => s, None => err!("Send not found"), }; @@ -447,17 +448,17 @@ fn delete_send(id: String, headers: Headers, conn: DbConn, nt: Notify) -> EmptyR err!("Send is not owned by user") } - send.delete(&conn)?; - nt.send_send_update(UpdateType::SyncSendDelete, &send, &send.update_users_revision(&conn)); + send.delete(&conn).await?; + nt.send_send_update(UpdateType::SyncSendDelete, &send, &send.update_users_revision(&conn).await).await; Ok(()) } #[put("/sends//remove-password")] -fn put_remove_password(id: String, headers: Headers, conn: DbConn, nt: Notify) -> JsonResult { - enforce_disable_send_policy(&headers, &conn)?; +async fn put_remove_password(id: String, headers: Headers, conn: DbConn, nt: Notify<'_>) -> JsonResult { + enforce_disable_send_policy(&headers, &conn).await?; - let mut send = match Send::find_by_uuid(&id, &conn) { + let mut send = match Send::find_by_uuid(&id, &conn).await { Some(s) => s, None => err!("Send not found"), }; @@ -467,8 +468,8 @@ fn put_remove_password(id: String, headers: Headers, conn: DbConn, nt: Notify) - } send.set_password(None); - send.save(&conn)?; - nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn)); + send.save(&conn).await?; + nt.send_send_update(UpdateType::SyncSendUpdate, &send, &send.update_users_revision(&conn).await).await; Ok(Json(send.to_json())) } diff 
--git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index e72d7b29..542651dd 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -1,6 +1,6 @@ use data_encoding::BASE32; +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use crate::{ api::{ @@ -21,7 +21,7 @@ pub fn routes() -> Vec { } #[post("/two-factor/get-authenticator", data = "")] -fn generate_authenticator(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn generate_authenticator(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: PasswordData = data.into_inner().data; let user = headers.user; @@ -30,7 +30,7 @@ fn generate_authenticator(data: JsonUpcase, headers: Headers, conn } let type_ = TwoFactorType::Authenticator as i32; - let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn); + let twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await; let (enabled, key) = match twofactor { Some(tf) => (true, tf.data), @@ -53,7 +53,7 @@ struct EnableAuthenticatorData { } #[post("/two-factor/authenticator", data = "")] -fn activate_authenticator( +async fn activate_authenticator( data: JsonUpcase, headers: Headers, ip: ClientIp, @@ -81,9 +81,9 @@ fn activate_authenticator( } // Validate the token provided with the key, and save new twofactor - validate_totp_code(&user.uuid, &token, &key.to_uppercase(), &ip, &conn)?; + validate_totp_code(&user.uuid, &token, &key.to_uppercase(), &ip, &conn).await?; - _generate_recover_code(&mut user, &conn); + _generate_recover_code(&mut user, &conn).await; Ok(Json(json!({ "Enabled": true, @@ -93,16 +93,16 @@ fn activate_authenticator( } #[put("/two-factor/authenticator", data = "")] -fn activate_authenticator_put( +async fn activate_authenticator_put( data: JsonUpcase, headers: Headers, ip: ClientIp, conn: DbConn, ) -> JsonResult { - activate_authenticator(data, headers, ip, 
conn) + activate_authenticator(data, headers, ip, conn).await } -pub fn validate_totp_code_str( +pub async fn validate_totp_code_str( user_uuid: &str, totp_code: &str, secret: &str, @@ -113,10 +113,16 @@ pub fn validate_totp_code_str( err!("TOTP code is not a number"); } - validate_totp_code(user_uuid, totp_code, secret, ip, conn) + validate_totp_code(user_uuid, totp_code, secret, ip, conn).await } -pub fn validate_totp_code(user_uuid: &str, totp_code: &str, secret: &str, ip: &ClientIp, conn: &DbConn) -> EmptyResult { +pub async fn validate_totp_code( + user_uuid: &str, + totp_code: &str, + secret: &str, + ip: &ClientIp, + conn: &DbConn, +) -> EmptyResult { use totp_lite::{totp_custom, Sha1}; let decoded_secret = match BASE32.decode(secret.as_bytes()) { @@ -124,15 +130,16 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: &str, secret: &str, ip: &C Err(_) => err!("Invalid TOTP secret"), }; - let mut twofactor = match TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Authenticator as i32, conn) { - Some(tf) => tf, - _ => TwoFactor::new(user_uuid.to_string(), TwoFactorType::Authenticator, secret.to_string()), - }; + let mut twofactor = + match TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Authenticator as i32, conn).await { + Some(tf) => tf, + _ => TwoFactor::new(user_uuid.to_string(), TwoFactorType::Authenticator, secret.to_string()), + }; // The amount of steps back and forward in time // Also check if we need to disable time drifted TOTP codes. // If that is the case, we set the steps to 0 so only the current TOTP is valid. 
- let steps = !CONFIG.authenticator_disable_time_drift() as i64; + let steps = i64::from(!CONFIG.authenticator_disable_time_drift()); // Get the current system time in UNIX Epoch (UTC) let current_time = chrono::Utc::now(); @@ -147,7 +154,7 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: &str, secret: &str, ip: &C let generated = totp_custom::(30, 6, &decoded_secret, time); // Check the the given code equals the generated and if the time_step is larger then the one last used. - if generated == totp_code && time_step > twofactor.last_used as i64 { + if generated == totp_code && time_step > i64::from(twofactor.last_used) { // If the step does not equals 0 the time is drifted either server or client side. if step != 0 { warn!("TOTP Time drift detected. The step offset is {}", step); @@ -156,9 +163,9 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: &str, secret: &str, ip: &C // Save the last used time step so only totp time steps higher then this one are allowed. // This will also save a newly created twofactor if the code is correct. twofactor.last_used = time_step as i32; - twofactor.save(conn)?; + twofactor.save(conn).await?; return Ok(()); - } else if generated == totp_code && time_step <= twofactor.last_used as i64 { + } else if generated == totp_code && time_step <= i64::from(twofactor.last_used) { warn!("This TOTP or a TOTP code within {} steps back or forward has already been used!", steps); err!(format!("Invalid TOTP code! 
Server time: {} IP: {}", current_time.format("%F %T UTC"), ip.ip)); } diff --git a/src/api/core/two_factor/duo.rs b/src/api/core/two_factor/duo.rs index 606e32b2..ccfa05be 100644 --- a/src/api/core/two_factor/duo.rs +++ b/src/api/core/two_factor/duo.rs @@ -1,7 +1,7 @@ use chrono::Utc; use data_encoding::BASE64; +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use crate::{ api::{core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, PasswordData}, @@ -89,14 +89,14 @@ impl DuoStatus { const DISABLED_MESSAGE_DEFAULT: &str = ""; #[post("/two-factor/get-duo", data = "")] -fn get_duo(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn get_duo(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: PasswordData = data.into_inner().data; if !headers.user.check_valid_password(&data.MasterPasswordHash) { err!("Invalid password"); } - let data = get_user_duo_data(&headers.user.uuid, &conn); + let data = get_user_duo_data(&headers.user.uuid, &conn).await; let (enabled, data) = match data { DuoStatus::Global(_) => (true, Some(DuoData::secret())), @@ -152,7 +152,7 @@ fn check_duo_fields_custom(data: &EnableDuoData) -> bool { } #[post("/two-factor/duo", data = "")] -fn activate_duo(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_duo(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EnableDuoData = data.into_inner().data; let mut user = headers.user; @@ -163,7 +163,7 @@ fn activate_duo(data: JsonUpcase, headers: Headers, conn: DbConn) let (data, data_str) = if check_duo_fields_custom(&data) { let data_req: DuoData = data.into(); let data_str = serde_json::to_string(&data_req)?; - duo_api_request("GET", "/auth/v2/check", "", &data_req).map_res("Failed to validate Duo credentials")?; + duo_api_request("GET", "/auth/v2/check", "", &data_req).await.map_res("Failed to validate Duo credentials")?; 
(data_req.obscure(), data_str) } else { (DuoData::secret(), String::new()) @@ -171,9 +171,9 @@ fn activate_duo(data: JsonUpcase, headers: Headers, conn: DbConn) let type_ = TwoFactorType::Duo; let twofactor = TwoFactor::new(user.uuid.clone(), type_, data_str); - twofactor.save(&conn)?; + twofactor.save(&conn).await?; - _generate_recover_code(&mut user, &conn); + _generate_recover_code(&mut user, &conn).await; Ok(Json(json!({ "Enabled": true, @@ -185,11 +185,11 @@ fn activate_duo(data: JsonUpcase, headers: Headers, conn: DbConn) } #[put("/two-factor/duo", data = "")] -fn activate_duo_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - activate_duo(data, headers, conn) +async fn activate_duo_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + activate_duo(data, headers, conn).await } -fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult { +async fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> EmptyResult { use reqwest::{header, Method}; use std::str::FromStr; @@ -209,7 +209,8 @@ fn duo_api_request(method: &str, path: &str, params: &str, data: &DuoData) -> Em .basic_auth(username, Some(password)) .header(header::USER_AGENT, "vaultwarden:Duo/1.0 (Rust)") .header(header::DATE, date) - .send()? + .send() + .await? 
.error_for_status()?; Ok(()) @@ -222,11 +223,11 @@ const AUTH_PREFIX: &str = "AUTH"; const DUO_PREFIX: &str = "TX"; const APP_PREFIX: &str = "APP"; -fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus { +async fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus { let type_ = TwoFactorType::Duo as i32; // If the user doesn't have an entry, disabled - let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, conn) { + let twofactor = match TwoFactor::find_by_user_and_type(uuid, type_, conn).await { Some(t) => t, None => return DuoStatus::Disabled(DuoData::global().is_some()), }; @@ -246,19 +247,20 @@ fn get_user_duo_data(uuid: &str, conn: &DbConn) -> DuoStatus { } // let (ik, sk, ak, host) = get_duo_keys(); -fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, String, String, String)> { - let data = User::find_by_mail(email, conn) - .and_then(|u| get_user_duo_data(&u.uuid, conn).data()) - .or_else(DuoData::global) - .map_res("Can't fetch Duo keys")?; +async fn get_duo_keys_email(email: &str, conn: &DbConn) -> ApiResult<(String, String, String, String)> { + let data = match User::find_by_mail(email, conn).await { + Some(u) => get_user_duo_data(&u.uuid, conn).await.data(), + _ => DuoData::global(), + } + .map_res("Can't fetch Duo Keys")?; Ok((data.ik, data.sk, CONFIG.get_duo_akey(), data.host)) } -pub fn generate_duo_signature(email: &str, conn: &DbConn) -> ApiResult<(String, String)> { +pub async fn generate_duo_signature(email: &str, conn: &DbConn) -> ApiResult<(String, String)> { let now = Utc::now().timestamp(); - let (ik, sk, ak, host) = get_duo_keys_email(email, conn)?; + let (ik, sk, ak, host) = get_duo_keys_email(email, conn).await?; let duo_sign = sign_duo_values(&sk, email, &ik, DUO_PREFIX, now + DUO_EXPIRE); let app_sign = sign_duo_values(&ak, email, &ik, APP_PREFIX, now + APP_EXPIRE); @@ -273,7 +275,7 @@ fn sign_duo_values(key: &str, email: &str, ikey: &str, prefix: &str, expire: i64 format!("{}|{}", cookie, 
crypto::hmac_sign(key, &cookie)) } -pub fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyResult { +pub async fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyResult { // email is as entered by the user, so it needs to be normalized before // comparison with auth_user below. let email = &email.to_lowercase(); @@ -288,7 +290,7 @@ pub fn validate_duo_login(email: &str, response: &str, conn: &DbConn) -> EmptyRe let now = Utc::now().timestamp(); - let (ik, sk, ak, _host) = get_duo_keys_email(email, conn)?; + let (ik, sk, ak, _host) = get_duo_keys_email(email, conn).await?; let auth_user = parse_duo_values(&sk, auth_sig, &ik, AUTH_PREFIX, now)?; let app_user = parse_duo_values(&ak, app_sig, &ik, APP_PREFIX, now)?; diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 998aeccf..6b7212e8 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -1,6 +1,6 @@ use chrono::{Duration, NaiveDateTime, Utc}; +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use crate::{ api::{core::two_factor::_generate_recover_code, EmptyResult, JsonResult, JsonUpcase, PasswordData}, @@ -28,13 +28,13 @@ struct SendEmailLoginData { /// User is trying to login and wants to use email 2FA. /// Does not require Bearer token #[post("/two-factor/send-email-login", data = "")] // JsonResult -fn send_email_login(data: JsonUpcase, conn: DbConn) -> EmptyResult { +async fn send_email_login(data: JsonUpcase, conn: DbConn) -> EmptyResult { let data: SendEmailLoginData = data.into_inner().data; use crate::db::models::User; // Get the user - let user = match User::find_by_mail(&data.Email, &conn) { + let user = match User::find_by_mail(&data.Email, &conn).await { Some(user) => user, None => err!("Username or password is incorrect. 
Try again."), }; @@ -48,31 +48,32 @@ fn send_email_login(data: JsonUpcase, conn: DbConn) -> Empty err!("Email 2FA is disabled") } - send_token(&user.uuid, &conn)?; + send_token(&user.uuid, &conn).await?; Ok(()) } /// Generate the token, save the data for later verification and send email to user -pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { +pub async fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { let type_ = TwoFactorType::Email as i32; - let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, type_, conn).map_res("Two factor not found")?; + let mut twofactor = + TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await.map_res("Two factor not found")?; - let generated_token = crypto::generate_token(CONFIG.email_token_size())?; + let generated_token = crypto::generate_email_token(CONFIG.email_token_size()); let mut twofactor_data = EmailTokenData::from_json(&twofactor.data)?; twofactor_data.set_token(generated_token); twofactor.data = twofactor_data.to_json(); - twofactor.save(conn)?; + twofactor.save(conn).await?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?).await?; Ok(()) } /// When user clicks on Manage email 2FA show the user the related information #[post("/two-factor/get-email", data = "")] -fn get_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn get_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: PasswordData = data.into_inner().data; let user = headers.user; @@ -80,13 +81,14 @@ fn get_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> err!("Invalid password"); } - let (enabled, mfa_email) = match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::Email as i32, &conn) { - Some(x) => { - let twofactor_data = EmailTokenData::from_json(&x.data)?; - (true, 
json!(twofactor_data.email)) - } - _ => (false, json!(null)), - }; + let (enabled, mfa_email) = + match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::Email as i32, &conn).await { + Some(x) => { + let twofactor_data = EmailTokenData::from_json(&x.data)?; + (true, json!(twofactor_data.email)) + } + _ => (false, json!(null)), + }; Ok(Json(json!({ "Email": mfa_email, @@ -105,7 +107,7 @@ struct SendEmailData { /// Send a verification email to the specified email address to check whether it exists/belongs to user. #[post("/two-factor/send-email", data = "")] -fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { +async fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) -> EmptyResult { let data: SendEmailData = data.into_inner().data; let user = headers.user; @@ -119,18 +121,18 @@ fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) - let type_ = TwoFactorType::Email as i32; - if let Some(tf) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) { - tf.delete(&conn)?; + if let Some(tf) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { + tf.delete(&conn).await?; } - let generated_token = crypto::generate_token(CONFIG.email_token_size())?; + let generated_token = crypto::generate_email_token(CONFIG.email_token_size()); let twofactor_data = EmailTokenData::new(data.Email, generated_token); // Uses EmailVerificationChallenge as type to show that it's not verified yet. let twofactor = TwoFactor::new(user.uuid, TwoFactorType::EmailVerificationChallenge, twofactor_data.to_json()); - twofactor.save(&conn)?; + twofactor.save(&conn).await?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?).await?; Ok(()) } @@ -145,7 +147,7 @@ struct EmailData { /// Verify email belongs to user and can be used for 2FA email codes. 
#[put("/two-factor/email", data = "")] -fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EmailData = data.into_inner().data; let mut user = headers.user; @@ -154,7 +156,8 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes } let type_ = TwoFactorType::EmailVerificationChallenge as i32; - let mut twofactor = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).map_res("Two factor not found")?; + let mut twofactor = + TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await.map_res("Two factor not found")?; let mut email_data = EmailTokenData::from_json(&twofactor.data)?; @@ -170,9 +173,9 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes email_data.reset_token(); twofactor.atype = TwoFactorType::Email as i32; twofactor.data = email_data.to_json(); - twofactor.save(&conn)?; + twofactor.save(&conn).await?; - _generate_recover_code(&mut user, &conn); + _generate_recover_code(&mut user, &conn).await; Ok(Json(json!({ "Email": email_data.email, @@ -182,9 +185,10 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes } /// Validate the email code when used as TwoFactor token mechanism -pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { +pub async fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { let mut email_data = EmailTokenData::from_json(data)?; let mut twofactor = TwoFactor::find_by_user_and_type(user_uuid, TwoFactorType::Email as i32, conn) + .await .map_res("Two factor not found")?; let issued_token = match &email_data.last_token { Some(t) => t, @@ -197,14 +201,14 @@ pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: & email_data.reset_token(); } twofactor.data = email_data.to_json(); - twofactor.save(conn)?; + twofactor.save(conn).await?; 
err!("Token is invalid") } email_data.reset_token(); twofactor.data = email_data.to_json(); - twofactor.save(conn)?; + twofactor.save(conn).await?; let date = NaiveDateTime::from_timestamp(email_data.token_sent, 0); let max_time = CONFIG.email_expiration_time() as i64; @@ -309,18 +313,4 @@ mod tests { // If it's smaller than 3 characters it should only show asterisks. assert_eq!(result, "***@example.ext"); } - - #[test] - fn test_token() { - let result = crypto::generate_token(19).unwrap(); - - assert_eq!(result.chars().count(), 19); - } - - #[test] - fn test_token_too_large() { - let result = crypto::generate_token(20); - - assert!(result.is_err(), "too large token should give an error"); - } } diff --git a/src/api/core/two_factor/mod.rs b/src/api/core/two_factor/mod.rs index 2c48b9cf..3ecc5454 100644 --- a/src/api/core/two_factor/mod.rs +++ b/src/api/core/two_factor/mod.rs @@ -1,7 +1,7 @@ use chrono::{Duration, Utc}; use data_encoding::BASE32; +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use serde_json::Value; use crate::{ @@ -15,7 +15,6 @@ use crate::{ pub mod authenticator; pub mod duo; pub mod email; -pub mod u2f; pub mod webauthn; pub mod yubikey; @@ -25,7 +24,6 @@ pub fn routes() -> Vec { routes.append(&mut authenticator::routes()); routes.append(&mut duo::routes()); routes.append(&mut email::routes()); - routes.append(&mut u2f::routes()); routes.append(&mut webauthn::routes()); routes.append(&mut yubikey::routes()); @@ -33,8 +31,8 @@ pub fn routes() -> Vec { } #[get("/two-factor")] -fn get_twofactor(headers: Headers, conn: DbConn) -> Json { - let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn); +async fn get_twofactor(headers: Headers, conn: DbConn) -> Json { + let twofactors = TwoFactor::find_by_user(&headers.user.uuid, &conn).await; let twofactors_json: Vec = twofactors.iter().map(TwoFactor::to_json_provider).collect(); Json(json!({ @@ -68,13 +66,13 @@ struct RecoverTwoFactor { } 
#[post("/two-factor/recover", data = "")] -fn recover(data: JsonUpcase, conn: DbConn) -> JsonResult { +async fn recover(data: JsonUpcase, conn: DbConn) -> JsonResult { let data: RecoverTwoFactor = data.into_inner().data; use crate::db::models::User; // Get the user - let mut user = match User::find_by_mail(&data.Email, &conn) { + let mut user = match User::find_by_mail(&data.Email, &conn).await { Some(user) => user, None => err!("Username or password is incorrect. Try again."), }; @@ -90,19 +88,19 @@ fn recover(data: JsonUpcase, conn: DbConn) -> JsonResult { } // Remove all twofactors from the user - TwoFactor::delete_all_by_user(&user.uuid, &conn)?; + TwoFactor::delete_all_by_user(&user.uuid, &conn).await?; // Remove the recovery code, not needed without twofactors user.totp_recover = None; - user.save(&conn)?; + user.save(&conn).await?; Ok(Json(json!({}))) } -fn _generate_recover_code(user: &mut User, conn: &DbConn) { +async fn _generate_recover_code(user: &mut User, conn: &DbConn) { if user.totp_recover.is_none() { let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20])); user.totp_recover = Some(totp_recover); - user.save(conn).ok(); + user.save(conn).await.ok(); } } @@ -114,7 +112,7 @@ struct DisableTwoFactorData { } #[post("/two-factor/disable", data = "")] -fn disable_twofactor(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn disable_twofactor(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: DisableTwoFactorData = data.into_inner().data; let password_hash = data.MasterPasswordHash; let user = headers.user; @@ -125,23 +123,24 @@ fn disable_twofactor(data: JsonUpcase, headers: Headers, c let type_ = data.Type.into_i32()?; - if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) { - twofactor.delete(&conn)?; + if let Some(twofactor) = TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { + twofactor.delete(&conn).await?; } - let twofactor_disabled = 
TwoFactor::find_by_user(&user.uuid, &conn).is_empty(); + let twofactor_disabled = TwoFactor::find_by_user(&user.uuid, &conn).await.is_empty(); if twofactor_disabled { - let policy_type = OrgPolicyType::TwoFactorAuthentication; - let org_list = UserOrganization::find_by_user_and_policy(&user.uuid, policy_type, &conn); - - for user_org in org_list.into_iter() { + for user_org in + UserOrganization::find_by_user_and_policy(&user.uuid, OrgPolicyType::TwoFactorAuthentication, &conn) + .await + .into_iter() + { if user_org.atype < UserOrgType::Admin { if CONFIG.mail_enabled() { - let org = Organization::find_by_uuid(&user_org.org_uuid, &conn).unwrap(); - mail::send_2fa_removed_from_org(&user.email, &org.name)?; + let org = Organization::find_by_uuid(&user_org.org_uuid, &conn).await.unwrap(); + mail::send_2fa_removed_from_org(&user.email, &org.name).await?; } - user_org.delete(&conn)?; + user_org.delete(&conn).await?; } } } @@ -154,18 +153,18 @@ fn disable_twofactor(data: JsonUpcase, headers: Headers, c } #[put("/two-factor/disable", data = "")] -fn disable_twofactor_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - disable_twofactor(data, headers, conn) +async fn disable_twofactor_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + disable_twofactor(data, headers, conn).await } -pub fn send_incomplete_2fa_notifications(pool: DbPool) { +pub async fn send_incomplete_2fa_notifications(pool: DbPool) { debug!("Sending notifications for incomplete 2FA logins"); if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() { return; } - let conn = match pool.get() { + let conn = match pool.get().await { Ok(conn) => conn, _ => { error!("Failed to get DB connection in send_incomplete_2fa_notifications()"); @@ -175,15 +174,17 @@ pub fn send_incomplete_2fa_notifications(pool: DbPool) { let now = Utc::now().naive_utc(); let time_limit = Duration::minutes(CONFIG.incomplete_2fa_time_limit()); - let incomplete_logins = 
TwoFactorIncomplete::find_logins_before(&(now - time_limit), &conn); + let time_before = now - time_limit; + let incomplete_logins = TwoFactorIncomplete::find_logins_before(&time_before, &conn).await; for login in incomplete_logins { - let user = User::find_by_uuid(&login.user_uuid, &conn).expect("User not found"); + let user = User::find_by_uuid(&login.user_uuid, &conn).await.expect("User not found"); info!( "User {} did not complete a 2FA login within the configured time limit. IP: {}", user.email, login.ip_address ); mail::send_incomplete_2fa_login(&user.email, &login.ip_address, &login.login_time, &login.device_name) + .await .expect("Error sending incomplete 2FA email"); - login.delete(&conn).expect("Error deleting incomplete 2FA record"); + login.delete(&conn).await.expect("Error deleting incomplete 2FA record"); } } diff --git a/src/api/core/two_factor/u2f.rs b/src/api/core/two_factor/u2f.rs deleted file mode 100644 index f3d42c3e..00000000 --- a/src/api/core/two_factor/u2f.rs +++ /dev/null @@ -1,352 +0,0 @@ -use once_cell::sync::Lazy; -use rocket::Route; -use rocket_contrib::json::Json; -use serde_json::Value; -use u2f::{ - messages::{RegisterResponse, SignResponse, U2fSignRequest}, - protocol::{Challenge, U2f}, - register::Registration, -}; - -use crate::{ - api::{ - core::two_factor::_generate_recover_code, ApiResult, EmptyResult, JsonResult, JsonUpcase, NumberOrString, - PasswordData, - }, - auth::Headers, - db::{ - models::{TwoFactor, TwoFactorType}, - DbConn, - }, - error::Error, - CONFIG, -}; - -const U2F_VERSION: &str = "U2F_V2"; - -static APP_ID: Lazy = Lazy::new(|| format!("{}/app-id.json", &CONFIG.domain())); -static U2F: Lazy = Lazy::new(|| U2f::new(APP_ID.clone())); - -pub fn routes() -> Vec { - routes![generate_u2f, generate_u2f_challenge, activate_u2f, activate_u2f_put, delete_u2f,] -} - -#[post("/two-factor/get-u2f", data = "")] -fn generate_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - if !CONFIG.domain_set() { - 
err!("`DOMAIN` environment variable is not set. U2F disabled") - } - let data: PasswordData = data.into_inner().data; - - if !headers.user.check_valid_password(&data.MasterPasswordHash) { - err!("Invalid password"); - } - - let (enabled, keys) = get_u2f_registrations(&headers.user.uuid, &conn)?; - let keys_json: Vec = keys.iter().map(U2FRegistration::to_json).collect(); - - Ok(Json(json!({ - "Enabled": enabled, - "Keys": keys_json, - "Object": "twoFactorU2f" - }))) -} - -#[post("/two-factor/get-u2f-challenge", data = "")] -fn generate_u2f_challenge(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - let data: PasswordData = data.into_inner().data; - - if !headers.user.check_valid_password(&data.MasterPasswordHash) { - err!("Invalid password"); - } - - let _type = TwoFactorType::U2fRegisterChallenge; - let challenge = _create_u2f_challenge(&headers.user.uuid, _type, &conn).challenge; - - Ok(Json(json!({ - "UserId": headers.user.uuid, - "AppId": APP_ID.to_string(), - "Challenge": challenge, - "Version": U2F_VERSION, - }))) -} - -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] -struct EnableU2FData { - Id: NumberOrString, - // 1..5 - Name: String, - MasterPasswordHash: String, - DeviceResponse: String, -} - -// This struct is referenced from the U2F lib -// because it doesn't implement Deserialize -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[serde(remote = "Registration")] -struct RegistrationDef { - key_handle: Vec, - pub_key: Vec, - attestation_cert: Option>, - device_name: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct U2FRegistration { - pub id: i32, - pub name: String, - #[serde(with = "RegistrationDef")] - pub reg: Registration, - pub counter: u32, - compromised: bool, - pub migrated: Option, -} - -impl U2FRegistration { - fn to_json(&self) -> Value { - json!({ - "Id": self.id, - "Name": self.name, - "Compromised": self.compromised, - }) - } -} - -// This struct is copied from the U2F lib -// to 
add an optional error code -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -struct RegisterResponseCopy { - pub registration_data: String, - pub version: String, - pub client_data: String, - - pub error_code: Option, -} - -impl From for RegisterResponse { - fn from(r: RegisterResponseCopy) -> RegisterResponse { - RegisterResponse { - registration_data: r.registration_data, - version: r.version, - client_data: r.client_data, - } - } -} - -#[post("/two-factor/u2f", data = "")] -fn activate_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - let data: EnableU2FData = data.into_inner().data; - let mut user = headers.user; - - if !user.check_valid_password(&data.MasterPasswordHash) { - err!("Invalid password"); - } - - let tf_type = TwoFactorType::U2fRegisterChallenge as i32; - let tf_challenge = match TwoFactor::find_by_user_and_type(&user.uuid, tf_type, &conn) { - Some(c) => c, - None => err!("Can't recover challenge"), - }; - - let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?; - tf_challenge.delete(&conn)?; - - let response: RegisterResponseCopy = serde_json::from_str(&data.DeviceResponse)?; - - let error_code = response.error_code.clone().map_or("0".into(), NumberOrString::into_string); - - if error_code != "0" { - err!("Error registering U2F token") - } - - let registration = U2F.register_response(challenge, response.into())?; - let full_registration = U2FRegistration { - id: data.Id.into_i32()?, - name: data.Name, - reg: registration, - compromised: false, - counter: 0, - migrated: None, - }; - - let mut regs = get_u2f_registrations(&user.uuid, &conn)?.1; - - // TODO: Check that there is no repeat Id - regs.push(full_registration); - save_u2f_registrations(&user.uuid, ®s, &conn)?; - - _generate_recover_code(&mut user, &conn); - - let keys_json: Vec = regs.iter().map(U2FRegistration::to_json).collect(); - Ok(Json(json!({ - "Enabled": true, - "Keys": keys_json, - "Object": "twoFactorU2f" - }))) -} - 
-#[put("/two-factor/u2f", data = "")] -fn activate_u2f_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - activate_u2f(data, headers, conn) -} - -#[derive(Deserialize, Debug)] -#[allow(non_snake_case)] -struct DeleteU2FData { - Id: NumberOrString, - MasterPasswordHash: String, -} - -#[delete("/two-factor/u2f", data = "")] -fn delete_u2f(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - let data: DeleteU2FData = data.into_inner().data; - - let id = data.Id.into_i32()?; - - if !headers.user.check_valid_password(&data.MasterPasswordHash) { - err!("Invalid password"); - } - - let type_ = TwoFactorType::U2f as i32; - let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, type_, &conn) { - Some(tf) => tf, - None => err!("U2F data not found!"), - }; - - let mut data: Vec = match serde_json::from_str(&tf.data) { - Ok(d) => d, - Err(_) => err!("Error parsing U2F data"), - }; - - data.retain(|r| r.id != id); - - let new_data_str = serde_json::to_string(&data)?; - - tf.data = new_data_str; - tf.save(&conn)?; - - let keys_json: Vec = data.iter().map(U2FRegistration::to_json).collect(); - - Ok(Json(json!({ - "Enabled": true, - "Keys": keys_json, - "Object": "twoFactorU2f" - }))) -} - -fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge { - let challenge = U2F.generate_challenge().unwrap(); - - TwoFactor::new(user_uuid.into(), type_, serde_json::to_string(&challenge).unwrap()) - .save(conn) - .expect("Error saving challenge"); - - challenge -} - -fn save_u2f_registrations(user_uuid: &str, regs: &[U2FRegistration], conn: &DbConn) -> EmptyResult { - TwoFactor::new(user_uuid.into(), TwoFactorType::U2f, serde_json::to_string(regs)?).save(conn) -} - -fn get_u2f_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec), Error> { - let type_ = TwoFactorType::U2f as i32; - let (enabled, regs) = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) { - Some(tf) => 
(tf.enabled, tf.data), - None => return Ok((false, Vec::new())), // If no data, return empty list - }; - - let data = match serde_json::from_str(®s) { - Ok(d) => d, - Err(_) => { - // If error, try old format - let mut old_regs = _old_parse_registrations(®s); - - if old_regs.len() != 1 { - err!("The old U2F format only allows one device") - } - - // Convert to new format - let new_regs = vec![U2FRegistration { - id: 1, - name: "Unnamed U2F key".into(), - reg: old_regs.remove(0), - compromised: false, - counter: 0, - migrated: None, - }]; - - // Save new format - save_u2f_registrations(user_uuid, &new_regs, conn)?; - - new_regs - } - }; - - Ok((enabled, data)) -} - -fn _old_parse_registrations(registations: &str) -> Vec { - #[derive(Deserialize)] - struct Helper(#[serde(with = "RegistrationDef")] Registration); - - let regs: Vec = serde_json::from_str(registations).expect("Can't parse Registration data"); - - regs.into_iter().map(|r| serde_json::from_value(r).unwrap()).map(|Helper(r)| r).collect() -} - -pub fn generate_u2f_login(user_uuid: &str, conn: &DbConn) -> ApiResult { - let challenge = _create_u2f_challenge(user_uuid, TwoFactorType::U2fLoginChallenge, conn); - - let registrations: Vec<_> = get_u2f_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.reg).collect(); - - if registrations.is_empty() { - err!("No U2F devices registered") - } - - Ok(U2F.sign_request(challenge, registrations)) -} - -pub fn validate_u2f_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult { - let challenge_type = TwoFactorType::U2fLoginChallenge as i32; - let tf_challenge = TwoFactor::find_by_user_and_type(user_uuid, challenge_type, conn); - - let challenge = match tf_challenge { - Some(tf_challenge) => { - let challenge: Challenge = serde_json::from_str(&tf_challenge.data)?; - tf_challenge.delete(conn)?; - challenge - } - None => err!("Can't recover login challenge"), - }; - let response: SignResponse = serde_json::from_str(response)?; - let mut registrations = 
get_u2f_registrations(user_uuid, conn)?.1; - if registrations.is_empty() { - err!("No U2F devices registered") - } - - for reg in &mut registrations { - let response = U2F.sign_response(challenge.clone(), reg.reg.clone(), response.clone(), reg.counter); - match response { - Ok(new_counter) => { - reg.counter = new_counter; - save_u2f_registrations(user_uuid, ®istrations, conn)?; - - return Ok(()); - } - Err(u2f::u2ferror::U2fError::CounterTooLow) => { - reg.compromised = true; - save_u2f_registrations(user_uuid, ®istrations, conn)?; - - err!("This device might be compromised!"); - } - Err(e) => { - warn!("E {:#}", e); - // break; - } - } - } - err!("error verifying response") -} diff --git a/src/api/core/two_factor/webauthn.rs b/src/api/core/two_factor/webauthn.rs index ecc932ca..ab80c235 100644 --- a/src/api/core/two_factor/webauthn.rs +++ b/src/api/core/two_factor/webauthn.rs @@ -1,5 +1,5 @@ +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use serde_json::Value; use url::Url; use webauthn_rs::{base64_data::Base64UrlSafeData, proto::*, AuthenticationState, RegistrationState, Webauthn}; @@ -21,6 +21,28 @@ pub fn routes() -> Vec { routes![get_webauthn, generate_webauthn_challenge, activate_webauthn, activate_webauthn_put, delete_webauthn,] } +// Some old u2f structs still needed for migrating from u2f to WebAuthn +// Both `struct Registration` and `struct U2FRegistration` can be removed if we remove the u2f to WebAuthn migration +#[derive(Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Registration { + pub key_handle: Vec, + pub pub_key: Vec, + pub attestation_cert: Option>, + pub device_name: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct U2FRegistration { + pub id: i32, + pub name: String, + #[serde(with = "Registration")] + pub reg: Registration, + pub counter: u32, + compromised: bool, + pub migrated: Option, +} + struct WebauthnConfig { url: String, origin: Url, @@ -80,7 +102,7 @@ impl 
WebauthnRegistration { } #[post("/two-factor/get-webauthn", data = "")] -fn get_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn get_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { if !CONFIG.domain_set() { err!("`DOMAIN` environment variable is not set. Webauthn disabled") } @@ -89,7 +111,7 @@ fn get_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) err!("Invalid password"); } - let (enabled, registrations) = get_webauthn_registrations(&headers.user.uuid, &conn)?; + let (enabled, registrations) = get_webauthn_registrations(&headers.user.uuid, &conn).await?; let registrations_json: Vec = registrations.iter().map(WebauthnRegistration::to_json).collect(); Ok(Json(json!({ @@ -100,12 +122,13 @@ fn get_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) } #[post("/two-factor/get-webauthn-challenge", data = "")] -fn generate_webauthn_challenge(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn generate_webauthn_challenge(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { err!("Invalid password"); } - let registrations = get_webauthn_registrations(&headers.user.uuid, &conn)? + let registrations = get_webauthn_registrations(&headers.user.uuid, &conn) + .await? 
.1 .into_iter() .map(|r| r.credential.cred_id) // We return the credentialIds to the clients to avoid double registering @@ -121,7 +144,7 @@ fn generate_webauthn_challenge(data: JsonUpcase, headers: Headers, )?; let type_ = TwoFactorType::WebauthnRegisterChallenge; - TwoFactor::new(headers.user.uuid, type_, serde_json::to_string(&state)?).save(&conn)?; + TwoFactor::new(headers.user.uuid, type_, serde_json::to_string(&state)?).save(&conn).await?; let mut challenge_value = serde_json::to_value(challenge.public_key)?; challenge_value["status"] = "ok".into(); @@ -218,7 +241,7 @@ impl From for PublicKeyCredential { } #[post("/two-factor/webauthn", data = "")] -fn activate_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EnableWebauthnData = data.into_inner().data; let mut user = headers.user; @@ -228,10 +251,10 @@ fn activate_webauthn(data: JsonUpcase, headers: Headers, con // Retrieve and delete the saved challenge state let type_ = TwoFactorType::WebauthnRegisterChallenge as i32; - let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn) { + let state = match TwoFactor::find_by_user_and_type(&user.uuid, type_, &conn).await { Some(tf) => { let state: RegistrationState = serde_json::from_str(&tf.data)?; - tf.delete(&conn)?; + tf.delete(&conn).await?; state } None => err!("Can't recover challenge"), @@ -241,7 +264,7 @@ fn activate_webauthn(data: JsonUpcase, headers: Headers, con let (credential, _data) = WebauthnConfig::load().register_credential(&data.DeviceResponse.into(), &state, |_| Ok(false))?; - let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &conn)?.1; + let mut registrations: Vec<_> = get_webauthn_registrations(&user.uuid, &conn).await?.1; // TODO: Check for repeated ID's registrations.push(WebauthnRegistration { id: data.Id.into_i32()?, @@ -252,8 +275,10 @@ fn activate_webauthn(data: 
JsonUpcase, headers: Headers, con }); // Save the registrations and return them - TwoFactor::new(user.uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(®istrations)?).save(&conn)?; - _generate_recover_code(&mut user, &conn); + TwoFactor::new(user.uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(®istrations)?) + .save(&conn) + .await?; + _generate_recover_code(&mut user, &conn).await; let keys_json: Vec = registrations.iter().map(WebauthnRegistration::to_json).collect(); Ok(Json(json!({ @@ -264,8 +289,8 @@ fn activate_webauthn(data: JsonUpcase, headers: Headers, con } #[put("/two-factor/webauthn", data = "")] -fn activate_webauthn_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - activate_webauthn(data, headers, conn) +async fn activate_webauthn_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + activate_webauthn(data, headers, conn).await } #[derive(Deserialize, Debug)] @@ -276,13 +301,14 @@ struct DeleteU2FData { } #[delete("/two-factor/webauthn", data = "")] -fn delete_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn delete_webauthn(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let id = data.data.Id.into_i32()?; if !headers.user.check_valid_password(&data.data.MasterPasswordHash) { err!("Invalid password"); } - let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &conn) { + let mut tf = match TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::Webauthn as i32, &conn).await + { Some(tf) => tf, None => err!("Webauthn data not found!"), }; @@ -296,12 +322,12 @@ fn delete_webauthn(data: JsonUpcase, headers: Headers, conn: DbCo let removed_item = data.remove(item_pos); tf.data = serde_json::to_string(&data)?; - tf.save(&conn)?; + tf.save(&conn).await?; drop(tf); // If entry is migrated from u2f, delete the u2f entry as well - if let Some(mut u2f) = 
TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2f as i32, &conn) { - use crate::api::core::two_factor::u2f::U2FRegistration; + if let Some(mut u2f) = TwoFactor::find_by_user_and_type(&headers.user.uuid, TwoFactorType::U2f as i32, &conn).await + { let mut data: Vec = match serde_json::from_str(&u2f.data) { Ok(d) => d, Err(_) => err!("Error parsing U2F data"), @@ -311,7 +337,7 @@ fn delete_webauthn(data: JsonUpcase, headers: Headers, conn: DbCo let new_data_str = serde_json::to_string(&data)?; u2f.data = new_data_str; - u2f.save(&conn)?; + u2f.save(&conn).await?; } let keys_json: Vec = data.iter().map(WebauthnRegistration::to_json).collect(); @@ -323,18 +349,21 @@ fn delete_webauthn(data: JsonUpcase, headers: Headers, conn: DbCo }))) } -pub fn get_webauthn_registrations(user_uuid: &str, conn: &DbConn) -> Result<(bool, Vec), Error> { +pub async fn get_webauthn_registrations( + user_uuid: &str, + conn: &DbConn, +) -> Result<(bool, Vec), Error> { let type_ = TwoFactorType::Webauthn as i32; - match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) { + match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await { Some(tf) => Ok((tf.enabled, serde_json::from_str(&tf.data)?)), None => Ok((false, Vec::new())), // If no data, return empty list } } -pub fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResult { +pub async fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResult { // Load saved credentials let creds: Vec = - get_webauthn_registrations(user_uuid, conn)?.1.into_iter().map(|r| r.credential).collect(); + get_webauthn_registrations(user_uuid, conn).await?.1.into_iter().map(|r| r.credential).collect(); if creds.is_empty() { err!("No Webauthn devices registered") @@ -346,18 +375,19 @@ pub fn generate_webauthn_login(user_uuid: &str, conn: &DbConn) -> JsonResult { // Save the challenge state for later validation TwoFactor::new(user_uuid.into(), TwoFactorType::WebauthnLoginChallenge, 
serde_json::to_string(&state)?) - .save(conn)?; + .save(conn) + .await?; // Return challenge to the clients Ok(Json(serde_json::to_value(response.public_key)?)) } -pub fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult { +pub async fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) -> EmptyResult { let type_ = TwoFactorType::WebauthnLoginChallenge as i32; - let state = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn) { + let state = match TwoFactor::find_by_user_and_type(user_uuid, type_, conn).await { Some(tf) => { let state: AuthenticationState = serde_json::from_str(&tf.data)?; - tf.delete(conn)?; + tf.delete(conn).await?; state } None => err!("Can't recover login challenge"), @@ -366,7 +396,7 @@ pub fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) - let rsp: crate::util::UpCase = serde_json::from_str(response)?; let rsp: PublicKeyCredential = rsp.data.into(); - let mut registrations = get_webauthn_registrations(user_uuid, conn)?.1; + let mut registrations = get_webauthn_registrations(user_uuid, conn).await?.1; // If the credential we received is migrated from U2F, enable the U2F compatibility //let use_u2f = registrations.iter().any(|r| r.migrated && r.credential.cred_id == rsp.raw_id.0); @@ -377,7 +407,8 @@ pub fn validate_webauthn_login(user_uuid: &str, response: &str, conn: &DbConn) - reg.credential.counter = auth_data.counter; TwoFactor::new(user_uuid.to_string(), TwoFactorType::Webauthn, serde_json::to_string(®istrations)?) 
- .save(conn)?; + .save(conn) + .await?; return Ok(()); } } diff --git a/src/api/core/two_factor/yubikey.rs b/src/api/core/two_factor/yubikey.rs index c088324b..cadb04a9 100644 --- a/src/api/core/two_factor/yubikey.rs +++ b/src/api/core/two_factor/yubikey.rs @@ -1,5 +1,5 @@ +use rocket::serde::json::Json; use rocket::Route; -use rocket_contrib::json::Json; use serde_json::Value; use yubico::{config::Config, verify}; @@ -78,7 +78,7 @@ fn verify_yubikey_otp(otp: String) -> EmptyResult { } #[post("/two-factor/get-yubikey", data = "")] -fn generate_yubikey(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn generate_yubikey(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { // Make sure the credentials are set get_yubico_credentials()?; @@ -92,7 +92,7 @@ fn generate_yubikey(data: JsonUpcase, headers: Headers, conn: DbCo let user_uuid = &user.uuid; let yubikey_type = TwoFactorType::YubiKey as i32; - let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &conn); + let r = TwoFactor::find_by_user_and_type(user_uuid, yubikey_type, &conn).await; if let Some(r) = r { let yubikey_metadata: YubikeyMetadata = serde_json::from_str(&r.data)?; @@ -113,7 +113,7 @@ fn generate_yubikey(data: JsonUpcase, headers: Headers, conn: DbCo } #[post("/two-factor/yubikey", data = "")] -fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { +async fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { let data: EnableYubikeyData = data.into_inner().data; let mut user = headers.user; @@ -122,10 +122,11 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: } // Check if we already have some data - let mut yubikey_data = match TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &conn) { - Some(data) => data, - None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()), - }; + let mut yubikey_data = + match 
TwoFactor::find_by_user_and_type(&user.uuid, TwoFactorType::YubiKey as i32, &conn).await { + Some(data) => data, + None => TwoFactor::new(user.uuid.clone(), TwoFactorType::YubiKey, String::new()), + }; let yubikeys = parse_yubikeys(&data); @@ -146,7 +147,7 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: verify_yubikey_otp(yubikey.to_owned()).map_res("Invalid Yubikey OTP provided")?; } - let yubikey_ids: Vec = yubikeys.into_iter().map(|x| (&x[..12]).to_owned()).collect(); + let yubikey_ids: Vec = yubikeys.into_iter().map(|x| (x[..12]).to_owned()).collect(); let yubikey_metadata = YubikeyMetadata { Keys: yubikey_ids, @@ -154,9 +155,9 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: }; yubikey_data.data = serde_json::to_string(&yubikey_metadata).unwrap(); - yubikey_data.save(&conn)?; + yubikey_data.save(&conn).await?; - _generate_recover_code(&mut user, &conn); + _generate_recover_code(&mut user, &conn).await; let mut result = jsonify_yubikeys(yubikey_metadata.Keys); @@ -168,8 +169,8 @@ fn activate_yubikey(data: JsonUpcase, headers: Headers, conn: } #[put("/two-factor/yubikey", data = "")] -fn activate_yubikey_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { - activate_yubikey(data, headers, conn) +async fn activate_yubikey_put(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + activate_yubikey(data, headers, conn).await } pub fn validate_yubikey_login(response: &str, twofactor_data: &str) -> EmptyResult { diff --git a/src/api/icons.rs b/src/api/icons.rs index a2e5cc3a..c343df14 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -1,21 +1,26 @@ use std::{ - collections::HashMap, - fs::{create_dir_all, remove_file, symlink_metadata, File}, - io::prelude::*, - net::{IpAddr, ToSocketAddrs}, - sync::{Arc, RwLock}, + net::IpAddr, + sync::Arc, time::{Duration, SystemTime}, }; +use bytes::{Bytes, BytesMut}; +use futures::{stream::StreamExt, TryFutureExt}; use once_cell::sync::Lazy; use regex::Regex; 
-use reqwest::{blocking::Client, blocking::Response, header}; -use rocket::{ - http::ContentType, - response::{Content, Redirect}, - Route, +use reqwest::{ + header::{self, HeaderMap, HeaderValue}, + Client, Response, +}; +use rocket::{http::ContentType, response::Redirect, Route}; +use tokio::{ + fs::{create_dir_all, remove_file, symlink_metadata, File}, + io::{AsyncReadExt, AsyncWriteExt}, + net::lookup_host, }; +use html5gum::{Emitter, EndTag, HtmlString, InfallibleTokenizer, Readable, StartTag, StringReader, Tokenizer}; + use crate::{ error::Error, util::{get_reqwest_client_builder, Cached}, @@ -25,48 +30,56 @@ use crate::{ pub fn routes() -> Vec { match CONFIG.icon_service().as_str() { "internal" => routes![icon_internal], - "bitwarden" => routes![icon_bitwarden], - "duckduckgo" => routes![icon_duckduckgo], - "google" => routes![icon_google], - _ => routes![icon_custom], + _ => routes![icon_external], } } static CLIENT: Lazy = Lazy::new(|| { // Generate the default headers - let mut default_headers = header::HeaderMap::new(); - default_headers - .insert(header::USER_AGENT, header::HeaderValue::from_static("Links (2.22; Linux X86_64; GNU C; text)")); - default_headers - .insert(header::ACCEPT, header::HeaderValue::from_static("text/html, text/*;q=0.5, image/*, */*;q=0.1")); - default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en,*;q=0.1")); - default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); - default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); + let mut default_headers = HeaderMap::new(); + default_headers.insert(header::USER_AGENT, HeaderValue::from_static("Links (2.22; Linux X86_64; GNU C; text)")); + default_headers.insert(header::ACCEPT, HeaderValue::from_static("text/html, text/*;q=0.5, image/*, */*;q=0.1")); + default_headers.insert(header::ACCEPT_LANGUAGE, HeaderValue::from_static("en,*;q=0.1")); + default_headers.insert(header::CACHE_CONTROL, 
HeaderValue::from_static("no-cache")); + default_headers.insert(header::PRAGMA, HeaderValue::from_static("no-cache")); + + // Generate the cookie store + let cookie_store = Arc::new(Jar::default()); // Reuse the client between requests - get_reqwest_client_builder() - .cookie_provider(Arc::new(Jar::default())) + let client = get_reqwest_client_builder() + .cookie_provider(Arc::clone(&cookie_store)) .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) - .default_headers(default_headers) - .build() - .expect("Failed to build icon client") + .default_headers(default_headers.clone()); + + match client.build() { + Ok(client) => client, + Err(e) => { + error!("Possible trust-dns error, trying with trust-dns disabled: '{e}'"); + get_reqwest_client_builder() + .cookie_provider(cookie_store) + .timeout(Duration::from_secs(CONFIG.icon_download_timeout())) + .default_headers(default_headers) + .trust_dns(false) + .build() + .expect("Failed to build client") + } + } }); // Build Regex only once since this takes a lot of time. -static ICON_REL_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?i)icon$|apple.*icon").unwrap()); -static ICON_REL_BLACKLIST: Lazy = Lazy::new(|| Regex::new(r"(?i)mask-icon").unwrap()); static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap()); // Special HashMap which holds the user defined Regex to speedup matching the regex. 
-static ICON_BLACKLIST_REGEX: Lazy>> = Lazy::new(|| RwLock::new(HashMap::new())); +static ICON_BLACKLIST_REGEX: Lazy> = Lazy::new(dashmap::DashMap::new); -fn icon_redirect(domain: &str, template: &str) -> Option { +async fn icon_redirect(domain: &str, template: &str) -> Option { if !is_valid_domain(domain) { warn!("Invalid domain: {}", domain); return None; } - if is_domain_blacklisted(domain) { + if is_domain_blacklisted(domain).await { return None; } @@ -84,47 +97,28 @@ fn icon_redirect(domain: &str, template: &str) -> Option { } #[get("//icon.png")] -fn icon_custom(domain: String) -> Option { - icon_redirect(&domain, &CONFIG.icon_service()) -} - -#[get("//icon.png")] -fn icon_bitwarden(domain: String) -> Option { - icon_redirect(&domain, "https://icons.bitwarden.net/{}/icon.png") +async fn icon_external(domain: String) -> Option { + icon_redirect(&domain, &CONFIG._icon_service_url()).await } #[get("//icon.png")] -fn icon_duckduckgo(domain: String) -> Option { - icon_redirect(&domain, "https://icons.duckduckgo.com/ip3/{}.ico") -} - -#[get("//icon.png")] -fn icon_google(domain: String) -> Option { - icon_redirect(&domain, "https://www.google.com/s2/favicons?domain={}&sz=32") -} - -#[get("//icon.png")] -fn icon_internal(domain: String) -> Cached>> { +async fn icon_internal(domain: String) -> Cached<(ContentType, Vec)> { const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); if !is_valid_domain(&domain) { warn!("Invalid domain: {}", domain); return Cached::ttl( - Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + (ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl(), true, ); } - match get_icon(&domain) { + match get_icon(&domain).await { Some((icon, icon_type)) => { - Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl(), true) + Cached::ttl((ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl(), true) } - _ => Cached::ttl( - 
Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), - CONFIG.icon_cache_negttl(), - true, - ), + _ => Cached::ttl((ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl(), true), } } @@ -264,68 +258,57 @@ mod tests { } } -fn is_domain_blacklisted(domain: &str) -> bool { - let mut is_blacklisted = CONFIG.icon_blacklist_non_global_ips() - && (domain, 0) - .to_socket_addrs() - .map(|x| { - for ip_port in x { - if !is_global(ip_port.ip()) { - warn!("IP {} for domain '{}' is not a global IP!", ip_port.ip(), domain); - return true; - } +use cached::proc_macro::cached; +#[cached(key = "String", convert = r#"{ domain.to_string() }"#, size = 16, time = 60)] +#[allow(clippy::unused_async)] // This is needed because cached causes a false-positive here. +async fn is_domain_blacklisted(domain: &str) -> bool { + if CONFIG.icon_blacklist_non_global_ips() { + if let Ok(s) = lookup_host((domain, 0)).await { + for addr in s { + if !is_global(addr.ip()) { + debug!("IP {} for domain '{}' is not a global IP!", addr.ip(), domain); + return true; } - false - }) - .unwrap_or(false); - - // Skip the regex check if the previous one is true already - if !is_blacklisted { - if let Some(blacklist) = CONFIG.icon_blacklist_regex() { - let mut regex_hashmap = ICON_BLACKLIST_REGEX.read().unwrap(); - - // Use the pre-generate Regex stored in a Lazy HashMap if there's one, else generate it. - let regex = if let Some(regex) = regex_hashmap.get(&blacklist) { - regex - } else { - drop(regex_hashmap); + } + } + } - let mut regex_hashmap_write = ICON_BLACKLIST_REGEX.write().unwrap(); - // Clear the current list if the previous key doesn't exists. - // To prevent growing of the HashMap after someone has changed it via the admin interface. - if regex_hashmap_write.len() >= 1 { - regex_hashmap_write.clear(); - } + if let Some(blacklist) = CONFIG.icon_blacklist_regex() { + // Use the pre-generate Regex stored in a Lazy HashMap if there's one, else generate it. 
+ let is_match = if let Some(regex) = ICON_BLACKLIST_REGEX.get(&blacklist) { + regex.is_match(domain) + } else { + // Clear the current list if the previous key doesn't exists. + // To prevent growing of the HashMap after someone has changed it via the admin interface. + if ICON_BLACKLIST_REGEX.len() >= 1 { + ICON_BLACKLIST_REGEX.clear(); + } - // Generate the regex to store in too the Lazy Static HashMap. - let blacklist_regex = Regex::new(&blacklist).unwrap(); - regex_hashmap_write.insert(blacklist.to_string(), blacklist_regex); - drop(regex_hashmap_write); + // Generate the regex to store in too the Lazy Static HashMap. + let blacklist_regex = Regex::new(&blacklist).unwrap(); + let is_match = blacklist_regex.is_match(domain); + ICON_BLACKLIST_REGEX.insert(blacklist.clone(), blacklist_regex); - regex_hashmap = ICON_BLACKLIST_REGEX.read().unwrap(); - regex_hashmap.get(&blacklist).unwrap() - }; + is_match + }; - // Use the pre-generate Regex stored in a Lazy HashMap. - if regex.is_match(domain) { - debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); - is_blacklisted = true; - } + if is_match { + debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); + return true; } } - - is_blacklisted + false } -fn get_icon(domain: &str) -> Option<(Vec, String)> { +async fn get_icon(domain: &str) -> Option<(Vec, String)> { let path = format!("{}/{}.png", CONFIG.icon_cache_folder(), domain); // Check for expiration of negatively cached copy - if icon_is_negcached(&path) { + if icon_is_negcached(&path).await { return None; } - if let Some(icon) = get_cached_icon(&path) { + if let Some(icon) = get_cached_icon(&path).await { let icon_type = match get_icon_type(&icon) { Some(x) => x, _ => "x-icon", @@ -338,31 +321,31 @@ fn get_icon(domain: &str) -> Option<(Vec, String)> { } // Get the icon, or None in case of error - match download_icon(domain) { + match download_icon(domain).await { Ok((icon, icon_type)) => { - save_icon(&path, &icon); - Some((icon, 
icon_type.unwrap_or("x-icon").to_string())) + save_icon(&path, &icon).await; + Some((icon.to_vec(), icon_type.unwrap_or("x-icon").to_string())) } Err(e) => { warn!("Unable to download icon: {:?}", e); let miss_indicator = path + ".miss"; - save_icon(&miss_indicator, &[]); + save_icon(&miss_indicator, &[]).await; None } } } -fn get_cached_icon(path: &str) -> Option> { +async fn get_cached_icon(path: &str) -> Option> { // Check for expiration of successfully cached copy - if icon_is_expired(path) { + if icon_is_expired(path).await { return None; } // Try to read the cached icon, and return it if it exists - if let Ok(mut f) = File::open(path) { + if let Ok(mut f) = File::open(path).await { let mut buffer = Vec::new(); - if f.read_to_end(&mut buffer).is_ok() { + if f.read_to_end(&mut buffer).await.is_ok() { return Some(buffer); } } @@ -370,22 +353,22 @@ fn get_cached_icon(path: &str) -> Option> { None } -fn file_is_expired(path: &str, ttl: u64) -> Result { - let meta = symlink_metadata(path)?; +async fn file_is_expired(path: &str, ttl: u64) -> Result { + let meta = symlink_metadata(path).await?; let modified = meta.modified()?; let age = SystemTime::now().duration_since(modified)?; Ok(ttl > 0 && ttl <= age.as_secs()) } -fn icon_is_negcached(path: &str) -> bool { +async fn icon_is_negcached(path: &str) -> bool { let miss_indicator = path.to_owned() + ".miss"; - let expired = file_is_expired(&miss_indicator, CONFIG.icon_cache_negttl()); + let expired = file_is_expired(&miss_indicator, CONFIG.icon_cache_negttl()).await; match expired { // No longer negatively cached, drop the marker Ok(true) => { - if let Err(e) = remove_file(&miss_indicator) { + if let Err(e) = remove_file(&miss_indicator).await { error!("Could not remove negative cache indicator for icon {:?}: {:?}", path, e); } false @@ -397,8 +380,8 @@ fn icon_is_negcached(path: &str) -> bool { } } -fn icon_is_expired(path: &str) -> bool { - let expired = file_is_expired(path, CONFIG.icon_cache_ttl()); +async fn 
icon_is_expired(path: &str) -> bool { + let expired = file_is_expired(path, CONFIG.icon_cache_ttl()).await; expired.unwrap_or(true) } @@ -416,91 +399,62 @@ impl Icon { } } -/// Iterates over the HTML document to find -/// When found it will stop the iteration and the found base href will be shared deref via `base_href`. -/// -/// # Arguments -/// * `node` - A Parsed HTML document via html5ever::parse_document() -/// * `base_href` - a mutable url::Url which will be overwritten when a base href tag has been found. -/// -fn get_base_href(node: &std::rc::Rc, base_href: &mut url::Url) -> bool { - if let markup5ever_rcdom::NodeData::Element { - name, - attrs, - .. - } = &node.data - { - if name.local.as_ref() == "base" { - let attrs = attrs.borrow(); - for attr in attrs.iter() { - let attr_name = attr.name.local.as_ref(); - let attr_value = attr.value.as_ref(); - - if attr_name == "href" { - debug!("Found base href: {}", attr_value); - *base_href = match base_href.join(attr_value) { - Ok(href) => href, - _ => base_href.clone(), - }; - return true; - } - } - return true; - } - } - - // TODO: Might want to limit the recursion depth? - for child in node.children.borrow().iter() { - // Check if we got a true back and stop the iter. - // This means we found a tag and can stop processing the html. - if get_base_href(child, base_href) { - return true; - } - } - false -} - -fn get_favicons_node(node: &std::rc::Rc, icons: &mut Vec, url: &url::Url) { - if let markup5ever_rcdom::NodeData::Element { - name, - attrs, - .. 
- } = &node.data - { - if name.local.as_ref() == "link" { - let mut has_rel = false; - let mut href = None; - let mut sizes = None; - - let attrs = attrs.borrow(); - for attr in attrs.iter() { - let attr_name = attr.name.local.as_ref(); - let attr_value = attr.value.as_ref(); - - if attr_name == "rel" && ICON_REL_REGEX.is_match(attr_value) && !ICON_REL_BLACKLIST.is_match(attr_value) +fn get_favicons_node( + dom: InfallibleTokenizer, FaviconEmitter>, + icons: &mut Vec, + url: &url::Url, +) { + const TAG_LINK: &[u8] = b"link"; + const TAG_BASE: &[u8] = b"base"; + const TAG_HEAD: &[u8] = b"head"; + const ATTR_REL: &[u8] = b"rel"; + const ATTR_HREF: &[u8] = b"href"; + const ATTR_SIZES: &[u8] = b"sizes"; + + let mut base_url = url.clone(); + let mut icon_tags: Vec = Vec::new(); + for token in dom { + match token { + FaviconToken::StartTag(tag) => { + if *tag.name == TAG_LINK + && tag.attributes.contains_key(ATTR_REL) + && tag.attributes.contains_key(ATTR_HREF) { - has_rel = true; - } else if attr_name == "href" { - href = Some(attr_value); - } else if attr_name == "sizes" { - sizes = Some(attr_value); + let rel_value = std::str::from_utf8(tag.attributes.get(ATTR_REL).unwrap()) + .unwrap_or_default() + .to_ascii_lowercase(); + if rel_value.contains("icon") && !rel_value.contains("mask-icon") { + icon_tags.push(tag); + } + } else if *tag.name == TAG_BASE && tag.attributes.contains_key(ATTR_HREF) { + let href = std::str::from_utf8(tag.attributes.get(ATTR_HREF).unwrap()).unwrap_or_default(); + debug!("Found base href: {href}"); + base_url = match base_url.join(href) { + Ok(inner_url) => inner_url, + _ => url.clone(), + }; } } - - if has_rel { - if let Some(inner_href) = href { - if let Ok(full_href) = url.join(inner_href).map(String::from) { - let priority = get_icon_priority(&full_href, sizes); - icons.push(Icon::new(priority, full_href)); - } + FaviconToken::EndTag(tag) => { + if *tag.name == TAG_HEAD { + break; } } } } - // TODO: Might want to limit the recursion depth? 
- for child in node.children.borrow().iter() { - get_favicons_node(child, icons, url); + for icon_tag in icon_tags { + if let Some(icon_href) = icon_tag.attributes.get(ATTR_HREF) { + if let Ok(full_href) = base_url.join(std::str::from_utf8(icon_href).unwrap_or_default()) { + let sizes = if let Some(v) = icon_tag.attributes.get(ATTR_SIZES) { + std::str::from_utf8(v).unwrap_or_default() + } else { + "" + }; + let priority = get_icon_priority(full_href.as_str(), sizes); + icons.push(Icon::new(priority, full_href.to_string())); + } + }; } } @@ -518,16 +472,16 @@ struct IconUrlResult { /// /// # Example /// ``` -/// let icon_result = get_icon_url("github.com")?; -/// let icon_result = get_icon_url("vaultwarden.discourse.group")?; +/// let icon_result = get_icon_url("github.com").await?; +/// let icon_result = get_icon_url("vaultwarden.discourse.group").await?; /// ``` -fn get_icon_url(domain: &str) -> Result { +async fn get_icon_url(domain: &str) -> Result { // Default URL with secure and insecure schemes - let ssldomain = format!("https://{}", domain); - let httpdomain = format!("http://{}", domain); + let ssldomain = format!("https://{domain}"); + let httpdomain = format!("http://{domain}"); // First check the domain as given during the request for both HTTPS and HTTP. 
- let resp = match get_page(&ssldomain).or_else(|_| get_page(&httpdomain)) { + let resp = match get_page(&ssldomain).or_else(|_| get_page(&httpdomain)).await { Ok(c) => Ok(c), Err(e) => { let mut sub_resp = Err(e); @@ -542,25 +496,24 @@ fn get_icon_url(domain: &str) -> Result { base = domain_parts.next_back().unwrap() ); if is_valid_domain(&base_domain) { - let sslbase = format!("https://{}", base_domain); - let httpbase = format!("http://{}", base_domain); - debug!("[get_icon_url]: Trying without subdomains '{}'", base_domain); + let sslbase = format!("https://{base_domain}"); + let httpbase = format!("http://{base_domain}"); + debug!("[get_icon_url]: Trying without subdomains '{base_domain}'"); - sub_resp = get_page(&sslbase).or_else(|_| get_page(&httpbase)); + sub_resp = get_page(&sslbase).or_else(|_| get_page(&httpbase)).await; } // When the domain is not an IP, and has less then 2 dots, try to add www. infront of it. } else if is_ip.is_err() && domain.matches('.').count() < 2 { - let www_domain = format!("www.{}", domain); + let www_domain = format!("www.{domain}"); if is_valid_domain(&www_domain) { - let sslwww = format!("https://{}", www_domain); - let httpwww = format!("http://{}", www_domain); - debug!("[get_icon_url]: Trying with www. prefix '{}'", www_domain); + let sslwww = format!("https://{www_domain}"); + let httpwww = format!("http://{www_domain}"); + debug!("[get_icon_url]: Trying with www. prefix '{www_domain}'"); - sub_resp = get_page(&sslwww).or_else(|_| get_page(&httpwww)); + sub_resp = get_page(&sslwww).or_else(|_| get_page(&httpwww)).await; } } - sub_resp } }; @@ -575,26 +528,23 @@ fn get_icon_url(domain: &str) -> Result { // Set the referer to be used on the final request, some sites check this. // Mostly used to prevent direct linking and other security resons. - referer = url.as_str().to_string(); + referer = url.to_string(); - // Add the default favicon.ico to the list with the domain the content responded from. 
+ // Add the fallback favicon.ico and apple-touch-icon.png to the list with the domain the content responded from. iconlist.push(Icon::new(35, String::from(url.join("/favicon.ico").unwrap()))); + iconlist.push(Icon::new(40, String::from(url.join("/apple-touch-icon.png").unwrap()))); // 384KB should be more than enough for the HTML, though as we only really need the HTML header. - let mut limited_reader = content.take(384 * 1024); - - use html5ever::tendril::TendrilSink; - let dom = html5ever::parse_document(markup5ever_rcdom::RcDom::default(), Default::default()) - .from_utf8() - .read_from(&mut limited_reader)?; + let limited_reader = stream_to_bytes_limit(content, 384 * 1024).await?.to_vec(); - let mut base_url: url::Url = url; - get_base_href(&dom.document, &mut base_url); - get_favicons_node(&dom.document, &mut iconlist, &base_url); + let dom = Tokenizer::new_with_emitter(limited_reader.to_reader(), FaviconEmitter::default()).infallible(); + get_favicons_node(dom, &mut iconlist, &url); } else { // Add the default favicon.ico to the list with just the given domain - iconlist.push(Icon::new(35, format!("{}/favicon.ico", ssldomain))); - iconlist.push(Icon::new(35, format!("{}/favicon.ico", httpdomain))); + iconlist.push(Icon::new(35, format!("{ssldomain}/favicon.ico"))); + iconlist.push(Icon::new(40, format!("{ssldomain}/apple-touch-icon.png"))); + iconlist.push(Icon::new(35, format!("{httpdomain}/favicon.ico"))); + iconlist.push(Icon::new(40, format!("{httpdomain}/apple-touch-icon.png"))); } // Sort the iconlist by priority @@ -607,12 +557,12 @@ fn get_icon_url(domain: &str) -> Result { }) } -fn get_page(url: &str) -> Result { - get_page_with_referer(url, "") +async fn get_page(url: &str) -> Result { + get_page_with_referer(url, "").await } -fn get_page_with_referer(url: &str, referer: &str) -> Result { - if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()) { +async fn get_page_with_referer(url: &str, referer: &str) -> Result { + 
if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()).await { warn!("Favicon '{}' resolves to a blacklisted domain or IP!", url); } @@ -621,7 +571,7 @@ fn get_page_with_referer(url: &str, referer: &str) -> Result { client = client.header("Referer", referer) } - match client.send() { + match client.send().await { Ok(c) => c.error_for_status().map_err(Into::into), Err(e) => err_silent!(format!("{}", e)), } @@ -639,7 +589,7 @@ fn get_page_with_referer(url: &str, referer: &str) -> Result { /// priority1 = get_icon_priority("http://example.com/path/to/a/favicon.png", "32x32"); /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", ""); /// ``` -fn get_icon_priority(href: &str, sizes: Option<&str>) -> u8 { +fn get_icon_priority(href: &str, sizes: &str) -> u8 { // Check if there is a dimension set let (width, height) = parse_sizes(sizes); @@ -687,11 +637,11 @@ fn get_icon_priority(href: &str, sizes: Option<&str>) -> u8 { /// let (width, height) = parse_sizes("x128x128"); // (128, 128) /// let (width, height) = parse_sizes("32"); // (0, 0) /// ``` -fn parse_sizes(sizes: Option<&str>) -> (u16, u16) { +fn parse_sizes(sizes: &str) -> (u16, u16) { let mut width: u16 = 0; let mut height: u16 = 0; - if let Some(sizes) = sizes { + if !sizes.is_empty() { match ICON_SIZE_REGEX.captures(sizes.trim()) { None => {} Some(dimensions) => { @@ -706,14 +656,14 @@ fn parse_sizes(sizes: Option<&str>) -> (u16, u16) { (width, height) } -fn download_icon(domain: &str) -> Result<(Vec, Option<&str>), Error> { - if is_domain_blacklisted(domain) { +async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> { + if is_domain_blacklisted(domain).await { err_silent!("Domain is blacklisted", domain) } - let icon_result = get_icon_url(domain)?; + let icon_result = get_icon_url(domain).await?; - let mut buffer = Vec::new(); + let mut buffer = Bytes::new(); let mut icon_type: Option<&str> = None; use data_url::DataUrl; @@ -722,8 
+672,12 @@ fn download_icon(domain: &str) -> Result<(Vec, Option<&str>), Error> { if icon.href.starts_with("data:image") { let datauri = DataUrl::process(&icon.href).unwrap(); // Check if we are able to decode the data uri - match datauri.decode_to_vec() { - Ok((body, _fragment)) => { + let mut body = BytesMut::new(); + match datauri.decode::<_, ()>(|bytes| { + body.extend_from_slice(bytes); + Ok(()) + }) { + Ok(_) => { // Also check if the size is atleast 67 bytes, which seems to be the smallest png i could create if body.len() >= 67 { // Check if the icon type is allowed, else try an icon from the list. @@ -733,16 +687,17 @@ fn download_icon(domain: &str) -> Result<(Vec, Option<&str>), Error> { continue; } info!("Extracted icon from data:image uri for {}", domain); - buffer = body; + buffer = body.freeze(); break; } } _ => debug!("Extracted icon from data:image uri is invalid"), }; } else { - match get_page_with_referer(&icon.href, &icon_result.referer) { - Ok(mut res) => { - res.copy_to(&mut buffer)?; + match get_page_with_referer(&icon.href, &icon_result.referer).await { + Ok(res) => { + buffer = stream_to_bytes_limit(res, 5120 * 1024).await?; // 5120KB/5MB for each icon max (Same as icons.bitwarden.net) + // Check if the icon type is allowed, else try an icon from the list. 
icon_type = get_icon_type(&buffer); if icon_type.is_none() { @@ -765,13 +720,13 @@ fn download_icon(domain: &str) -> Result<(Vec, Option<&str>), Error> { Ok((buffer, icon_type)) } -fn save_icon(path: &str, icon: &[u8]) { - match File::create(path) { +async fn save_icon(path: &str, icon: &[u8]) { + match File::create(path).await { Ok(mut f) => { - f.write_all(icon).expect("Error writing icon file"); + f.write_all(icon).await.expect("Error writing icon file"); } Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => { - create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache folder"); + create_dir_all(&CONFIG.icon_cache_folder()).await.expect("Error creating icon cache folder"); } Err(e) => { warn!("Unable to save icon: {:?}", e); @@ -791,13 +746,30 @@ fn get_icon_type(bytes: &[u8]) -> Option<&'static str> { } } +/// Minimize the amount of bytes to be parsed from a reqwest result. +/// This prevents very long parsing and memory usage. +async fn stream_to_bytes_limit(res: Response, max_size: usize) -> Result { + let mut stream = res.bytes_stream().take(max_size); + let mut buf = BytesMut::new(); + let mut size = 0; + while let Some(chunk) = stream.next().await { + let chunk = &chunk?; + size += chunk.len(); + buf.extend(chunk); + if size >= max_size { + break; + } + } + Ok(buf.freeze()) +} + /// This is an implementation of the default Cookie Jar from Reqwest and reqwest_cookie_store build by pfernie. /// The default cookie jar used by Reqwest keeps all the cookies based upon the Max-Age or Expires which could be a long time. /// That could be used for tracking, to prevent this we force the lifespan of the cookies to always be max two minutes. /// A Cookie Jar is needed because some sites force a redirect with cookies to verify if a request uses cookies or not. 
use cookie_store::CookieStore; #[derive(Default)] -pub struct Jar(RwLock); +pub struct Jar(std::sync::RwLock); impl reqwest::cookie::CookieStore for Jar { fn set_cookies(&self, cookie_headers: &mut dyn Iterator, url: &url::Url) { @@ -820,8 +792,6 @@ impl reqwest::cookie::CookieStore for Jar { } fn cookies(&self, url: &url::Url) -> Option { - use bytes::Bytes; - let cookie_store = self.0.read().unwrap(); let s = cookie_store .get_request_values(url) @@ -836,3 +806,158 @@ impl reqwest::cookie::CookieStore for Jar { header::HeaderValue::from_maybe_shared(Bytes::from(s)).ok() } } + +/// Custom FaviconEmitter for the html5gum parser. +/// The FaviconEmitter is using an almost 1:1 copy of the DefaultEmitter with some small changes. +/// This prevents emitting tags like comments, doctype and also strings between the tags. +/// Therefor parsing the HTML content is faster. +use std::collections::{BTreeSet, VecDeque}; + +#[derive(Debug)] +enum FaviconToken { + StartTag(StartTag), + EndTag(EndTag), +} + +#[derive(Default, Debug)] +struct FaviconEmitter { + current_token: Option, + last_start_tag: HtmlString, + current_attribute: Option<(HtmlString, HtmlString)>, + seen_attributes: BTreeSet, + emitted_tokens: VecDeque, +} + +impl FaviconEmitter { + fn emit_token(&mut self, token: FaviconToken) { + self.emitted_tokens.push_front(token); + } + + fn flush_current_attribute(&mut self) { + if let Some((k, v)) = self.current_attribute.take() { + match self.current_token { + Some(FaviconToken::StartTag(ref mut tag)) => { + tag.attributes.entry(k).and_modify(|_| {}).or_insert(v); + } + Some(FaviconToken::EndTag(_)) => { + self.seen_attributes.insert(k); + } + _ => { + debug_assert!(false); + } + } + } + } +} + +impl Emitter for FaviconEmitter { + type Token = FaviconToken; + + fn set_last_start_tag(&mut self, last_start_tag: Option<&[u8]>) { + self.last_start_tag.clear(); + self.last_start_tag.extend(last_start_tag.unwrap_or_default()); + } + + fn pop_token(&mut self) -> Option { + 
self.emitted_tokens.pop_back() + } + + fn init_start_tag(&mut self) { + self.current_token = Some(FaviconToken::StartTag(StartTag::default())); + } + + fn init_end_tag(&mut self) { + self.current_token = Some(FaviconToken::EndTag(EndTag::default())); + self.seen_attributes.clear(); + } + + fn emit_current_tag(&mut self) -> Option { + self.flush_current_attribute(); + let mut token = self.current_token.take().unwrap(); + let mut emit = false; + match token { + FaviconToken::EndTag(ref mut tag) => { + // Always clean seen attributes + self.seen_attributes.clear(); + + // Only trigger an emit for the tag. + // This is matched, and will break the for-loop. + if *tag.name == b"head" { + emit = true; + } + } + FaviconToken::StartTag(ref mut tag) => { + // Only trriger an emit for and tags. + // These are the only tags we want to parse. + if *tag.name == b"link" || *tag.name == b"base" { + self.set_last_start_tag(Some(&tag.name)); + emit = true; + } else { + self.set_last_start_tag(None); + } + } + } + + // Only emit the tags we want to parse. + if emit { + self.emit_token(token); + } + None + } + + fn push_tag_name(&mut self, s: &[u8]) { + match self.current_token { + Some( + FaviconToken::StartTag(StartTag { + ref mut name, + .. + }) + | FaviconToken::EndTag(EndTag { + ref mut name, + .. 
+ }), + ) => { + name.extend(s); + } + _ => debug_assert!(false), + } + } + + fn init_attribute(&mut self) { + self.flush_current_attribute(); + self.current_attribute = Some(Default::default()); + } + + fn push_attribute_name(&mut self, s: &[u8]) { + self.current_attribute.as_mut().unwrap().0.extend(s); + } + + fn push_attribute_value(&mut self, s: &[u8]) { + self.current_attribute.as_mut().unwrap().1.extend(s); + } + + fn current_is_appropriate_end_tag_token(&mut self) -> bool { + match self.current_token { + Some(FaviconToken::EndTag(ref tag)) => !self.last_start_tag.is_empty() && self.last_start_tag == tag.name, + _ => false, + } + } + + // We do not want and need these parts of the HTML document + // These will be skipped and ignored during the tokenization and iteration. + fn emit_current_comment(&mut self) {} + fn emit_current_doctype(&mut self) {} + fn emit_eof(&mut self) {} + fn emit_error(&mut self, _: html5gum::Error) {} + fn emit_string(&mut self, _: &[u8]) {} + fn init_comment(&mut self) {} + fn init_doctype(&mut self) {} + fn push_comment(&mut self, _: &[u8]) {} + fn push_doctype_name(&mut self, _: &[u8]) {} + fn push_doctype_public_identifier(&mut self, _: &[u8]) {} + fn push_doctype_system_identifier(&mut self, _: &[u8]) {} + fn set_doctype_public_identifier(&mut self, _: &[u8]) {} + fn set_doctype_system_identifier(&mut self, _: &[u8]) {} + fn set_force_quirks(&mut self) {} + fn set_self_closing(&mut self) {} +} diff --git a/src/api/identity.rs b/src/api/identity.rs index 0adc542f..d0a3bcce 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -1,16 +1,17 @@ use chrono::Utc; use num_traits::FromPrimitive; +use rocket::serde::json::Json; use rocket::{ - request::{Form, FormItems, FromForm}, + form::{Form, FromForm}, Route, }; -use rocket_contrib::json::Json; use serde_json::Value; use crate::{ api::{ + core::accounts::{PreloginData, _prelogin}, core::two_factor::{duo, email, email::EmailTokenData, yubikey}, - ApiResult, EmptyResult, 
JsonResult, + ApiResult, EmptyResult, JsonResult, JsonUpcase, }, auth::ClientIp, db::{models::*, DbConn}, @@ -19,17 +20,17 @@ use crate::{ }; pub fn routes() -> Vec { - routes![login] + routes![login, prelogin] } #[post("/connect/token", data = "")] -fn login(data: Form, conn: DbConn, ip: ClientIp) -> JsonResult { +async fn login(data: Form, conn: DbConn, ip: ClientIp) -> JsonResult { let data: ConnectData = data.into_inner(); match data.grant_type.as_ref() { "refresh_token" => { _check_is_some(&data.refresh_token, "refresh_token cannot be blank")?; - _refresh_login(data, conn) + _refresh_login(data, conn).await } "password" => { _check_is_some(&data.client_id, "client_id cannot be blank")?; @@ -41,34 +42,34 @@ fn login(data: Form, conn: DbConn, ip: ClientIp) -> JsonResult { _check_is_some(&data.device_name, "device_name cannot be blank")?; _check_is_some(&data.device_type, "device_type cannot be blank")?; - _password_login(data, conn, &ip) + _password_login(data, conn, &ip).await } "client_credentials" => { _check_is_some(&data.client_id, "client_id cannot be blank")?; _check_is_some(&data.client_secret, "client_secret cannot be blank")?; _check_is_some(&data.scope, "scope cannot be blank")?; - _api_key_login(data, conn, &ip) + _api_key_login(data, conn, &ip).await } t => err!("Invalid type", t), } } -fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { +async fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { // Extract token let token = data.refresh_token.unwrap(); // Get device by refresh token - let mut device = Device::find_by_refresh_token(&token, &conn).map_res("Invalid refresh token")?; + let mut device = Device::find_by_refresh_token(&token, &conn).await.map_res("Invalid refresh token")?; let scope = "api offline_access"; let scope_vec = vec!["api".into(), "offline_access".into()]; // Common - let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap(); - let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, 
&conn); + let user = User::find_by_uuid(&device.user_uuid, &conn).await.unwrap(); + let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); - device.save(&conn)?; + device.save(&conn).await?; Ok(Json(json!({ "access_token": access_token, @@ -86,7 +87,7 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { }))) } -fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { +async fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { // Validate scope let scope = data.scope.as_ref().unwrap(); if scope != "api offline_access" { @@ -98,8 +99,8 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult crate::ratelimit::check_limit_login(&ip.ip)?; // Get the user - let username = data.username.as_ref().unwrap(); - let user = match User::find_by_mail(username, &conn) { + let username = data.username.as_ref().unwrap().trim(); + let user = match User::find_by_mail(username, &conn).await { Some(user) => user, None => err!("Username or password is incorrect. Try again", format!("IP: {}. Username: {}.", ip.ip, username)), }; @@ -130,11 +131,11 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult user.last_verifying_at = Some(now); user.login_verify_count += 1; - if let Err(e) = user.save(&conn) { + if let Err(e) = user.save(&conn).await { error!("Error updating user: {:#?}", e); } - if let Err(e) = mail::send_verify_email(&user.email, &user.uuid) { + if let Err(e) = mail::send_verify_email(&user.email, &user.uuid).await { error!("Error auto-sending email verification email: {:#?}", e); } } @@ -144,12 +145,12 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult err!("Please verify your email before trying again.", format!("IP: {}. 
Username: {}.", ip.ip, username)) } - let (mut device, new_device) = get_device(&data, &conn, &user); + let (mut device, new_device) = get_device(&data, &conn, &user).await; - let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, ip, &conn)?; + let twofactor_token = twofactor_auth(&user.uuid, &data, &mut device, ip, &conn).await?; if CONFIG.mail_enabled() && new_device { - if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) { + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await { error!("Error sending new device email: {:#?}", e); if CONFIG.require_device_email() { @@ -159,9 +160,9 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult } // Common - let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); + let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); - device.save(&conn)?; + device.save(&conn).await?; let mut result = json!({ "access_token": access_token, @@ -187,7 +188,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult Ok(Json(result)) } -fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { +async fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { // Validate scope let scope = data.scope.as_ref().unwrap(); if scope != "api" { @@ -204,7 +205,7 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult Some(uuid) => uuid, None => err!("Malformed client_id", format!("IP: {}.", ip.ip)), }; - let user = match User::find_by_uuid(user_uuid, &conn) { + let user = match User::find_by_uuid(user_uuid, &conn).await { Some(user) => user, None => err!("Invalid client_id", format!("IP: {}.", ip.ip)), }; @@ -220,11 +221,11 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) 
-> JsonResult err!("Incorrect client_secret", format!("IP: {}. Username: {}.", ip.ip, user.email)) } - let (mut device, new_device) = get_device(&data, &conn, &user); + let (mut device, new_device) = get_device(&data, &conn, &user).await; if CONFIG.mail_enabled() && new_device { let now = Utc::now().naive_utc(); - if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) { + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name).await { error!("Error sending new device email: {:#?}", e); if CONFIG.require_device_email() { @@ -234,9 +235,9 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult } // Common - let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); + let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn).await; let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); - device.save(&conn)?; + device.save(&conn).await?; info!("User {} logged in successfully via API key. 
IP: {}", user.email, ip.ip); @@ -258,7 +259,7 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult } /// Retrieves an existing device or creates a new device from ConnectData and the User -fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) { +async fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) { // On iOS, device_type sends "iOS", on others it sends a number let device_type = util::try_parse_string(data.device_type.as_ref()).unwrap_or(0); let device_id = data.device_identifier.clone().expect("No device id provided"); @@ -266,17 +267,8 @@ fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) let mut new_device = false; // Find device or create new - let device = match Device::find_by_uuid(&device_id, conn) { - Some(device) => { - // Check if owned device, and recreate if not - if device.user_uuid != user.uuid { - info!("Device exists but is owned by another user. The old device will be discarded"); - new_device = true; - Device::new(device_id, user.uuid.clone(), device_name, device_type) - } else { - device - } - } + let device = match Device::find_by_uuid_and_user(&device_id, &user.uuid, conn).await { + Some(device) => device, None => { new_device = true; Device::new(device_id, user.uuid.clone(), device_name, device_type) @@ -286,28 +278,28 @@ fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) (device, new_device) } -fn twofactor_auth( +async fn twofactor_auth( user_uuid: &str, data: &ConnectData, device: &mut Device, ip: &ClientIp, conn: &DbConn, ) -> ApiResult> { - let twofactors = TwoFactor::find_by_user(user_uuid, conn); + let twofactors = TwoFactor::find_by_user(user_uuid, conn).await; // No twofactor token if twofactor is disabled if twofactors.is_empty() { return Ok(None); } - TwoFactorIncomplete::mark_incomplete(user_uuid, &device.uuid, &device.name, ip, conn)?; + TwoFactorIncomplete::mark_incomplete(user_uuid, 
&device.uuid, &device.name, ip, conn).await?; let twofactor_ids: Vec<_> = twofactors.iter().map(|tf| tf.atype).collect(); let selected_id = data.two_factor_provider.unwrap_or(twofactor_ids[0]); // If we aren't given a two factor provider, asume the first one let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), + None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn).await?, "2FA token not provided"), }; let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); @@ -320,16 +312,17 @@ fn twofactor_auth( match TwoFactorType::from_i32(selected_id) { Some(TwoFactorType::Authenticator) => { - _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)? + _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn).await? + } + Some(TwoFactorType::Webauthn) => { + _tf::webauthn::validate_webauthn_login(user_uuid, twofactor_code, conn).await? } - Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?, - Some(TwoFactorType::Webauthn) => _tf::webauthn::validate_webauthn_login(user_uuid, twofactor_code, conn)?, Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?, Some(TwoFactorType::Duo) => { - _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)? + _tf::duo::validate_duo_login(data.username.as_ref().unwrap().trim(), twofactor_code, conn).await? } Some(TwoFactorType::Email) => { - _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)? + _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn).await? 
} Some(TwoFactorType::Remember) => { @@ -338,14 +331,17 @@ fn twofactor_auth( remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } _ => { - err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided") + err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn).await?, + "2FA Remember token not provided" + ) } } } _ => err!("Invalid two factor provider"), } - TwoFactorIncomplete::mark_complete(user_uuid, &device.uuid, conn)?; + TwoFactorIncomplete::mark_complete(user_uuid, &device.uuid, conn).await?; if !CONFIG.disable_2fa_remember() && remember == 1 { Ok(Some(device.refresh_twofactor_remember())) @@ -359,7 +355,7 @@ fn _selected_data(tf: Option) -> ApiResult { tf.map(|t| t.data).map_res("Two factor doesn't exist") } -fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult { +async fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> ApiResult { use crate::api::core::two_factor; let mut result = json!({ @@ -375,38 +371,18 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api match TwoFactorType::from_i32(*provider) { Some(TwoFactorType::Authenticator) => { /* Nothing to do for TOTP */ } - Some(TwoFactorType::U2f) if CONFIG.domain_set() => { - let request = two_factor::u2f::generate_u2f_login(user_uuid, conn)?; - let mut challenge_list = Vec::new(); - - for key in request.registered_keys { - challenge_list.push(json!({ - "appId": request.app_id, - "challenge": request.challenge, - "version": key.version, - "keyHandle": key.key_handle, - })); - } - - let challenge_list_str = serde_json::to_string(&challenge_list).unwrap(); - - result["TwoFactorProviders2"][provider.to_string()] = json!({ - "Challenges": challenge_list_str, - }); - } - Some(TwoFactorType::Webauthn) if CONFIG.domain_set() => { - let request = two_factor::webauthn::generate_webauthn_login(user_uuid, conn)?; + let request 
= two_factor::webauthn::generate_webauthn_login(user_uuid, conn).await?; result["TwoFactorProviders2"][provider.to_string()] = request.0; } Some(TwoFactorType::Duo) => { - let email = match User::find_by_uuid(user_uuid, conn) { + let email = match User::find_by_uuid(user_uuid, conn).await { Some(u) => u.email, None => err!("User does not exist"), }; - let (signature, host) = duo::generate_duo_signature(&email, conn)?; + let (signature, host) = duo::generate_duo_signature(&email, conn).await?; result["TwoFactorProviders2"][provider.to_string()] = json!({ "Host": host, @@ -415,7 +391,7 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api } Some(tf_type @ TwoFactorType::YubiKey) => { - let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn) { + let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await { Some(tf) => tf, None => err!("No YubiKey devices registered"), }; @@ -430,14 +406,14 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api Some(tf_type @ TwoFactorType::Email) => { use crate::api::core::two_factor as _tf; - let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn) { + let twofactor = match TwoFactor::find_by_user_and_type(user_uuid, tf_type as i32, conn).await { Some(tf) => tf, None => err!("No twofactor email registered"), }; // Send email immediately if email is the only 2FA option if providers.len() == 1 { - _tf::email::send_token(user_uuid, conn)? + _tf::email::send_token(user_uuid, conn).await? 
} let email_data = EmailTokenData::from_json(&twofactor.data)?; @@ -453,68 +429,65 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api Ok(result) } +#[post("/accounts/prelogin", data = "")] +async fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { + _prelogin(data, conn).await +} + // https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts // https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, FromForm)] #[allow(non_snake_case)] struct ConnectData { - // refresh_token, password, client_credentials (API key) - grant_type: String, + #[field(name = uncased("grant_type"))] + #[field(name = uncased("granttype"))] + grant_type: String, // refresh_token, password, client_credentials (API key) // Needed for grant_type="refresh_token" + #[field(name = uncased("refresh_token"))] + #[field(name = uncased("refreshtoken"))] refresh_token: Option, // Needed for grant_type = "password" | "client_credentials" - client_id: Option, // web, cli, desktop, browser, mobile - client_secret: Option, // API key login (cli only) + #[field(name = uncased("client_id"))] + #[field(name = uncased("clientid"))] + client_id: Option, // web, cli, desktop, browser, mobile + #[field(name = uncased("client_secret"))] + #[field(name = uncased("clientsecret"))] + client_secret: Option, + #[field(name = uncased("password"))] password: Option, + #[field(name = uncased("scope"))] scope: Option, + #[field(name = uncased("username"))] username: Option, + #[field(name = uncased("device_identifier"))] + #[field(name = uncased("deviceidentifier"))] device_identifier: Option, + #[field(name = uncased("device_name"))] + #[field(name = uncased("devicename"))] device_name: Option, + #[field(name = uncased("device_type"))] + #[field(name = uncased("devicetype"))] device_type: Option, - device_push_token: Option, // Unused; mobile device push not 
yet supported. + #[allow(unused)] + #[field(name = uncased("device_push_token"))] + #[field(name = uncased("devicepushtoken"))] + _device_push_token: Option, // Unused; mobile device push not yet supported. // Needed for two-factor auth + #[field(name = uncased("two_factor_provider"))] + #[field(name = uncased("twofactorprovider"))] two_factor_provider: Option, + #[field(name = uncased("two_factor_token"))] + #[field(name = uncased("twofactortoken"))] two_factor_token: Option, + #[field(name = uncased("two_factor_remember"))] + #[field(name = uncased("twofactorremember"))] two_factor_remember: Option, } -impl<'f> FromForm<'f> for ConnectData { - type Error = String; - - fn from_form(items: &mut FormItems<'f>, _strict: bool) -> Result { - let mut form = Self::default(); - for item in items { - let (key, value) = item.key_value_decoded(); - let mut normalized_key = key.to_lowercase(); - normalized_key.retain(|c| c != '_'); // Remove '_' - - match normalized_key.as_ref() { - "granttype" => form.grant_type = value, - "refreshtoken" => form.refresh_token = Some(value), - "clientid" => form.client_id = Some(value), - "clientsecret" => form.client_secret = Some(value), - "password" => form.password = Some(value), - "scope" => form.scope = Some(value), - "username" => form.username = Some(value), - "deviceidentifier" => form.device_identifier = Some(value), - "devicename" => form.device_name = Some(value), - "devicetype" => form.device_type = Some(value), - "devicepushtoken" => form.device_push_token = Some(value), - "twofactorprovider" => form.two_factor_provider = value.parse().ok(), - "twofactortoken" => form.two_factor_token = Some(value), - "twofactorremember" => form.two_factor_remember = value.parse().ok(), - key => warn!("Detected unexpected parameter during login: {}", key), - } - } - - Ok(form) - } -} - fn _check_is_some(value: &Option, msg: &str) -> EmptyResult { if value.is_none() { err!(msg) diff --git a/src/api/mod.rs b/src/api/mod.rs index 3546acd7..99fb98be 
100644 --- a/src/api/mod.rs +++ b/src/api/mod.rs @@ -5,7 +5,7 @@ mod identity; mod notifications; mod web; -use rocket_contrib::json::Json; +use rocket::serde::json::Json; use serde_json::Value; pub use crate::api::{ diff --git a/src/api/notifications.rs b/src/api/notifications.rs index 77539969..2657d312 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -1,19 +1,41 @@ -use std::sync::atomic::{AtomicBool, Ordering}; +use std::{ + net::SocketAddr, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, + time::Duration, +}; -use rocket::Route; -use rocket_contrib::json::Json; +use chrono::NaiveDateTime; +use futures::{SinkExt, StreamExt}; +use rmpv::Value; +use rocket::{serde::json::Json, Route}; use serde_json::Value as JsonValue; - -use crate::{api::EmptyResult, auth::Headers, Error, CONFIG}; +use tokio::{ + net::{TcpListener, TcpStream}, + sync::mpsc::Sender, +}; +use tokio_tungstenite::{ + accept_hdr_async, + tungstenite::{handshake, Message}, +}; + +use crate::{ + api::EmptyResult, + auth::Headers, + db::models::{Cipher, Folder, Send, User}, + Error, CONFIG, +}; pub fn routes() -> Vec { routes![negotiate, websockets_err] } -static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); - #[get("/hub")] fn websockets_err() -> EmptyResult { + static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); + if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() { @@ -55,19 +77,6 @@ fn negotiate(_headers: Headers) -> Json { // // Websockets server // -use std::io; -use std::sync::Arc; -use std::thread; - -use ws::{self, util::Token, Factory, Handler, Handshake, Message, Sender}; - -use chashmap::CHashMap; -use chrono::NaiveDateTime; -use serde_json::from_str; - -use crate::db::models::{Cipher, Folder, Send, User}; - -use rmpv::Value; fn serialize(val: Value) -> Vec { use rmpv::encode::write_value; @@ -118,192 +127,49 @@ fn convert_option>(option: Option) -> Value { } } 
-// Server WebSocket handler -pub struct WsHandler { - out: Sender, - user_uuid: Option, - users: WebSocketUsers, -} - const RECORD_SEPARATOR: u8 = 0x1e; const INITIAL_RESPONSE: [u8; 3] = [0x7b, 0x7d, RECORD_SEPARATOR]; // {, }, -#[derive(Deserialize)] -struct InitialMessage { - protocol: String, +#[derive(Deserialize, Copy, Clone, Eq, PartialEq)] +struct InitialMessage<'a> { + protocol: &'a str, version: i32, } -const PING_MS: u64 = 15_000; -const PING: Token = Token(1); - -const ACCESS_TOKEN_KEY: &str = "access_token="; - -impl WsHandler { - fn err(&self, msg: &'static str) -> ws::Result<()> { - self.out.close(ws::CloseCode::Invalid)?; - - // We need to specifically return an IO error so ws closes the connection - let io_error = io::Error::from(io::ErrorKind::InvalidData); - Err(ws::Error::new(ws::ErrorKind::Io(io_error), msg)) - } - - fn get_request_token(&self, hs: Handshake) -> Option { - use std::str::from_utf8; - - // Verify we have a token header - if let Some(header_value) = hs.request.header("Authorization") { - if let Ok(converted) = from_utf8(header_value) { - if let Some(token_part) = converted.split("Bearer ").nth(1) { - return Some(token_part.into()); - } - } - }; - - // Otherwise verify the query parameter value - let path = hs.request.resource(); - if let Some(params) = path.split('?').nth(1) { - let params_iter = params.split('&').take(1); - for val in params_iter { - if let Some(stripped) = val.strip_prefix(ACCESS_TOKEN_KEY) { - return Some(stripped.into()); - } - } - }; - - None - } -} - -impl Handler for WsHandler { - fn on_open(&mut self, hs: Handshake) -> ws::Result<()> { - // Path == "/notifications/hub?id===&access_token=" - // - // We don't use `id`, and as of around 2020-03-25, the official clients - // no longer seem to pass `id` (only `access_token`). 
- - // Get user token from header or query parameter - let access_token = match self.get_request_token(hs) { - Some(token) => token, - _ => return self.err("Missing access token"), - }; - - // Validate the user - use crate::auth; - let claims = match auth::decode_login(access_token.as_str()) { - Ok(claims) => claims, - Err(_) => return self.err("Invalid access token provided"), - }; - - // Assign the user to the handler - let user_uuid = claims.sub; - self.user_uuid = Some(user_uuid.clone()); - - // Add the current Sender to the user list - let handler_insert = self.out.clone(); - let handler_update = self.out.clone(); - - self.users.map.upsert(user_uuid, || vec![handler_insert], |ref mut v| v.push(handler_update)); - - // Schedule a ping to keep the connection alive - self.out.timeout(PING_MS, PING) - } - - fn on_message(&mut self, msg: Message) -> ws::Result<()> { - if let Message::Text(text) = msg.clone() { - let json = &text[..text.len() - 1]; // Remove last char - - if let Ok(InitialMessage { - protocol, - version, - }) = from_str::(json) - { - if &protocol == "messagepack" && version == 1 { - return self.out.send(&INITIAL_RESPONSE[..]); // Respond to initial message - } - } - } - - // If it's not the initial message, just echo the message - self.out.send(msg) - } - - fn on_timeout(&mut self, event: Token) -> ws::Result<()> { - if event == PING { - // send ping - self.out.send(create_ping())?; - - // reschedule the timeout - self.out.timeout(PING_MS, PING) - } else { - Ok(()) - } - } -} - -struct WsFactory { - pub users: WebSocketUsers, -} - -impl WsFactory { - pub fn init() -> Self { - WsFactory { - users: WebSocketUsers { - map: Arc::new(CHashMap::new()), - }, - } - } -} - -impl Factory for WsFactory { - type Handler = WsHandler; - - fn connection_made(&mut self, out: Sender) -> Self::Handler { - WsHandler { - out, - user_uuid: None, - users: self.users.clone(), - } - } - - fn connection_lost(&mut self, handler: Self::Handler) { - // Remove handler - if let 
Some(user_uuid) = &handler.user_uuid { - if let Some(mut user_conn) = self.users.map.get_mut(user_uuid) { - if let Some(pos) = user_conn.iter().position(|x| x == &handler.out) { - user_conn.remove(pos); - } - } - } - } -} +static INITIAL_MESSAGE: InitialMessage<'static> = InitialMessage { + protocol: "messagepack", + version: 1, +}; +// We attach the UUID to the sender so we can differentiate them when we need to remove them from the Vec +type UserSenders = (uuid::Uuid, Sender); #[derive(Clone)] pub struct WebSocketUsers { - map: Arc>>, + map: Arc>>, } impl WebSocketUsers { - fn send_update(&self, user_uuid: &str, data: &[u8]) -> ws::Result<()> { - if let Some(user) = self.map.get(user_uuid) { - for sender in user.iter() { - sender.send(data)?; + async fn send_update(&self, user_uuid: &str, data: &[u8]) { + if let Some(user) = self.map.get(user_uuid).map(|v| v.clone()) { + for (_, sender) in user.iter() { + if sender.send(Message::binary(data)).await.is_err() { + // TODO: Delete from map here too? 
+ } } } - Ok(()) } // NOTE: The last modified date needs to be updated before calling these methods - pub fn send_user_update(&self, ut: UpdateType, user: &User) { + pub async fn send_user_update(&self, ut: UpdateType, user: &User) { let data = create_update( vec![("UserId".into(), user.uuid.clone().into()), ("Date".into(), serialize_date(user.updated_at))], ut, ); - self.send_update(&user.uuid, &data).ok(); + self.send_update(&user.uuid, &data).await; } - pub fn send_folder_update(&self, ut: UpdateType, folder: &Folder) { + pub async fn send_folder_update(&self, ut: UpdateType, folder: &Folder) { let data = create_update( vec![ ("Id".into(), folder.uuid.clone().into()), @@ -313,10 +179,10 @@ impl WebSocketUsers { ut, ); - self.send_update(&folder.user_uuid, &data).ok(); + self.send_update(&folder.user_uuid, &data).await; } - pub fn send_cipher_update(&self, ut: UpdateType, cipher: &Cipher, user_uuids: &[String]) { + pub async fn send_cipher_update(&self, ut: UpdateType, cipher: &Cipher, user_uuids: &[String]) { let user_uuid = convert_option(cipher.user_uuid.clone()); let org_uuid = convert_option(cipher.organization_uuid.clone()); @@ -332,11 +198,11 @@ impl WebSocketUsers { ); for uuid in user_uuids { - self.send_update(uuid, &data).ok(); + self.send_update(uuid, &data).await; } } - pub fn send_send_update(&self, ut: UpdateType, send: &Send, user_uuids: &[String]) { + pub async fn send_send_update(&self, ut: UpdateType, send: &Send, user_uuids: &[String]) { let user_uuid = convert_option(send.user_uuid.clone()); let data = create_update( @@ -349,7 +215,7 @@ impl WebSocketUsers { ); for uuid in user_uuids { - self.send_update(uuid, &data).ok(); + self.send_update(uuid, &data).await; } } } @@ -392,7 +258,7 @@ fn create_ping() -> Vec { } #[allow(dead_code)] -#[derive(PartialEq)] +#[derive(Eq, PartialEq)] pub enum UpdateType { CipherUpdate = 0, CipherCreate = 1, @@ -416,28 +282,145 @@ pub enum UpdateType { None = 100, } -use rocket::State; -pub type Notify<'a> = 
State<'a, WebSocketUsers>; +pub type Notify<'a> = &'a rocket::State; pub fn start_notification_server() -> WebSocketUsers { - let factory = WsFactory::init(); - let users = factory.users.clone(); + let users = WebSocketUsers { + map: Arc::new(dashmap::DashMap::new()), + }; if CONFIG.websocket_enabled() { - thread::spawn(move || { - let mut settings = ws::Settings::default(); - settings.max_connections = 500; - settings.queue_size = 2; - settings.panic_on_internal = false; - - ws::Builder::new() - .with_settings(settings) - .build(factory) - .unwrap() - .listen((CONFIG.websocket_address().as_str(), CONFIG.websocket_port())) - .unwrap(); + let users2 = users.clone(); + tokio::spawn(async move { + let addr = (CONFIG.websocket_address(), CONFIG.websocket_port()); + info!("Starting WebSockets server on {}:{}", addr.0, addr.1); + let listener = TcpListener::bind(addr).await.expect("Can't listen on websocket port"); + + let (shutdown_tx, mut shutdown_rx) = tokio::sync::oneshot::channel::<()>(); + CONFIG.set_ws_shutdown_handle(shutdown_tx); + + loop { + tokio::select! 
{ + Ok((stream, addr)) = listener.accept() => { + tokio::spawn(handle_connection(stream, users2.clone(), addr)); + } + + _ = &mut shutdown_rx => { + break; + } + } + } + + info!("Shutting down WebSockets server!") }); } users } + +async fn handle_connection(stream: TcpStream, users: WebSocketUsers, addr: SocketAddr) -> Result<(), Error> { + let mut user_uuid: Option = None; + + info!("Accepting WS connection from {addr}"); + + // Accept connection, do initial handshake, validate auth token and get the user ID + use handshake::server::{Request, Response}; + let mut stream = accept_hdr_async(stream, |req: &Request, res: Response| { + if let Some(token) = get_request_token(req) { + if let Ok(claims) = crate::auth::decode_login(&token) { + user_uuid = Some(claims.sub); + return Ok(res); + } + } + Err(Response::builder().status(401).body(None).unwrap()) + }) + .await?; + + let user_uuid = user_uuid.expect("User UUID should be set after the handshake"); + + // Add a channel to send messages to this client to the map + let entry_uuid = uuid::Uuid::new_v4(); + let (tx, mut rx) = tokio::sync::mpsc::channel(100); + users.map.entry(user_uuid.clone()).or_default().push((entry_uuid, tx)); + + let mut interval = tokio::time::interval(Duration::from_secs(15)); + loop { + tokio::select! 
{ + res = stream.next() => { + match res { + Some(Ok(message)) => { + // Respond to any pings + if let Message::Ping(ping) = message { + if stream.send(Message::Pong(ping)).await.is_err() { + break; + } + continue; + } else if let Message::Pong(_) = message { + /* Ignored */ + continue; + } + + // We should receive an initial message with the protocol and version, and we will reply to it + if let Message::Text(ref message) = message { + let msg = message.strip_suffix(RECORD_SEPARATOR as char).unwrap_or(message); + + if serde_json::from_str(msg).ok() == Some(INITIAL_MESSAGE) { + stream.send(Message::binary(INITIAL_RESPONSE)).await?; + continue; + } + } + + // Just echo anything else the client sends + if stream.send(message).await.is_err() { + break; + } + } + _ => break, + } + } + + res = rx.recv() => { + match res { + Some(res) => { + if stream.send(res).await.is_err() { + break; + } + }, + None => break, + } + } + + _= interval.tick() => { + if stream.send(Message::Ping(create_ping())).await.is_err() { + break; + } + } + } + } + + info!("Closing WS connection from {addr}"); + + // Delete from map + users.map.entry(user_uuid).or_default().retain(|(uuid, _)| uuid != &entry_uuid); + Ok(()) +} + +fn get_request_token(req: &handshake::server::Request) -> Option { + const ACCESS_TOKEN_KEY: &str = "access_token="; + + if let Some(Ok(auth)) = req.headers().get("Authorization").map(|a| a.to_str()) { + if let Some(token_part) = auth.strip_prefix("Bearer ") { + return Some(token_part.to_owned()); + } + } + + if let Some(params) = req.uri().query() { + let params_iter = params.split('&').take(1); + for val in params_iter { + if let Some(stripped) = val.strip_prefix(ACCESS_TOKEN_KEY) { + return Some(stripped.to_owned()); + } + } + } + None +} diff --git a/src/api/web.rs b/src/api/web.rs index 9a5f74cc..c8ecdb84 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -1,10 +1,11 @@ use std::path::{Path, PathBuf}; -use rocket::{http::ContentType, response::content::Content, 
response::NamedFile, Route}; -use rocket_contrib::json::Json; +use rocket::serde::json::Json; +use rocket::{fs::NamedFile, http::ContentType, Route}; use serde_json::Value; use crate::{ + api::core::now, error::Error, util::{Cached, SafeString}, CONFIG, @@ -21,16 +22,16 @@ pub fn routes() -> Vec { } #[get("/")] -fn web_index() -> Cached> { - Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok(), false) +async fn web_index() -> Cached> { + Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).await.ok(), false) } #[get("/app-id.json")] -fn app_id() -> Cached>> { +fn app_id() -> Cached<(ContentType, Json)> { let content_type = ContentType::new("application", "fido.trusted-apps+json"); Cached::long( - Content( + ( content_type, Json(json!({ "trustedFacets": [ @@ -58,45 +59,37 @@ fn app_id() -> Cached>> { } #[get("/", rank = 10)] // Only match this if the other routes don't match -fn web_files(p: PathBuf) -> Cached> { - Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok(), true) +async fn web_files(p: PathBuf) -> Cached> { + Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).await.ok(), true) } #[get("/attachments//")] -fn attachments(uuid: SafeString, file_id: SafeString) -> Option { - NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file_id)).ok() +async fn attachments(uuid: SafeString, file_id: SafeString) -> Option { + NamedFile::open(Path::new(&CONFIG.attachments_folder()).join(uuid).join(file_id)).await.ok() } // We use DbConn here to let the alive healthcheck also verify the database connection. 
use crate::db::DbConn; #[get("/alive")] fn alive(_conn: DbConn) -> Json { - use crate::util::format_date; - use chrono::Utc; - - Json(format_date(&Utc::now().naive_utc())) + now() } #[get("/vw_static/")] -fn static_files(filename: String) -> Result, Error> { +fn static_files(filename: String) -> Result<(ContentType, &'static [u8]), Error> { match filename.as_ref() { - "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), - "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))), - "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))), - "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))), - "vaultwarden-icon.png" => { - Ok(Content(ContentType::PNG, include_bytes!("../static/images/vaultwarden-icon.png"))) - } - - "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), - "bootstrap-native.js" => { - Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))) - } - "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), - "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))), - "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), + "mail-github.png" => Ok((ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), + "logo-gray.png" => Ok((ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))), + "error-x.svg" => Ok((ContentType::SVG, include_bytes!("../static/images/error-x.svg"))), + "hibp.png" => Ok((ContentType::PNG, include_bytes!("../static/images/hibp.png"))), + "vaultwarden-icon.png" => Ok((ContentType::PNG, include_bytes!("../static/images/vaultwarden-icon.png"))), + "bootstrap.css" => Ok((ContentType::CSS, 
include_bytes!("../static/scripts/bootstrap.css"))), + "bootstrap-native.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))), + "identicon.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), + "datatables.js" => Ok((ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))), + "datatables.css" => Ok((ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), "jquery-3.6.0.slim.js" => { - Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.6.0.slim.js"))) + Ok((ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.6.0.slim.js"))) } _ => err!(format!("Static file not found: {}", filename)), } diff --git a/src/auth.rs b/src/auth.rs index 741d4e95..f99fbd39 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -11,7 +11,6 @@ use serde::ser::Serialize; use crate::{ error::{Error, MapResult}, - util::read_file, CONFIG, }; @@ -30,13 +29,13 @@ static JWT_ADMIN_ISSUER: Lazy = Lazy::new(|| format!("{}|admin", CONFIG. 
static JWT_SEND_ISSUER: Lazy = Lazy::new(|| format!("{}|send", CONFIG.domain_origin())); static PRIVATE_RSA_KEY_VEC: Lazy> = Lazy::new(|| { - read_file(&CONFIG.private_rsa_key()).unwrap_or_else(|e| panic!("Error loading private RSA Key.\n{}", e)) + std::fs::read(&CONFIG.private_rsa_key()).unwrap_or_else(|e| panic!("Error loading private RSA Key.\n{}", e)) }); static PRIVATE_RSA_KEY: Lazy = Lazy::new(|| { EncodingKey::from_rsa_pem(&PRIVATE_RSA_KEY_VEC).unwrap_or_else(|e| panic!("Error decoding private RSA Key.\n{}", e)) }); static PUBLIC_RSA_KEY_VEC: Lazy> = Lazy::new(|| { - read_file(&CONFIG.public_rsa_key()).unwrap_or_else(|e| panic!("Error loading public RSA Key.\n{}", e)) + std::fs::read(&CONFIG.public_rsa_key()).unwrap_or_else(|e| panic!("Error loading public RSA Key.\n{}", e)) }); static PUBLIC_RSA_KEY: Lazy = Lazy::new(|| { DecodingKey::from_rsa_pem(&PUBLIC_RSA_KEY_VEC).unwrap_or_else(|e| panic!("Error decoding public RSA Key.\n{}", e)) @@ -55,15 +54,11 @@ pub fn encode_jwt(claims: &T) -> String { } fn decode_jwt(token: &str, issuer: String) -> Result { - let validation = jsonwebtoken::Validation { - leeway: 30, // 30 seconds - validate_exp: true, - validate_nbf: true, - aud: None, - iss: Some(issuer), - sub: None, - algorithms: vec![JWT_ALGORITHM], - }; + let mut validation = jsonwebtoken::Validation::new(JWT_ALGORITHM); + validation.leeway = 30; // 30 seconds + validation.validate_exp = true; + validation.validate_nbf = true; + validation.set_issuer(&[issuer]); let token = token.replace(char::is_whitespace, ""); jsonwebtoken::decode(&token, &PUBLIC_RSA_KEY, &validation).map(|d| d.claims).map_res("Error decoding JWT") @@ -257,7 +252,10 @@ pub fn generate_send_claims(send_id: &str, file_id: &str) -> BasicJwtClaims { // // Bearer token authentication // -use rocket::request::{FromRequest, Outcome, Request}; +use rocket::{ + outcome::try_outcome, + request::{FromRequest, Outcome, Request}, +}; use crate::db::{ models::{CollectionUser, Device, User, 
UserOrgStatus, UserOrgType, UserOrganization, UserStampException}, @@ -268,10 +266,11 @@ pub struct Host { pub host: String, } -impl<'a, 'r> FromRequest<'a, 'r> for Host { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for Host { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { + async fn from_request(request: &'r Request<'_>) -> Outcome { let headers = request.headers(); // Get host @@ -314,17 +313,14 @@ pub struct Headers { pub user: User, } -impl<'a, 'r> FromRequest<'a, 'r> for Headers { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for Headers { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { + async fn from_request(request: &'r Request<'_>) -> Outcome { let headers = request.headers(); - let host = match Host::from_request(request) { - Outcome::Forward(_) => return Outcome::Forward(()), - Outcome::Failure(f) => return Outcome::Failure(f), - Outcome::Success(host) => host.host, - }; + let host = try_outcome!(Host::from_request(request).await).host; // Get access_token let access_token: &str = match headers.get_one("Authorization") { @@ -344,17 +340,17 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { let device_uuid = claims.device; let user_uuid = claims.sub; - let conn = match request.guard::() { + let conn = match DbConn::from_request(request).await { Outcome::Success(conn) => conn, _ => err_handler!("Error getting DB"), }; - let device = match Device::find_by_uuid(&device_uuid, &conn) { + let device = match Device::find_by_uuid_and_user(&device_uuid, &user_uuid, &conn).await { Some(device) => device, None => err_handler!("Invalid device id"), }; - let user = match User::find_by_uuid(&user_uuid, &conn) { + let user = match User::find_by_uuid(&user_uuid, &conn).await { Some(user) => user, None => err_handler!("Device has no user associated"), }; @@ -363,7 +359,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { if let Some(stamp_exception) = 
user.stamp_exception.as_deref().and_then(|s| serde_json::from_str::(s).ok()) { - let current_route = match request.route().and_then(|r| r.name) { + let current_route = match request.route().and_then(|r| r.name.as_deref()) { Some(name) => name, _ => err_handler!("Error getting current route for stamp exception"), }; @@ -376,7 +372,7 @@ impl<'a, 'r> FromRequest<'a, 'r> for Headers { // This prevents checking this stamp exception for new requests. let mut user = user; user.reset_stamp_exception(); - if let Err(e) = user.save(&conn) { + if let Err(e) = user.save(&conn).await { error!("Error updating user: {:#?}", e); } err_handler!("Stamp exception is expired") @@ -410,14 +406,14 @@ pub struct OrgHeaders { // org_id is usually the second path param ("/organizations/"), // but there are cases where it is a query value. // First check the path, if this is not a valid uuid, try the query values. -fn get_org_id(request: &Request) -> Option { - if let Some(Ok(org_id)) = request.get_param::(1) { +fn get_org_id(request: &Request<'_>) -> Option { + if let Some(Ok(org_id)) = request.param::(1) { if uuid::Uuid::parse_str(&org_id).is_ok() { return Some(org_id); } } - if let Some(Ok(org_id)) = request.get_query_value::("organizationId") { + if let Some(Ok(org_id)) = request.query_value::("organizationId") { if uuid::Uuid::parse_str(&org_id).is_ok() { return Some(org_id); } @@ -426,52 +422,48 @@ fn get_org_id(request: &Request) -> Option { None } -impl<'a, 'r> FromRequest<'a, 'r> for OrgHeaders { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for OrgHeaders { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { - match request.guard::() { - Outcome::Forward(_) => Outcome::Forward(()), - Outcome::Failure(f) => Outcome::Failure(f), - Outcome::Success(headers) => { - match get_org_id(request) { - Some(org_id) => { - let conn = match request.guard::() { - Outcome::Success(conn) => conn, - _ => err_handler!("Error getting DB"), - }; - - let user = 
headers.user; - let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn) { - Some(user) => { - if user.status == UserOrgStatus::Confirmed as i32 { - user - } else { - err_handler!("The current user isn't confirmed member of the organization") - } - } - None => err_handler!("The current user isn't member of the organization"), - }; - - Outcome::Success(Self { - host: headers.host, - device: headers.device, - user, - org_user_type: { - if let Some(org_usr_type) = UserOrgType::from_i32(org_user.atype) { - org_usr_type - } else { - // This should only happen if the DB is corrupted - err_handler!("Unknown user type in the database") - } - }, - org_user, - org_id, - }) + async fn from_request(request: &'r Request<'_>) -> Outcome { + let headers = try_outcome!(Headers::from_request(request).await); + match get_org_id(request) { + Some(org_id) => { + let conn = match DbConn::from_request(request).await { + Outcome::Success(conn) => conn, + _ => err_handler!("Error getting DB"), + }; + + let user = headers.user; + let org_user = match UserOrganization::find_by_user_and_org(&user.uuid, &org_id, &conn).await { + Some(user) => { + if user.status == UserOrgStatus::Confirmed as i32 { + user + } else { + err_handler!("The current user isn't confirmed member of the organization") + } } - _ => err_handler!("Error getting the organization id"), - } + None => err_handler!("The current user isn't member of the organization"), + }; + + Outcome::Success(Self { + host: headers.host, + device: headers.device, + user, + org_user_type: { + if let Some(org_usr_type) = UserOrgType::from_i32(org_user.atype) { + org_usr_type + } else { + // This should only happen if the DB is corrupted + err_handler!("Unknown user type in the database") + } + }, + org_user, + org_id, + }) } + _ => err_handler!("Error getting the organization id"), } } } @@ -483,25 +475,21 @@ pub struct AdminHeaders { pub org_user_type: UserOrgType, } -impl<'a, 'r> FromRequest<'a, 'r> for 
AdminHeaders { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for AdminHeaders { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { - match request.guard::() { - Outcome::Forward(_) => Outcome::Forward(()), - Outcome::Failure(f) => Outcome::Failure(f), - Outcome::Success(headers) => { - if headers.org_user_type >= UserOrgType::Admin { - Outcome::Success(Self { - host: headers.host, - device: headers.device, - user: headers.user, - org_user_type: headers.org_user_type, - }) - } else { - err_handler!("You need to be Admin or Owner to call this endpoint") - } - } + async fn from_request(request: &'r Request<'_>) -> Outcome { + let headers = try_outcome!(OrgHeaders::from_request(request).await); + if headers.org_user_type >= UserOrgType::Admin { + Outcome::Success(Self { + host: headers.host, + device: headers.device, + user: headers.user, + org_user_type: headers.org_user_type, + }) + } else { + err_handler!("You need to be Admin or Owner to call this endpoint") } } } @@ -519,14 +507,14 @@ impl From for Headers { // col_id is usually the fourth path param ("/organizations//collections/"), // but there could be cases where it is a query value. // First check the path, if this is not a valid uuid, try the query values. 
-fn get_col_id(request: &Request) -> Option { - if let Some(Ok(col_id)) = request.get_param::(3) { +fn get_col_id(request: &Request<'_>) -> Option { + if let Some(Ok(col_id)) = request.param::(3) { if uuid::Uuid::parse_str(&col_id).is_ok() { return Some(col_id); } } - if let Some(Ok(col_id)) = request.get_query_value::("collectionId") { + if let Some(Ok(col_id)) = request.query_value::("collectionId") { if uuid::Uuid::parse_str(&col_id).is_ok() { return Some(col_id); } @@ -545,46 +533,40 @@ pub struct ManagerHeaders { pub org_user_type: UserOrgType, } -impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ManagerHeaders { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { - match request.guard::() { - Outcome::Forward(_) => Outcome::Forward(()), - Outcome::Failure(f) => Outcome::Failure(f), - Outcome::Success(headers) => { - if headers.org_user_type >= UserOrgType::Manager { - match get_col_id(request) { - Some(col_id) => { - let conn = match request.guard::() { - Outcome::Success(conn) => conn, - _ => err_handler!("Error getting DB"), - }; - - if !headers.org_user.has_full_access() { - match CollectionUser::find_by_collection_and_user( - &col_id, - &headers.org_user.user_uuid, - &conn, - ) { - Some(_) => (), - None => err_handler!("The current user isn't a manager for this collection"), - } - } + async fn from_request(request: &'r Request<'_>) -> Outcome { + let headers = try_outcome!(OrgHeaders::from_request(request).await); + if headers.org_user_type >= UserOrgType::Manager { + match get_col_id(request) { + Some(col_id) => { + let conn = match DbConn::from_request(request).await { + Outcome::Success(conn) => conn, + _ => err_handler!("Error getting DB"), + }; + + if !headers.org_user.has_full_access() { + match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) + .await + { + Some(_) => (), + None => err_handler!("The current user isn't a 
manager for this collection"), } - _ => err_handler!("Error getting the collection id"), } - - Outcome::Success(Self { - host: headers.host, - device: headers.device, - user: headers.user, - org_user_type: headers.org_user_type, - }) - } else { - err_handler!("You need to be a Manager, Admin or Owner to call this endpoint") } + _ => err_handler!("Error getting the collection id"), } + + Outcome::Success(Self { + host: headers.host, + device: headers.device, + user: headers.user, + org_user_type: headers.org_user_type, + }) + } else { + err_handler!("You need to be a Manager, Admin or Owner to call this endpoint") } } } @@ -608,25 +590,21 @@ pub struct ManagerHeadersLoose { pub org_user_type: UserOrgType, } -impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeadersLoose { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ManagerHeadersLoose { type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { - match request.guard::() { - Outcome::Forward(_) => Outcome::Forward(()), - Outcome::Failure(f) => Outcome::Failure(f), - Outcome::Success(headers) => { - if headers.org_user_type >= UserOrgType::Manager { - Outcome::Success(Self { - host: headers.host, - device: headers.device, - user: headers.user, - org_user_type: headers.org_user_type, - }) - } else { - err_handler!("You need to be a Manager, Admin or Owner to call this endpoint") - } - } + async fn from_request(request: &'r Request<'_>) -> Outcome { + let headers = try_outcome!(OrgHeaders::from_request(request).await); + if headers.org_user_type >= UserOrgType::Manager { + Outcome::Success(Self { + host: headers.host, + device: headers.device, + user: headers.user, + org_user_type: headers.org_user_type, + }) + } else { + err_handler!("You need to be a Manager, Admin or Owner to call this endpoint") } } } @@ -647,24 +625,20 @@ pub struct OwnerHeaders { pub user: User, } -impl<'a, 'r> FromRequest<'a, 'r> for OwnerHeaders { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for OwnerHeaders { 
type Error = &'static str; - fn from_request(request: &'a Request<'r>) -> Outcome { - match request.guard::() { - Outcome::Forward(_) => Outcome::Forward(()), - Outcome::Failure(f) => Outcome::Failure(f), - Outcome::Success(headers) => { - if headers.org_user_type == UserOrgType::Owner { - Outcome::Success(Self { - host: headers.host, - device: headers.device, - user: headers.user, - }) - } else { - err_handler!("You need to be Owner to call this endpoint") - } - } + async fn from_request(request: &'r Request<'_>) -> Outcome { + let headers = try_outcome!(OrgHeaders::from_request(request).await); + if headers.org_user_type == UserOrgType::Owner { + Outcome::Success(Self { + host: headers.host, + device: headers.device, + user: headers.user, + }) + } else { + err_handler!("You need to be Owner to call this endpoint") } } } @@ -678,10 +652,11 @@ pub struct ClientIp { pub ip: IpAddr, } -impl<'a, 'r> FromRequest<'a, 'r> for ClientIp { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for ClientIp { type Error = (); - fn from_request(req: &'a Request<'r>) -> Outcome { + async fn from_request(req: &'r Request<'_>) -> Outcome { let ip = if CONFIG._ip_header_enabled() { req.headers().get_one(&CONFIG.ip_header()).and_then(|ip| { match ip.find(',') { diff --git a/src/config.rs b/src/config.rs index 92fe8b9d..b8f3246b 100644 --- a/src/config.rs +++ b/src/config.rs @@ -36,6 +36,9 @@ macro_rules! make_config { pub struct Config { inner: RwLock } struct Inner { + rocket_shutdown_handle: Option, + ws_shutdown_handle: Option>, + templates: Handlebars<'static>, config: ConfigItems, @@ -56,13 +59,13 @@ macro_rules! 
make_config { impl ConfigBuilder { #[allow(clippy::field_reassign_with_default)] fn from_env() -> Self { - match dotenv::from_path(".env") { + match dotenvy::from_path(get_env("ENV_FILE").unwrap_or_else(|| String::from(".env"))) { Ok(_) => (), Err(e) => match e { - dotenv::Error::LineParse(msg, pos) => { + dotenvy::Error::LineParse(msg, pos) => { panic!("Error loading the .env file:\nNear {:?} on position {}\nPlease fix and restart!\n", msg, pos); }, - dotenv::Error::Io(ioerr) => match ioerr.kind() { + dotenvy::Error::Io(ioerr) => match ioerr.kind() { std::io::ErrorKind::NotFound => { println!("[INFO] No .env file found.\n"); }, @@ -88,8 +91,7 @@ macro_rules! make_config { } fn from_file(path: &str) -> Result { - use crate::util::read_file_string; - let config_str = read_file_string(path)?; + let config_str = std::fs::read_to_string(path)?; serde_json::from_str(&config_str).map_err(Into::into) } @@ -332,6 +334,8 @@ make_config! { attachments_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "attachments"); /// Sends folder sends_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "sends"); + /// Temp folder |> Used for storing temporary file uploads + tmp_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "tmp"); /// Templates folder templates_folder: String, false, auto, |c| format!("{}/{}", c.data_folder, "templates"); /// Session JWT key @@ -431,6 +435,8 @@ make_config! { /// Password iterations |> Number of server-side passwords hashing iterations. /// The changes only apply when a user changes their password. Not recommended to lower the value password_iterations: i32, true, def, 100_000; + /// Allow password hints |> Controls whether users can set password hints. This setting applies globally to all users. + password_hints_allowed: bool, true, def, true; /// Show password hint |> Controls whether a password hint should be shown directly in the web page /// if SMTP service is not configured. 
Not recommended for publicly-accessible instances as this /// provides unauthenticated access to potentially sensitive data. @@ -457,6 +463,10 @@ make_config! { /// service is set, an icon request to Vaultwarden will return an HTTP redirect to the /// corresponding icon at the external service. icon_service: String, false, def, "internal".to_string(); + /// Internal + _icon_service_url: String, false, gen, |c| generate_icon_service_url(&c.icon_service); + /// Internal + _icon_service_csp: String, false, gen, |c| generate_icon_service_csp(&c.icon_service, &c._icon_service_url); /// Icon redirect code |> The HTTP status code to use for redirects to an external icon service. /// The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent). /// Temporary redirects are useful while testing different icon services, but once a service @@ -509,9 +519,15 @@ make_config! { /// Max database connection retries |> Number of times to retry the database connection during startup, with 1 second between each retry, set to 0 to retry indefinitely db_connection_retries: u32, false, def, 15; + /// Timeout when acquiring database connection + database_timeout: u64, false, def, 30; + /// Database connection pool size database_max_conns: u32, false, def, 10; + /// Database connection init |> SQL statements to run when creating a new database connection, mainly useful for connection-scoped pragmas. If empty, a database-specific default is used. + database_conn_init: String, false, def, "".to_string(); + /// Bypass admin page security (Know the risks!) |> Disables the Admin Token for the admin page so you may use your own auth in-front disable_admin_token: bool, true, def, false; @@ -561,12 +577,14 @@ make_config!
{ _enable_smtp: bool, true, def, true; /// Host smtp_host: String, true, option; - /// Enable Secure SMTP |> (Explicit) - Enabling this by default would use STARTTLS (Standard ports 587 or 25) - smtp_ssl: bool, true, def, true; - /// Force TLS |> (Implicit) - Enabling this would force the use of an SSL/TLS connection, instead of upgrading an insecure one with STARTTLS (Standard port 465) - smtp_explicit_tls: bool, true, def, false; + /// DEPRECATED smtp_ssl |> DEPRECATED - Please use SMTP_SECURITY + smtp_ssl: bool, false, option; + /// DEPRECATED smtp_explicit_tls |> DEPRECATED - Please use SMTP_SECURITY + smtp_explicit_tls: bool, false, option; + /// Secure SMTP |> ("starttls", "force_tls", "off") Enable a secure connection. Default is "starttls" (Explicit - ports 587 or 25), "force_tls" (Implicit - port 465) or "off", no encryption + smtp_security: String, true, auto, |c| smtp_convert_deprecated_ssl_options(c.smtp_ssl, c.smtp_explicit_tls); // TODO: After deprecation make it `def, "starttls".to_string()` /// Port - smtp_port: u16, true, auto, |c| if c.smtp_explicit_tls {465} else if c.smtp_ssl {587} else {25}; + smtp_port: u16, true, auto, |c| if c.smtp_security == *"force_tls" {465} else if c.smtp_security == *"starttls" {587} else {25}; /// From Address smtp_from: String, true, def, String::new(); /// From Name @@ -593,8 +611,8 @@ make_config! { email_2fa: _enable_email_2fa { /// Enabled |> Disabling will prevent users from setting up new email 2FA and using existing email 2FA configured _enable_email_2fa: bool, true, auto, |c| c._enable_smtp && c.smtp_host.is_some(); - /// Email token size |> Number of digits in an email token (min: 6, max: 19). Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting. - email_token_size: u32, true, def, 6; + /// Email token size |> Number of digits in an email 2FA token (min: 6, max: 255). 
Note that the Bitwarden clients are hardcoded to mention 6 digit codes regardless of this setting. + email_token_size: u8, true, def, 6; /// Token expiration time |> Maximum time in seconds a token is valid. The time the user has to open email client and copy token. email_expiration_time: u64, true, def, 600; /// Maximum attempts |> Maximum attempts before an email token is reset and a new email will need to be sent @@ -649,6 +667,13 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { } if cfg._enable_smtp { + match cfg.smtp_security.as_str() { + "off" | "starttls" | "force_tls" => (), + _ => err!( + "`SMTP_SECURITY` is invalid. It needs to be one of the following options: starttls, force_tls or off" + ), + } + if cfg.smtp_host.is_some() == cfg.smtp_from.is_empty() { err!("Both `SMTP_HOST` and `SMTP_FROM` need to be set for email support") } @@ -668,10 +693,6 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { if cfg._enable_email_2fa && cfg.email_token_size < 6 { err!("`EMAIL_TOKEN_SIZE` has a minimum size of 6") } - - if cfg._enable_email_2fa && cfg.email_token_size > 19 { - err!("`EMAIL_TOKEN_SIZE` has a maximum size of 19") - } } // Check if the icon blacklist regex is valid @@ -731,6 +752,48 @@ fn extract_url_path(url: &str) -> String { } } +/// Generate the correct URL for the icon service. +/// This will be used within icons.rs to call the external icon service. 
+fn generate_icon_service_url(icon_service: &str) -> String { + match icon_service { + "internal" => "".to_string(), + "bitwarden" => "https://icons.bitwarden.net/{}/icon.png".to_string(), + "duckduckgo" => "https://icons.duckduckgo.com/ip3/{}.ico".to_string(), + "google" => "https://www.google.com/s2/favicons?domain={}&sz=32".to_string(), + _ => icon_service.to_string(), + } +} + +/// Generate the CSP string needed to allow redirected icon fetching +fn generate_icon_service_csp(icon_service: &str, icon_service_url: &str) -> String { + // We split on the first '{', since that is the variable delimiter for an icon service URL. + // Everything up until the first '{' should be fixed and can be used as a CSP string. + let csp_string = match icon_service_url.split_once('{') { + Some((c, _)) => c.to_string(), + None => "".to_string(), + }; + + // Because Google does a second redirect to their gstatic.com domain, we need to add an extra csp string. + match icon_service { + "google" => csp_string + " https://*.gstatic.com/favicon", + _ => csp_string, + } +} + +/// Convert the old SMTP_SSL and SMTP_EXPLICIT_TLS options +fn smtp_convert_deprecated_ssl_options(smtp_ssl: Option, smtp_explicit_tls: Option) -> String { + if smtp_explicit_tls.is_some() || smtp_ssl.is_some() { + println!("[DEPRECATED]: `SMTP_SSL` or `SMTP_EXPLICIT_TLS` is set. 
Please use `SMTP_SECURITY` instead."); + } + if smtp_explicit_tls.is_some() && smtp_explicit_tls.unwrap() { + return "force_tls".to_string(); + } else if smtp_ssl.is_some() && !smtp_ssl.unwrap() { + return "off".to_string(); + } + // Return the default `starttls` in all other cases + "starttls".to_string() +} + impl Config { pub fn load() -> Result { // Loading from env and file @@ -747,6 +810,8 @@ impl Config { Ok(Config { inner: RwLock::new(Inner { + rocket_shutdown_handle: None, + ws_shutdown_handle: None, templates: load_templates(&config.templates_folder), config, _env, @@ -911,6 +976,26 @@ impl Config { hb.render(name, data).map_err(Into::into) } } + + pub fn set_rocket_shutdown_handle(&self, handle: rocket::Shutdown) { + self.inner.write().unwrap().rocket_shutdown_handle = Some(handle); + } + + pub fn set_ws_shutdown_handle(&self, handle: tokio::sync::oneshot::Sender<()>) { + self.inner.write().unwrap().ws_shutdown_handle = Some(handle); + } + + pub fn shutdown(&self) { + if let Ok(mut c) = self.inner.write() { + if let Some(handle) = c.ws_shutdown_handle.take() { + handle.send(()).ok(); + } + + if let Some(handle) = c.rocket_shutdown_handle.take() { + handle.notify(); + } + } + } } use handlebars::{Context, Handlebars, Helper, HelperResult, Output, RenderContext, RenderError, Renderable}; @@ -984,7 +1069,7 @@ where fn case_helper<'reg, 'rc>( h: &Helper<'reg, 'rc>, - r: &'reg Handlebars, + r: &'reg Handlebars<'_>, ctx: &'rc Context, rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, @@ -1001,17 +1086,16 @@ fn case_helper<'reg, 'rc>( fn js_escape_helper<'reg, 'rc>( h: &Helper<'reg, 'rc>, - _r: &'reg Handlebars, + _r: &'reg Handlebars<'_>, _ctx: &'rc Context, _rc: &mut RenderContext<'reg, 'rc>, out: &mut dyn Output, ) -> HelperResult { - let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; + let param = h.param(0).ok_or_else(|| RenderError::new("Param not found for helper \"jsesc\""))?; let no_quote = 
h.param(1).is_some(); - let value = - param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; + let value = param.value().as_str().ok_or_else(|| RenderError::new("Param for helper \"jsesc\" is not a String"))?; let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); if !no_quote { diff --git a/src/crypto.rs b/src/crypto.rs index e30439fc..be9680cb 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -6,8 +6,6 @@ use std::num::NonZeroU32; use data_encoding::HEXLOWER; use ring::{digest, hmac, pbkdf2}; -use crate::error::Error; - static DIGEST_ALG: pbkdf2::Algorithm = pbkdf2::PBKDF2_HMAC_SHA256; const OUTPUT_LEN: usize = digest::SHA256_OUTPUT_LEN; @@ -65,6 +63,12 @@ pub fn get_random_string(alphabet: &[u8], num_chars: usize) -> String { .collect() } +/// Generates a random numeric string. +pub fn get_random_string_numeric(num_chars: usize) -> String { + const ALPHABET: &[u8] = b"0123456789"; + get_random_string(ALPHABET, num_chars) +} + /// Generates a random alphanumeric string. pub fn get_random_string_alphanum(num_chars: usize) -> String { const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ @@ -87,23 +91,9 @@ pub fn generate_attachment_id() -> String { generate_id(10) // 80 bits } -pub fn generate_token(token_size: u32) -> Result { - // A u64 can represent all whole numbers up to 19 digits long. - if token_size > 19 { - err!("Token size is limited to 19 digits") - } - - let low: u64 = 0; - let high: u64 = 10u64.pow(token_size); - - // Generate a random number in the range [low, high), then format it as a - // token of fixed width, left-padding with 0 as needed. - use rand::{thread_rng, Rng}; - let mut rng = thread_rng(); - let number: u64 = rng.gen_range(low..high); - let token = format!("{:0size$}", number, size = token_size as usize); - - Ok(token) +/// Generates a numeric token for email-based verifications. 
+pub fn generate_email_token(token_size: u8) -> String { + get_random_string_numeric(token_size as usize) } /// Generates a personal API key. diff --git a/src/db/mod.rs b/src/db/mod.rs index bcbb7ce4..0b3b7a5b 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -1,8 +1,20 @@ -use diesel::r2d2::{ConnectionManager, Pool, PooledConnection}; +use std::{sync::Arc, time::Duration}; + +use diesel::{ + connection::SimpleConnection, + r2d2::{ConnectionManager, CustomizeConnection, Pool, PooledConnection}, +}; + use rocket::{ http::Status, + outcome::IntoOutcome, request::{FromRequest, Outcome}, - Request, State, + Request, +}; + +use tokio::{ + sync::{Mutex, OwnedSemaphorePermit, Semaphore}, + time::timeout, }; use crate::{ @@ -22,6 +34,23 @@ pub mod __mysql_schema; #[path = "schemas/postgresql/schema.rs"] pub mod __postgresql_schema; +// These changes are based on Rocket 0.5-rc wrapper of Diesel: https://github.com/SergioBenitez/Rocket/blob/v0.5-rc/contrib/sync_db_pools + +// A wrapper around spawn_blocking that propagates panics to the calling code. +pub async fn run_blocking(job: F) -> R +where + F: FnOnce() -> R + Send + 'static, + R: Send + 'static, +{ + match tokio::task::spawn_blocking(job).await { + Ok(ret) => ret, + Err(e) => match e.try_into_panic() { + Ok(panic) => std::panic::resume_unwind(panic), + Err(_) => unreachable!("spawn_blocking tasks are never cancelled"), + }, + } +} + // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported macro_rules! generate_connections { ( $( $name:ident: $ty:ty ),+ ) => { @@ -29,15 +58,74 @@ macro_rules! 
generate_connections { #[derive(Eq, PartialEq)] pub enum DbConnType { $( $name, )+ } + pub struct DbConn { + conn: Arc>>, + permit: Option, + } + #[allow(non_camel_case_types)] - pub enum DbConn { $( #[cfg($name)] $name(PooledConnection>), )+ } + pub enum DbConnInner { $( #[cfg($name)] $name(PooledConnection>), )+ } + + #[derive(Debug)] + pub struct DbConnOptions { + pub init_stmts: String, + } + + $( // Based on . + #[cfg($name)] + impl CustomizeConnection<$ty, diesel::r2d2::Error> for DbConnOptions { + fn on_acquire(&self, conn: &mut $ty) -> Result<(), diesel::r2d2::Error> { + (|| { + if !self.init_stmts.is_empty() { + conn.batch_execute(&self.init_stmts)?; + } + Ok(()) + })().map_err(diesel::r2d2::Error::QueryError) + } + })+ + + #[derive(Clone)] + pub struct DbPool { + // This is an 'Option' so that we can drop the pool in a 'spawn_blocking'. + pool: Option, + semaphore: Arc + } #[allow(non_camel_case_types)] #[derive(Clone)] - pub enum DbPool { $( #[cfg($name)] $name(Pool>), )+ } + pub enum DbPoolInner { $( #[cfg($name)] $name(Pool>), )+ } + + impl Drop for DbConn { + fn drop(&mut self) { + let conn = self.conn.clone(); + let permit = self.permit.take(); + + // Since connection can't be on the stack in an async fn during an + // await, we have to spawn a new blocking-safe thread... + tokio::task::spawn_blocking(move || { + // And then re-enter the runtime to wait on the async mutex, but in a blocking fashion. 
+ let mut conn = tokio::runtime::Handle::current().block_on(conn.lock_owned()); + + if let Some(conn) = conn.take() { + drop(conn); + } + + // Drop permit after the connection is dropped + drop(permit); + }); + } + } + + impl Drop for DbPool { + fn drop(&mut self) { + let pool = self.pool.take(); + tokio::task::spawn_blocking(move || drop(pool)); + } + } impl DbPool { - // For the given database URL, guess it's type, run migrations create pool and return it + // For the given database URL, guess its type, run migrations, create pool, and return it + #[allow(clippy::diverging_sub_expression)] pub fn from_config() -> Result { let url = CONFIG.database_url(); let conn_type = DbConnType::from_url(&url)?; @@ -50,9 +138,16 @@ macro_rules! generate_connections { let manager = ConnectionManager::new(&url); let pool = Pool::builder() .max_size(CONFIG.database_max_conns()) + .connection_timeout(Duration::from_secs(CONFIG.database_timeout())) + .connection_customizer(Box::new(DbConnOptions{ + init_stmts: conn_type.get_init_stmts() + })) .build(manager) .map_res("Failed to create pool")?; - return Ok(Self::$name(pool)); + return Ok(DbPool { + pool: Some(DbPoolInner::$name(pool)), + semaphore: Arc::new(Semaphore::new(CONFIG.database_max_conns() as usize)), + }); } #[cfg(not($name))] #[allow(unreachable_code)] @@ -61,10 +156,26 @@ macro_rules! 
generate_connections { )+ } } // Get a connection from the pool - pub fn get(&self) -> Result { - match self { $( + pub async fn get(&self) -> Result { + let duration = Duration::from_secs(CONFIG.database_timeout()); + let permit = match timeout(duration, self.semaphore.clone().acquire_owned()).await { + Ok(p) => p.expect("Semaphore should be open"), + Err(_) => { + err!("Timeout waiting for database connection"); + } + }; + + match self.pool.as_ref().expect("DbPool.pool should always be Some()") { $( #[cfg($name)] - Self::$name(p) => Ok(DbConn::$name(p.get().map_res("Error retrieving connection from pool")?)), + DbPoolInner::$name(p) => { + let pool = p.clone(); + let c = run_blocking(move || pool.get_timeout(duration)).await.map_res("Error retrieving connection from pool")?; + + return Ok(DbConn { + conn: Arc::new(Mutex::new(Some(DbConnInner::$name(c)))), + permit: Some(permit) + }); + }, )+ } } } @@ -104,6 +215,23 @@ impl DbConnType { err!("`DATABASE_URL` looks like a SQLite URL, but 'sqlite' feature is not enabled") } } + + pub fn get_init_stmts(&self) -> String { + let init_stmts = CONFIG.database_conn_init(); + if !init_stmts.is_empty() { + init_stmts + } else { + self.default_init_stmts() + } + } + + pub fn default_init_stmts(&self) -> String { + match self { + Self::sqlite => "PRAGMA busy_timeout = 5000; PRAGMA synchronous = NORMAL;".to_string(), + Self::mysql => "".to_string(), + Self::postgresql => "".to_string(), + } + } } #[macro_export] @@ -113,42 +241,52 @@ macro_rules! db_run { db_run! { $conn: sqlite, mysql, postgresql $body } }; + ( @raw $conn:ident: $body:block ) => { + db_run! 
{ @raw $conn: sqlite, mysql, postgresql $body } + }; + // Different code for each db ( $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{ #[allow(unused)] use diesel::prelude::*; - match $conn { - $($( + #[allow(unused)] use $crate::db::FromDb; + + let conn = $conn.conn.clone(); + let mut conn = conn.lock_owned().await; + match conn.as_mut().expect("internal invariant broken: self.connection is Some") { + $($( #[cfg($db)] - crate::db::DbConn::$db(ref $conn) => { + $crate::db::DbConnInner::$db($conn) => { paste::paste! { - #[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *}; + #[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *}; #[allow(unused)] use [<__ $db _model>]::*; - #[allow(unused)] use crate::db::FromDb; } - $body + + tokio::task::block_in_place(move || { $body }) // Run blocking can't be used due to the 'static limitation, use block_in_place instead }, )+)+ - }} - }; - - // Same for all dbs - ( @raw $conn:ident: $body:block ) => { - db_run! { @raw $conn: sqlite, mysql, postgresql $body } - }; + } + }}; - // Different code for each db - ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => { + ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => {{ #[allow(unused)] use diesel::prelude::*; - #[allow(unused_variables)] - match $conn { - $($( + #[allow(unused)] use $crate::db::FromDb; + + let conn = $conn.conn.clone(); + let mut conn = conn.lock_owned().await; + match conn.as_mut().expect("internal invariant broken: self.connection is Some") { + $($( #[cfg($db)] - crate::db::DbConn::$db(ref $conn) => { - $body + $crate::db::DbConnInner::$db($conn) => { + paste::paste! 
{ + #[allow(unused)] use $crate::db::[<__ $db _schema>]::{self as schema, *}; + // @ RAW: #[allow(unused)] use [<__ $db _model>]::*; + } + + tokio::task::block_in_place(move || { $body }) // Run blocking can't be used due to the 'static limitation, use block_in_place instead }, )+)+ } - }; + }}; } pub trait FromDb { @@ -201,7 +339,7 @@ macro_rules! db_object { paste::paste! { #[allow(unused)] use super::*; #[allow(unused)] use diesel::prelude::*; - #[allow(unused)] use crate::db::[<__ $db _schema>]::*; + #[allow(unused)] use $crate::db::[<__ $db _schema>]::*; $( #[$attr] )* pub struct [<$name Db>] { $( @@ -213,7 +351,7 @@ macro_rules! db_object { #[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } } } - impl crate::db::FromDb for [<$name Db>] { + impl $crate::db::FromDb for [<$name Db>] { type Output = super::$name; #[allow(clippy::wrong_self_convention)] #[inline(always)] fn from_db(self) -> Self::Output { super::$name { $( $field: self.$field, )+ } } @@ -227,9 +365,10 @@ pub mod models; /// Creates a back-up of the sqlite database /// MySQL/MariaDB and PostgreSQL are not supported. -pub fn backup_database(conn: &DbConn) -> Result<(), Error> { +pub async fn backup_database(conn: &DbConn) -> Result<(), Error> { db_run! {@raw conn: postgresql, mysql { + let _ = conn; err!("PostgreSQL and MySQL/MariaDB do not support this backup feature"); } sqlite { @@ -244,7 +383,7 @@ pub fn backup_database(conn: &DbConn) -> Result<(), Error> { } /// Get the SQL Server version -pub fn get_sql_server_version(conn: &DbConn) -> String { +pub async fn get_sql_server_version(conn: &DbConn) -> String { db_run! {@raw conn: postgresql, mysql { no_arg_sql_function!(version, diesel::sql_types::Text); @@ -260,15 +399,14 @@ pub fn get_sql_server_version(conn: &DbConn) -> String { /// Attempts to retrieve a single connection from the managed database pool. If /// no pool is currently managed, fails with an `InternalServerError` status. 
If /// no connections are available, fails with a `ServiceUnavailable` status. -impl<'a, 'r> FromRequest<'a, 'r> for DbConn { +#[rocket::async_trait] +impl<'r> FromRequest<'r> for DbConn { type Error = (); - fn from_request(request: &'a Request<'r>) -> Outcome { - // https://github.com/SergioBenitez/Rocket/commit/e3c1a4ad3ab9b840482ec6de4200d30df43e357c - let pool = try_outcome!(request.guard::>()); - match pool.get() { - Ok(conn) => Outcome::Success(conn), - Err(_) => Outcome::Failure((Status::ServiceUnavailable, ())), + async fn from_request(request: &'r Request<'_>) -> Outcome { + match request.rocket().state::() { + Some(p) => p.get().await.map_err(|_| ()).into_outcome(Status::ServiceUnavailable), + None => Outcome::Failure((Status::InternalServerError, ())), } } } diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index bb0f9395..1df4d539 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -2,14 +2,12 @@ use std::io::ErrorKind; use serde_json::Value; -use super::Cipher; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "attachments"] #[changeset_options(treat_none_as_null="true")] - #[belongs_to(super::Cipher, foreign_key = "cipher_uuid")] #[primary_key(id)] pub struct Attachment { pub id: String, @@ -60,7 +58,7 @@ use crate::error::MapResult; /// Database methods impl Attachment { - pub fn save(&self, conn: &DbConn) -> EmptyResult { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { db_run! { conn: sqlite, mysql { match diesel::replace_into(attachments::table) @@ -92,7 +90,7 @@ impl Attachment { } } - pub fn delete(&self, conn: &DbConn) -> EmptyResult { + pub async fn delete(&self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: { crate::util::retry( || diesel::delete(attachments::table.filter(attachments::id.eq(&self.id))).execute(conn), @@ -116,14 +114,14 @@ impl Attachment { }} } - pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { - for attachment in Attachment::find_by_cipher(cipher_uuid, conn) { - attachment.delete(conn)?; + pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { + for attachment in Attachment::find_by_cipher(cipher_uuid, conn).await { + attachment.delete(conn).await?; } Ok(()) } - pub fn find_by_id(id: &str, conn: &DbConn) -> Option { + pub async fn find_by_id(id: &str, conn: &DbConn) -> Option { db_run! { conn: { attachments::table .filter(attachments::id.eq(id.to_lowercase())) @@ -133,7 +131,7 @@ impl Attachment { }} } - pub fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_cipher(cipher_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { attachments::table .filter(attachments::cipher_uuid.eq(cipher_uuid)) @@ -143,7 +141,7 @@ impl Attachment { }} } - pub fn size_by_user(user_uuid: &str, conn: &DbConn) -> i64 { + pub async fn size_by_user(user_uuid: &str, conn: &DbConn) -> i64 { db_run! { conn: { let result: Option = attachments::table .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) @@ -155,7 +153,7 @@ impl Attachment { }} } - pub fn count_by_user(user_uuid: &str, conn: &DbConn) -> i64 { + pub async fn count_by_user(user_uuid: &str, conn: &DbConn) -> i64 { db_run! { conn: { attachments::table .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) @@ -166,7 +164,7 @@ impl Attachment { }} } - pub fn size_by_org(org_uuid: &str, conn: &DbConn) -> i64 { + pub async fn size_by_org(org_uuid: &str, conn: &DbConn) -> i64 { db_run! 
{ conn: { let result: Option = attachments::table .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) @@ -178,7 +176,7 @@ impl Attachment { }} } - pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { + pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { db_run! { conn: { attachments::table .left_join(ciphers::table.on(ciphers::uuid.eq(attachments::cipher_uuid))) @@ -188,4 +186,15 @@ impl Attachment { .unwrap_or(0) }} } + + pub async fn find_all_by_ciphers(cipher_uuids: &Vec, conn: &DbConn) -> Vec { + db_run! { conn: { + attachments::table + .filter(attachments::cipher_uuid.eq_any(cipher_uuids)) + .select(attachments::all_columns) + .load::(conn) + .expect("Error loading attachments") + .from_db() + }} + } } diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 39aaf580..d5f78fbe 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -1,19 +1,17 @@ +use crate::CONFIG; use chrono::{Duration, NaiveDateTime, Utc}; use serde_json::Value; -use crate::CONFIG; +use super::{Attachment, CollectionCipher, Favorite, FolderCipher, User, UserOrgStatus, UserOrgType, UserOrganization}; -use super::{ - Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, - UserOrganization, -}; +use crate::api::core::CipherSyncData; + +use std::borrow::Cow; db_object! 
{ - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "ciphers"] #[changeset_options(treat_none_as_null="true")] - #[belongs_to(User, foreign_key = "user_uuid")] - #[belongs_to(Organization, foreign_key = "organization_uuid")] #[primary_key(uuid)] pub struct Cipher { pub uuid: String, @@ -82,22 +80,32 @@ use crate::error::MapResult; /// Database methods impl Cipher { - pub fn to_json(&self, host: &str, user_uuid: &str, conn: &DbConn) -> Value { + pub async fn to_json( + &self, + host: &str, + user_uuid: &str, + cipher_sync_data: Option<&CipherSyncData>, + conn: &DbConn, + ) -> Value { use crate::util::format_date; - let attachments = Attachment::find_by_cipher(&self.uuid, conn); - // When there are no attachments use null instead of an empty array - let attachments_json = if attachments.is_empty() { - Value::Null + let mut attachments_json: Value = Value::Null; + if let Some(cipher_sync_data) = cipher_sync_data { + if let Some(attachments) = cipher_sync_data.cipher_attachments.get(&self.uuid) { + attachments_json = attachments.iter().map(|c| c.to_json(host)).collect(); + } } else { - attachments.iter().map(|c| c.to_json(host)).collect() - }; + let attachments = Attachment::find_by_cipher(&self.uuid, conn).await; + if !attachments.is_empty() { + attachments_json = attachments.iter().map(|c| c.to_json(host)).collect() + } + } let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, conn) { + let (read_only, hide_passwords) = match self.get_access_restrictions(user_uuid, cipher_sync_data, conn).await { Some((ro, hp)) => (ro, hp), None => { error!("Cipher ownership assertion failure"); @@ -109,7 +117,7 @@ impl Cipher 
{ // If not passing an empty object, mobile clients will crash. let mut type_data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({})); - // NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream + // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // Set the first element of the Uris array as Uri, this is needed several (mobile) clients. if self.atype == 1 { if type_data_json["Uris"].is_array() { @@ -124,13 +132,23 @@ impl Cipher { // Clone the type_data and add some default value. let mut data_json = type_data_json.clone(); - // NOTE: This was marked as *Backwards Compatibilty Code*, but as of January 2021 this is still being used by upstream + // NOTE: This was marked as *Backwards Compatibility Code*, but as of January 2021 this is still being used by upstream // data_json should always contain the following keys with every atype data_json["Fields"] = json!(fields_json); data_json["Name"] = json!(self.name); data_json["Notes"] = json!(self.notes); data_json["PasswordHistory"] = json!(password_history_json); + let collection_ids = if let Some(cipher_sync_data) = cipher_sync_data { + if let Some(cipher_collections) = cipher_sync_data.cipher_collections.get(&self.uuid) { + Cow::from(cipher_collections) + } else { + Cow::from(Vec::with_capacity(0)) + } + } else { + Cow::from(self.get_collections(user_uuid, conn).await) + }; + // There are three types of cipher response models in upstream // Bitwarden: "cipherMini", "cipher", and "cipherDetails" (in order // of increasing level of detail). 
vaultwarden currently only @@ -144,8 +162,8 @@ impl Cipher { "Type": self.atype, "RevisionDate": format_date(&self.updated_at), "DeletedDate": self.deleted_at.map_or(Value::Null, |d| Value::String(format_date(&d))), - "FolderId": self.get_folder_uuid(user_uuid, conn), - "Favorite": self.is_favorite(user_uuid, conn), + "FolderId": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_folders.get(&self.uuid).map(|c| c.to_string() ) } else { self.get_folder_uuid(user_uuid, conn).await }, + "Favorite": if let Some(cipher_sync_data) = cipher_sync_data { cipher_sync_data.cipher_favorites.contains(&self.uuid) } else { self.is_favorite(user_uuid, conn).await }, "Reprompt": self.reprompt.unwrap_or(RepromptType::None as i32), "OrganizationId": self.organization_uuid, "Attachments": attachments_json, @@ -154,7 +172,7 @@ impl Cipher { "OrganizationUseTotp": true, // This field is specific to the cipherDetails type. - "CollectionIds": self.get_collections(user_uuid, conn), + "CollectionIds": collection_ids, "Name": self.name, "Notes": self.notes, @@ -189,28 +207,28 @@ impl Cipher { json_object } - pub fn update_users_revision(&self, conn: &DbConn) -> Vec { + pub async fn update_users_revision(&self, conn: &DbConn) -> Vec { let mut user_uuids = Vec::new(); match self.user_uuid { Some(ref user_uuid) => { - User::update_uuid_revision(user_uuid, conn); + User::update_uuid_revision(user_uuid, conn).await; user_uuids.push(user_uuid.clone()) } None => { // Belongs to Organization, need to update affected users if let Some(ref org_uuid) = self.organization_uuid { - UserOrganization::find_by_cipher_and_org(&self.uuid, org_uuid, conn).iter().for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); + for user_org in UserOrganization::find_by_cipher_and_org(&self.uuid, org_uuid, conn).await.iter() { + User::update_uuid_revision(&user_org.user_uuid, conn).await; user_uuids.push(user_org.user_uuid.clone()) - }); + } } } }; user_uuids } - pub fn 
save(&mut self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); + pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; self.updated_at = Utc::now().naive_utc(); db_run! { conn: @@ -244,13 +262,13 @@ impl Cipher { } } - pub fn delete(&self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); + pub async fn delete(&self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; - FolderCipher::delete_all_by_cipher(&self.uuid, conn)?; - CollectionCipher::delete_all_by_cipher(&self.uuid, conn)?; - Attachment::delete_all_by_cipher(&self.uuid, conn)?; - Favorite::delete_all_by_cipher(&self.uuid, conn)?; + FolderCipher::delete_all_by_cipher(&self.uuid, conn).await?; + CollectionCipher::delete_all_by_cipher(&self.uuid, conn).await?; + Attachment::delete_all_by_cipher(&self.uuid, conn).await?; + Favorite::delete_all_by_cipher(&self.uuid, conn).await?; db_run! { conn: { diesel::delete(ciphers::table.filter(ciphers::uuid.eq(&self.uuid))) @@ -259,54 +277,55 @@ impl Cipher { }} } - pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { - for cipher in Self::find_by_org(org_uuid, conn) { - cipher.delete(conn)?; + pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { + // TODO: Optimize this by executing a DELETE directly on the database, instead of first fetching. + for cipher in Self::find_by_org(org_uuid, conn).await { + cipher.delete(conn).await?; } Ok(()) } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for cipher in Self::find_owned_by_user(user_uuid, conn) { - cipher.delete(conn)?; + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + for cipher in Self::find_owned_by_user(user_uuid, conn).await { + cipher.delete(conn).await?; } Ok(()) } /// Purge all ciphers that are old enough to be auto-deleted. 
- pub fn purge_trash(conn: &DbConn) { + pub async fn purge_trash(conn: &DbConn) { if let Some(auto_delete_days) = CONFIG.trash_auto_delete_days() { let now = Utc::now().naive_utc(); let dt = now - Duration::days(auto_delete_days); - for cipher in Self::find_deleted_before(&dt, conn) { - cipher.delete(conn).ok(); + for cipher in Self::find_deleted_before(&dt, conn).await { + cipher.delete(conn).await.ok(); } } } - pub fn move_to_folder(&self, folder_uuid: Option, user_uuid: &str, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(user_uuid, conn); + pub async fn move_to_folder(&self, folder_uuid: Option, user_uuid: &str, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(user_uuid, conn).await; - match (self.get_folder_uuid(user_uuid, conn), folder_uuid) { + match (self.get_folder_uuid(user_uuid, conn).await, folder_uuid) { // No changes (None, None) => Ok(()), (Some(ref old), Some(ref new)) if old == new => Ok(()), // Add to folder - (None, Some(new)) => FolderCipher::new(&new, &self.uuid).save(conn), + (None, Some(new)) => FolderCipher::new(&new, &self.uuid).save(conn).await, // Remove from folder - (Some(old), None) => match FolderCipher::find_by_folder_and_cipher(&old, &self.uuid, conn) { - Some(old) => old.delete(conn), + (Some(old), None) => match FolderCipher::find_by_folder_and_cipher(&old, &self.uuid, conn).await { + Some(old) => old.delete(conn).await, None => err!("Couldn't move from previous folder"), }, // Move to another folder (Some(old), Some(new)) => { - if let Some(old) = FolderCipher::find_by_folder_and_cipher(&old, &self.uuid, conn) { - old.delete(conn)?; + if let Some(old) = FolderCipher::find_by_folder_and_cipher(&old, &self.uuid, conn).await { + old.delete(conn).await?; } - FolderCipher::new(&new, &self.uuid).save(conn) + FolderCipher::new(&new, &self.uuid).save(conn).await } } } @@ -317,13 +336,21 @@ impl Cipher { } /// Returns whether this cipher is owned by an org in which the user has full access. 
- pub fn is_in_full_access_org(&self, user_uuid: &str, conn: &DbConn) -> bool { + pub async fn is_in_full_access_org( + &self, + user_uuid: &str, + cipher_sync_data: Option<&CipherSyncData>, + conn: &DbConn, + ) -> bool { if let Some(ref org_uuid) = self.organization_uuid { - if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { + if let Some(cipher_sync_data) = cipher_sync_data { + if let Some(cached_user_org) = cipher_sync_data.user_organizations.get(org_uuid) { + return cached_user_org.has_full_access(); + } + } else if let Some(user_org) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await { return user_org.has_full_access(); } } - false } @@ -332,18 +359,62 @@ impl Cipher { /// not in any collection the user has access to. Otherwise, the user has /// access to this cipher, and Some(read_only, hide_passwords) represents /// the access restrictions. - pub fn get_access_restrictions(&self, user_uuid: &str, conn: &DbConn) -> Option<(bool, bool)> { + pub async fn get_access_restrictions( + &self, + user_uuid: &str, + cipher_sync_data: Option<&CipherSyncData>, + conn: &DbConn, + ) -> Option<(bool, bool)> { // Check whether this cipher is directly owned by the user, or is in // a collection that the user has full access to. If so, there are no // access restrictions. 
- if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, conn) { + if self.is_owned_by_user(user_uuid) || self.is_in_full_access_org(user_uuid, cipher_sync_data, conn).await { return Some((false, false)); } + let rows = if let Some(cipher_sync_data) = cipher_sync_data { + let mut rows: Vec<(bool, bool)> = Vec::new(); + if let Some(collections) = cipher_sync_data.cipher_collections.get(&self.uuid) { + for collection in collections { + if let Some(uc) = cipher_sync_data.user_collections.get(collection) { + rows.push((uc.read_only, uc.hide_passwords)); + } + } + } + rows + } else { + self.get_collections_access_flags(user_uuid, conn).await + }; + + if rows.is_empty() { + // This cipher isn't in any collections accessible to the user. + return None; + } + + // A cipher can be in multiple collections with inconsistent access flags. + // For example, a cipher could be in one collection where the user has + // read-only access, but also in another collection where the user has + // read/write access. For a flag to be in effect for a cipher, upstream + // requires all collections the cipher is in to have that flag set. + // Therefore, we do a boolean AND of all values in each of the `read_only` + // and `hide_passwords` columns. This could ideally be done as part of the + // query, but Diesel doesn't support a min() or bool_and() function on + // booleans and this behavior isn't portable anyway. + let mut read_only = true; + let mut hide_passwords = true; + for (ro, hp) in rows.iter() { + read_only &= ro; + hide_passwords &= hp; + } + + Some((read_only, hide_passwords)) + } + + pub async fn get_collections_access_flags(&self, user_uuid: &str, conn: &DbConn) -> Vec<(bool, bool)> { db_run! {conn: { // Check whether this cipher is in any collections accessible to the // user. If so, retrieve the access flags for each collection. 
- let rows = ciphers::table + ciphers::table .filter(ciphers::uuid.eq(&self.uuid)) .inner_join(ciphers_collections::table.on( ciphers::uuid.eq(ciphers_collections::cipher_uuid))) @@ -352,58 +423,35 @@ impl Cipher { .and(users_collections::user_uuid.eq(user_uuid)))) .select((users_collections::read_only, users_collections::hide_passwords)) .load::<(bool, bool)>(conn) - .expect("Error getting access restrictions"); - - if rows.is_empty() { - // This cipher isn't in any collections accessible to the user. - return None; - } - - // A cipher can be in multiple collections with inconsistent access flags. - // For example, a cipher could be in one collection where the user has - // read-only access, but also in another collection where the user has - // read/write access. For a flag to be in effect for a cipher, upstream - // requires all collections the cipher is in to have that flag set. - // Therefore, we do a boolean AND of all values in each of the `read_only` - // and `hide_passwords` columns. This could ideally be done as part of the - // query, but Diesel doesn't support a min() or bool_and() function on - // booleans and this behavior isn't portable anyway. 
- let mut read_only = true; - let mut hide_passwords = true; - for (ro, hp) in rows.iter() { - read_only &= ro; - hide_passwords &= hp; - } - - Some((read_only, hide_passwords)) + .expect("Error getting access restrictions") }} } - pub fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { - match self.get_access_restrictions(user_uuid, conn) { + pub async fn is_write_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { + match self.get_access_restrictions(user_uuid, None, conn).await { Some((read_only, _hide_passwords)) => !read_only, None => false, } } - pub fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { - self.get_access_restrictions(user_uuid, conn).is_some() + pub async fn is_accessible_to_user(&self, user_uuid: &str, conn: &DbConn) -> bool { + self.get_access_restrictions(user_uuid, None, conn).await.is_some() } // Returns whether this cipher is a favorite of the specified user. - pub fn is_favorite(&self, user_uuid: &str, conn: &DbConn) -> bool { - Favorite::is_favorite(&self.uuid, user_uuid, conn) + pub async fn is_favorite(&self, user_uuid: &str, conn: &DbConn) -> bool { + Favorite::is_favorite(&self.uuid, user_uuid, conn).await } // Sets whether this cipher is a favorite of the specified user. - pub fn set_favorite(&self, favorite: Option, user_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn set_favorite(&self, favorite: Option, user_uuid: &str, conn: &DbConn) -> EmptyResult { match favorite { None => Ok(()), // No change requested. - Some(status) => Favorite::set_favorite(status, &self.uuid, user_uuid, conn), + Some(status) => Favorite::set_favorite(status, &self.uuid, user_uuid, conn).await, } } - pub fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option { + pub async fn get_folder_uuid(&self, user_uuid: &str, conn: &DbConn) -> Option { db_run! 
{conn: { folders_ciphers::table .inner_join(folders::table) @@ -415,7 +463,7 @@ impl Cipher { }} } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! {conn: { ciphers::table .filter(ciphers::uuid.eq(uuid)) @@ -437,7 +485,7 @@ impl Cipher { // true, then the non-interesting ciphers will not be returned. As a // result, those ciphers will not appear in "My Vault" for the org // owner/admin, but they can still be accessed via the org vault view. - pub fn find_by_user(user_uuid: &str, visible_only: bool, conn: &DbConn) -> Vec { + pub async fn find_by_user(user_uuid: &str, visible_only: bool, conn: &DbConn) -> Vec { db_run! {conn: { let mut query = ciphers::table .left_join(ciphers_collections::table.on( @@ -472,12 +520,12 @@ impl Cipher { } // Find all ciphers visible to the specified user. - pub fn find_by_user_visible(user_uuid: &str, conn: &DbConn) -> Vec { - Self::find_by_user(user_uuid, true, conn) + pub async fn find_by_user_visible(user_uuid: &str, conn: &DbConn) -> Vec { + Self::find_by_user(user_uuid, true, conn).await } // Find all ciphers directly owned by the specified user. - pub fn find_owned_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_owned_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! {conn: { ciphers::table .filter( @@ -488,7 +536,7 @@ impl Cipher { }} } - pub fn count_owned_by_user(user_uuid: &str, conn: &DbConn) -> i64 { + pub async fn count_owned_by_user(user_uuid: &str, conn: &DbConn) -> i64 { db_run! {conn: { ciphers::table .filter(ciphers::user_uuid.eq(user_uuid)) @@ -499,7 +547,7 @@ impl Cipher { }} } - pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{conn: { ciphers::table .filter(ciphers::organization_uuid.eq(org_uuid)) @@ -507,7 +555,7 @@ impl Cipher { }} } - pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { + pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { db_run! {conn: { ciphers::table .filter(ciphers::organization_uuid.eq(org_uuid)) @@ -518,7 +566,7 @@ impl Cipher { }} } - pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec { db_run! {conn: { folders_ciphers::table.inner_join(ciphers::table) .filter(folders_ciphers::folder_uuid.eq(folder_uuid)) @@ -528,7 +576,7 @@ impl Cipher { } /// Find all ciphers that were deleted before the specified datetime. - pub fn find_deleted_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec { + pub async fn find_deleted_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec { db_run! {conn: { ciphers::table .filter(ciphers::deleted_at.lt(dt)) @@ -536,7 +584,7 @@ impl Cipher { }} } - pub fn get_collections(&self, user_id: &str, conn: &DbConn) -> Vec { + pub async fn get_collections(&self, user_id: &str, conn: &DbConn) -> Vec { db_run! {conn: { ciphers_collections::table .inner_join(collections::table.on( @@ -562,4 +610,32 @@ impl Cipher { .load::(conn).unwrap_or_default() }} } + + /// Return a Vec with (cipher_uuid, collection_uuid) + /// This is used during a full sync so we only need one query for all collections accessible. + pub async fn get_collections_with_cipher_by_user(user_id: &str, conn: &DbConn) -> Vec<(String, String)> { + db_run! 
{conn: { + ciphers_collections::table + .inner_join(collections::table.on( + collections::uuid.eq(ciphers_collections::collection_uuid) + )) + .inner_join(users_organizations::table.on( + users_organizations::org_uuid.eq(collections::org_uuid).and( + users_organizations::user_uuid.eq(user_id) + ) + )) + .left_join(users_collections::table.on( + users_collections::collection_uuid.eq(ciphers_collections::collection_uuid).and( + users_collections::user_uuid.eq(user_id) + ) + )) + .filter(users_collections::user_uuid.eq(user_id).or( // User has access to collection + users_organizations::access_all.eq(true).or( // User has access all + users_organizations::atype.le(UserOrgType::Admin as i32) // User is admin or owner + ) + )) + .select(ciphers_collections::all_columns) + .load::<(String, String)>(conn).unwrap_or_default() + }} + } } diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 2073ca17..5d9464fd 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,11 +1,10 @@ use serde_json::Value; -use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; +use super::{User, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "collections"] - #[belongs_to(Organization, foreign_key = "org_uuid")] #[primary_key(uuid)] pub struct Collection { pub uuid: String, @@ -13,10 +12,8 @@ db_object! { pub name: String, } - #[derive(Identifiable, Queryable, Insertable, Associations)] + #[derive(Identifiable, Queryable, Insertable)] #[table_name = "users_collections"] - #[belongs_to(User, foreign_key = "user_uuid")] - #[belongs_to(Collection, foreign_key = "collection_uuid")] #[primary_key(user_uuid, collection_uuid)] pub struct CollectionUser { pub user_uuid: String, @@ -25,10 +22,8 @@ db_object! 
{ pub hide_passwords: bool, } - #[derive(Identifiable, Queryable, Insertable, Associations)] + #[derive(Identifiable, Queryable, Insertable)] #[table_name = "ciphers_collections"] - #[belongs_to(Cipher, foreign_key = "cipher_uuid")] - #[belongs_to(Collection, foreign_key = "collection_uuid")] #[primary_key(cipher_uuid, collection_uuid)] pub struct CollectionCipher { pub cipher_uuid: String, @@ -57,11 +52,32 @@ impl Collection { }) } - pub fn to_json_details(&self, user_uuid: &str, conn: &DbConn) -> Value { + pub async fn to_json_details( + &self, + user_uuid: &str, + cipher_sync_data: Option<&crate::api::core::CipherSyncData>, + conn: &DbConn, + ) -> Value { + let (read_only, hide_passwords) = if let Some(cipher_sync_data) = cipher_sync_data { + match cipher_sync_data.user_organizations.get(&self.org_uuid) { + Some(uo) if uo.has_full_access() => (false, false), + Some(_) => { + if let Some(uc) = cipher_sync_data.user_collections.get(&self.uuid) { + (uc.read_only, uc.hide_passwords) + } else { + (false, false) + } + } + _ => (true, true), + } + } else { + (!self.is_writable_by_user(user_uuid, conn).await, self.hide_passwords_for_user(user_uuid, conn).await) + }; + let mut json_object = self.to_json(); json_object["Object"] = json!("collectionDetails"); - json_object["ReadOnly"] = json!(!self.is_writable_by_user(user_uuid, conn)); - json_object["HidePasswords"] = json!(self.hide_passwords_for_user(user_uuid, conn)); + json_object["ReadOnly"] = json!(read_only); + json_object["HidePasswords"] = json!(hide_passwords); json_object } } @@ -73,8 +89,8 @@ use crate::error::MapResult; /// Database methods impl Collection { - pub fn save(&self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); + pub async fn save(&self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; db_run! 
{ conn: sqlite, mysql { @@ -107,10 +123,10 @@ impl Collection { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); - CollectionCipher::delete_all_by_collection(&self.uuid, conn)?; - CollectionUser::delete_all_by_collection(&self.uuid, conn)?; + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; + CollectionCipher::delete_all_by_collection(&self.uuid, conn).await?; + CollectionUser::delete_all_by_collection(&self.uuid, conn).await?; db_run! { conn: { diesel::delete(collections::table.filter(collections::uuid.eq(self.uuid))) @@ -119,20 +135,20 @@ impl Collection { }} } - pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { - for collection in Self::find_by_organization(org_uuid, conn) { - collection.delete(conn)?; + pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { + for collection in Self::find_by_organization(org_uuid, conn).await { + collection.delete(conn).await?; } Ok(()) } - pub fn update_users_revision(&self, conn: &DbConn) { - UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).iter().for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + pub async fn update_users_revision(&self, conn: &DbConn) { + for user_org in UserOrganization::find_by_collection_and_org(&self.uuid, &self.org_uuid, conn).await.iter() { + User::update_uuid_revision(&user_org.user_uuid, conn).await; + } } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { collections::table .filter(collections::uuid.eq(uuid)) @@ -142,7 +158,7 @@ impl Collection { }} } - pub fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_user_uuid(user_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{ conn: { collections::table .left_join(users_collections::table.on( @@ -167,11 +183,11 @@ impl Collection { }} } - pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { - Self::find_by_user_uuid(user_uuid, conn).into_iter().filter(|c| c.org_uuid == org_uuid).collect() + pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { + Self::find_by_user_uuid(user_uuid, conn).await.into_iter().filter(|c| c.org_uuid == org_uuid).collect() } - pub fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_organization(org_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { collections::table .filter(collections::org_uuid.eq(org_uuid)) @@ -181,7 +197,7 @@ impl Collection { }} } - pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { collections::table .filter(collections::uuid.eq(uuid)) @@ -193,7 +209,7 @@ impl Collection { }} } - pub fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { db_run! 
{ conn: { collections::table .left_join(users_collections::table.on( @@ -219,8 +235,8 @@ impl Collection { }} } - pub fn is_writable_by_user(&self, user_uuid: &str, conn: &DbConn) -> bool { - match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn) { + pub async fn is_writable_by_user(&self, user_uuid: &str, conn: &DbConn) -> bool { + match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await { None => false, // Not in Org Some(user_org) => { if user_org.has_full_access() { @@ -241,8 +257,8 @@ impl Collection { } } - pub fn hide_passwords_for_user(&self, user_uuid: &str, conn: &DbConn) -> bool { - match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn) { + pub async fn hide_passwords_for_user(&self, user_uuid: &str, conn: &DbConn) -> bool { + match UserOrganization::find_by_user_and_org(user_uuid, &self.org_uuid, conn).await { None => true, // Not in Org Some(user_org) => { if user_org.has_full_access() { @@ -266,7 +282,7 @@ impl Collection { /// Database methods impl CollectionUser { - pub fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_organization_and_user_uuid(org_uuid: &str, user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_collections::table .filter(users_collections::user_uuid.eq(user_uuid)) @@ -279,14 +295,14 @@ impl CollectionUser { }} } - pub fn save( + pub async fn save( user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn, ) -> EmptyResult { - User::update_uuid_revision(user_uuid, conn); + User::update_uuid_revision(user_uuid, conn).await; db_run! { conn: sqlite, mysql { @@ -337,8 +353,8 @@ impl CollectionUser { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.user_uuid, conn); + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.user_uuid, conn).await; db_run! 
{ conn: { diesel::delete( @@ -351,7 +367,7 @@ impl CollectionUser { }} } - pub fn find_by_collection(collection_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_collection(collection_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_collections::table .filter(users_collections::collection_uuid.eq(collection_uuid)) @@ -362,7 +378,7 @@ impl CollectionUser { }} } - pub fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_collection_and_user(collection_uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { users_collections::table .filter(users_collections::collection_uuid.eq(collection_uuid)) @@ -374,10 +390,21 @@ impl CollectionUser { }} } - pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { - CollectionUser::find_by_collection(collection_uuid, conn).iter().for_each(|collection| { - User::update_uuid_revision(&collection.user_uuid, conn); - }); + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + db_run! { conn: { + users_collections::table + .filter(users_collections::user_uuid.eq(user_uuid)) + .select(users_collections::all_columns) + .load::(conn) + .expect("Error loading users_collections") + .from_db() + }} + } + + pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { + for collection in CollectionUser::find_by_collection(collection_uuid, conn).await.iter() { + User::update_uuid_revision(&collection.user_uuid, conn).await; + } db_run! 
{ conn: { diesel::delete(users_collections::table.filter(users_collections::collection_uuid.eq(collection_uuid))) @@ -386,8 +413,8 @@ impl CollectionUser { }} } - pub fn delete_all_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> EmptyResult { - let collectionusers = Self::find_by_organization_and_user_uuid(org_uuid, user_uuid, conn); + pub async fn delete_all_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> EmptyResult { + let collectionusers = Self::find_by_organization_and_user_uuid(org_uuid, user_uuid, conn).await; db_run! { conn: { for user in collectionusers { @@ -405,8 +432,8 @@ impl CollectionUser { /// Database methods impl CollectionCipher { - pub fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { - Self::update_users_revision(collection_uuid, conn); + pub async fn save(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { + Self::update_users_revision(collection_uuid, conn).await; db_run! { conn: sqlite, mysql { @@ -435,8 +462,8 @@ impl CollectionCipher { } } - pub fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { - Self::update_users_revision(collection_uuid, conn); + pub async fn delete(cipher_uuid: &str, collection_uuid: &str, conn: &DbConn) -> EmptyResult { + Self::update_users_revision(collection_uuid, conn).await; db_run! { conn: { diesel::delete( @@ -449,7 +476,7 @@ impl CollectionCipher { }} } - pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(ciphers_collections::table.filter(ciphers_collections::cipher_uuid.eq(cipher_uuid))) .execute(conn) @@ -457,7 +484,7 @@ impl CollectionCipher { }} } - pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: { diesel::delete(ciphers_collections::table.filter(ciphers_collections::collection_uuid.eq(collection_uuid))) .execute(conn) @@ -465,9 +492,9 @@ impl CollectionCipher { }} } - pub fn update_users_revision(collection_uuid: &str, conn: &DbConn) { - if let Some(collection) = Collection::find_by_uuid(collection_uuid, conn) { - collection.update_users_revision(conn); + pub async fn update_users_revision(collection_uuid: &str, conn: &DbConn) { + if let Some(collection) = Collection::find_by_uuid(collection_uuid, conn).await { + collection.update_users_revision(conn).await; } } } diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 05955c04..ce6fa638 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -1,14 +1,12 @@ use chrono::{NaiveDateTime, Utc}; -use super::User; use crate::CONFIG; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "devices"] #[changeset_options(treat_none_as_null="true")] - #[belongs_to(User, foreign_key = "user_uuid")] - #[primary_key(uuid)] + #[primary_key(uuid, user_uuid)] pub struct Device { pub uuid: String, pub created_at: NaiveDateTime, @@ -89,11 +87,11 @@ impl Device { nbf: time_now.timestamp(), exp: (time_now + *DEFAULT_VALIDITY).timestamp(), iss: JWT_LOGIN_ISSUER.to_string(), - sub: user.uuid.to_string(), + sub: user.uuid.clone(), premium: true, - name: user.name.to_string(), - email: user.email.to_string(), + name: user.name.clone(), + email: user.email.clone(), email_verified: !CONFIG.mail_enabled() || user.verified_at.is_some(), orgowner, @@ -101,8 +99,8 @@ impl Device { orguser, orgmanager, - sstamp: user.security_stamp.to_string(), - device: self.uuid.to_string(), + sstamp: user.security_stamp.clone(), + device: self.uuid.clone(), scope, amr: vec!["Application".into()], }; @@ -118,7 +116,7 @@ use crate::error::MapResult; /// Database methods impl Device { - pub fn save(&mut self, 
conn: &DbConn) -> EmptyResult { + pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { self.updated_at = Utc::now().naive_utc(); db_run! { conn: @@ -131,39 +129,33 @@ impl Device { postgresql { let value = DeviceDb::to_db(self); crate::util::retry( - || diesel::insert_into(devices::table).values(&value).on_conflict(devices::uuid).do_update().set(&value).execute(conn), + || diesel::insert_into(devices::table).values(&value).on_conflict((devices::uuid, devices::user_uuid)).do_update().set(&value).execute(conn), 10, ).map_res("Error saving device") } } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { - diesel::delete(devices::table.filter(devices::uuid.eq(self.uuid))) + diesel::delete(devices::table.filter(devices::user_uuid.eq(user_uuid))) .execute(conn) - .map_res("Error removing device") + .map_res("Error removing devices for user") }} } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for device in Self::find_by_user(user_uuid, conn) { - device.delete(conn)?; - } - Ok(()) - } - - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid_and_user(uuid: &str, user_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { devices::table .filter(devices::uuid.eq(uuid)) + .filter(devices::user_uuid.eq(user_uuid)) .first::(conn) .ok() .from_db() }} } - pub fn find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option { + pub async fn find_by_refresh_token(refresh_token: &str, conn: &DbConn) -> Option { db_run! { conn: { devices::table .filter(devices::refresh_token.eq(refresh_token)) @@ -173,17 +165,7 @@ impl Device { }} } - pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { - db_run! 
{ conn: { - devices::table - .filter(devices::user_uuid.eq(user_uuid)) - .load::(conn) - .expect("Error loading devices") - .from_db() - }} - } - - pub fn find_latest_active_by_user(user_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_latest_active_by_user(user_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { devices::table .filter(devices::user_uuid.eq(user_uuid)) diff --git a/src/db/models/emergency_access.rs b/src/db/models/emergency_access.rs index 7327eb34..1f0b84fd 100644 --- a/src/db/models/emergency_access.rs +++ b/src/db/models/emergency_access.rs @@ -4,10 +4,9 @@ use serde_json::Value; use super::User; db_object! { - #[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "emergency_access"] #[changeset_options(treat_none_as_null="true")] - #[belongs_to(User, foreign_key = "grantor_uuid")] #[primary_key(uuid)] pub struct EmergencyAccess { pub uuid: String, @@ -73,8 +72,8 @@ impl EmergencyAccess { }) } - pub fn to_json_grantor_details(&self, conn: &DbConn) -> Value { - let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).expect("Grantor user not found."); + pub async fn to_json_grantor_details(&self, conn: &DbConn) -> Value { + let grantor_user = User::find_by_uuid(&self.grantor_uuid, conn).await.expect("Grantor user not found."); json!({ "Id": self.uuid, @@ -89,11 +88,11 @@ impl EmergencyAccess { } #[allow(clippy::manual_map)] - pub fn to_json_grantee_details(&self, conn: &DbConn) -> Value { + pub async fn to_json_grantee_details(&self, conn: &DbConn) -> Value { let grantee_user = if let Some(grantee_uuid) = self.grantee_uuid.as_deref() { - Some(User::find_by_uuid(grantee_uuid, conn).expect("Grantee user not found.")) + Some(User::find_by_uuid(grantee_uuid, conn).await.expect("Grantee user not found.")) } else if let Some(email) = self.email.as_deref() { - Some(User::find_by_mail(email, conn).expect("Grantee user not 
found.")) + Some(User::find_by_mail(email, conn).await.expect("Grantee user not found.")) } else { None }; @@ -155,8 +154,8 @@ use crate::api::EmptyResult; use crate::error::MapResult; impl EmergencyAccess { - pub fn save(&mut self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.grantor_uuid, conn); + pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.grantor_uuid, conn).await; self.updated_at = Utc::now().naive_utc(); db_run! { conn: @@ -190,18 +189,18 @@ impl EmergencyAccess { } } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for ea in Self::find_all_by_grantor_uuid(user_uuid, conn) { - ea.delete(conn)?; + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + for ea in Self::find_all_by_grantor_uuid(user_uuid, conn).await { + ea.delete(conn).await?; } - for ea in Self::find_all_by_grantee_uuid(user_uuid, conn) { - ea.delete(conn)?; + for ea in Self::find_all_by_grantee_uuid(user_uuid, conn).await { + ea.delete(conn).await?; } Ok(()) } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.grantor_uuid, conn); + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.grantor_uuid, conn).await; db_run! { conn: { diesel::delete(emergency_access::table.filter(emergency_access::uuid.eq(self.uuid))) @@ -210,7 +209,7 @@ impl EmergencyAccess { }} } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! 
{ conn: { emergency_access::table .filter(emergency_access::uuid.eq(uuid)) @@ -219,7 +218,7 @@ impl EmergencyAccess { }} } - pub fn find_by_grantor_uuid_and_grantee_uuid_or_email( + pub async fn find_by_grantor_uuid_and_grantee_uuid_or_email( grantor_uuid: &str, grantee_uuid: &str, email: &str, @@ -234,7 +233,7 @@ impl EmergencyAccess { }} } - pub fn find_all_recoveries(conn: &DbConn) -> Vec { + pub async fn find_all_recoveries(conn: &DbConn) -> Vec { db_run! { conn: { emergency_access::table .filter(emergency_access::status.eq(EmergencyAccessStatus::RecoveryInitiated as i32)) @@ -242,7 +241,7 @@ impl EmergencyAccess { }} } - pub fn find_by_uuid_and_grantor_uuid(uuid: &str, grantor_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid_and_grantor_uuid(uuid: &str, grantor_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { emergency_access::table .filter(emergency_access::uuid.eq(uuid)) @@ -252,7 +251,7 @@ impl EmergencyAccess { }} } - pub fn find_all_by_grantee_uuid(grantee_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_all_by_grantee_uuid(grantee_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { emergency_access::table .filter(emergency_access::grantee_uuid.eq(grantee_uuid)) @@ -260,7 +259,7 @@ impl EmergencyAccess { }} } - pub fn find_invited_by_grantee_email(grantee_email: &str, conn: &DbConn) -> Option { + pub async fn find_invited_by_grantee_email(grantee_email: &str, conn: &DbConn) -> Option { db_run! { conn: { emergency_access::table .filter(emergency_access::email.eq(grantee_email)) @@ -270,7 +269,7 @@ impl EmergencyAccess { }} } - pub fn find_all_by_grantor_uuid(grantor_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_all_by_grantor_uuid(grantor_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{ conn: { emergency_access::table .filter(emergency_access::grantor_uuid.eq(grantor_uuid)) diff --git a/src/db/models/favorite.rs b/src/db/models/favorite.rs index cb3e3420..fd67c60c 100644 --- a/src/db/models/favorite.rs +++ b/src/db/models/favorite.rs @@ -1,10 +1,8 @@ -use super::{Cipher, User}; +use super::User; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations)] + #[derive(Identifiable, Queryable, Insertable)] #[table_name = "favorites"] - #[belongs_to(User, foreign_key = "user_uuid")] - #[belongs_to(Cipher, foreign_key = "cipher_uuid")] #[primary_key(user_uuid, cipher_uuid)] pub struct Favorite { pub user_uuid: String, @@ -19,7 +17,7 @@ use crate::error::MapResult; impl Favorite { // Returns whether the specified cipher is a favorite of the specified user. - pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { + pub async fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { db_run! { conn: { let query = favorites::table .filter(favorites::cipher_uuid.eq(cipher_uuid)) @@ -31,11 +29,11 @@ impl Favorite { } // Sets whether the specified cipher is a favorite of the specified user. - pub fn set_favorite(favorite: bool, cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> EmptyResult { - let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, conn), favorite); + pub async fn set_favorite(favorite: bool, cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> EmptyResult { + let (old, new) = (Self::is_favorite(cipher_uuid, user_uuid, conn).await, favorite); match (old, new) { (false, true) => { - User::update_uuid_revision(user_uuid, conn); + User::update_uuid_revision(user_uuid, conn).await; db_run! { conn: { diesel::insert_into(favorites::table) .values(( @@ -47,7 +45,7 @@ impl Favorite { }} } (true, false) => { - User::update_uuid_revision(user_uuid, conn); + User::update_uuid_revision(user_uuid, conn).await; db_run! 
{ conn: { diesel::delete( favorites::table @@ -64,7 +62,7 @@ impl Favorite { } // Delete all favorite entries associated with the specified cipher. - pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(favorites::table.filter(favorites::cipher_uuid.eq(cipher_uuid))) .execute(conn) @@ -73,11 +71,23 @@ impl Favorite { } // Delete all favorite entries associated with the specified user. - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(favorites::table.filter(favorites::user_uuid.eq(user_uuid))) .execute(conn) .map_res("Error removing favorites by user") }} } + + /// Return a vec with (cipher_uuid) this will only contain favorite flagged ciphers + /// This is used during a full sync so we only need one query for all favorite cipher matches. + pub async fn get_all_cipher_uuid_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + db_run! { conn: { + favorites::table + .filter(favorites::user_uuid.eq(user_uuid)) + .select(favorites::cipher_uuid) + .load::(conn) + .unwrap_or_default() + }} + } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index d51e71b4..0b76704f 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -1,12 +1,11 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; -use super::{Cipher, User}; +use super::User; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "folders"] - #[belongs_to(User, foreign_key = "user_uuid")] #[primary_key(uuid)] pub struct Folder { pub uuid: String, @@ -16,10 +15,8 @@ db_object! 
{ pub name: String, } - #[derive(Identifiable, Queryable, Insertable, Associations)] + #[derive(Identifiable, Queryable, Insertable)] #[table_name = "folders_ciphers"] - #[belongs_to(Cipher, foreign_key = "cipher_uuid")] - #[belongs_to(Folder, foreign_key = "folder_uuid")] #[primary_key(cipher_uuid, folder_uuid)] pub struct FolderCipher { pub cipher_uuid: String, @@ -70,8 +67,8 @@ use crate::error::MapResult; /// Database methods impl Folder { - pub fn save(&mut self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.user_uuid, conn); + pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.user_uuid, conn).await; self.updated_at = Utc::now().naive_utc(); db_run! { conn: @@ -105,9 +102,9 @@ impl Folder { } } - pub fn delete(&self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.user_uuid, conn); - FolderCipher::delete_all_by_folder(&self.uuid, conn)?; + pub async fn delete(&self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.user_uuid, conn).await; + FolderCipher::delete_all_by_folder(&self.uuid, conn).await?; db_run! { conn: { diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) @@ -116,14 +113,14 @@ impl Folder { }} } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for folder in Self::find_by_user(user_uuid, conn) { - folder.delete(conn)?; + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + for folder in Self::find_by_user(user_uuid, conn).await { + folder.delete(conn).await?; } Ok(()) } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { folders::table .filter(folders::uuid.eq(uuid)) @@ -133,7 +130,7 @@ impl Folder { }} } - pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{ conn: { folders::table .filter(folders::user_uuid.eq(user_uuid)) @@ -145,7 +142,7 @@ impl Folder { } impl FolderCipher { - pub fn save(&self, conn: &DbConn) -> EmptyResult { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { db_run! { conn: sqlite, mysql { // Not checking for ForeignKey Constraints here. @@ -167,7 +164,7 @@ impl FolderCipher { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete( folders_ciphers::table @@ -179,7 +176,7 @@ impl FolderCipher { }} } - pub fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_cipher(cipher_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(folders_ciphers::table.filter(folders_ciphers::cipher_uuid.eq(cipher_uuid))) .execute(conn) @@ -187,7 +184,7 @@ impl FolderCipher { }} } - pub fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_folder(folder_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(folders_ciphers::table.filter(folders_ciphers::folder_uuid.eq(folder_uuid))) .execute(conn) @@ -195,7 +192,7 @@ impl FolderCipher { }} } - pub fn find_by_folder_and_cipher(folder_uuid: &str, cipher_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_folder_and_cipher(folder_uuid: &str, cipher_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { folders_ciphers::table .filter(folders_ciphers::folder_uuid.eq(folder_uuid)) @@ -206,7 +203,7 @@ impl FolderCipher { }} } - pub fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_folder(folder_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{ conn: { folders_ciphers::table .filter(folders_ciphers::folder_uuid.eq(folder_uuid)) @@ -215,4 +212,17 @@ impl FolderCipher { .from_db() }} } + + /// Return a vec with (cipher_uuid, folder_uuid) + /// This is used during a full sync so we only need one query for all folder matches. + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec<(String, String)> { + db_run! { conn: { + folders_ciphers::table + .inner_join(folders::table) + .filter(folders::user_uuid.eq(user_uuid)) + .select(folders_ciphers::all_columns) + .load::<(String, String)>(conn) + .unwrap_or_default() + }} + } } diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 7c6cefd3..65ec0fd8 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -6,12 +6,11 @@ use crate::db::DbConn; use crate::error::MapResult; use crate::util::UpCase; -use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; +use super::{UserOrgStatus, UserOrgType, UserOrganization}; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "org_policies"] - #[belongs_to(Organization, foreign_key = "org_uuid")] #[primary_key(uuid)] pub struct OrgPolicy { pub uuid: String, @@ -22,7 +21,7 @@ db_object! { } } -#[derive(Copy, Clone, PartialEq, num_derive::FromPrimitive)] +#[derive(Copy, Clone, Eq, PartialEq, num_derive::FromPrimitive)] pub enum OrgPolicyType { TwoFactorAuthentication = 0, MasterPassword = 1, @@ -72,7 +71,7 @@ impl OrgPolicy { /// Database methods impl OrgPolicy { - pub fn save(&self, conn: &DbConn) -> EmptyResult { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { db_run! { conn: sqlite, mysql { match diesel::replace_into(org_policies::table) @@ -115,7 +114,7 @@ impl OrgPolicy { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: { diesel::delete(org_policies::table.filter(org_policies::uuid.eq(self.uuid))) .execute(conn) @@ -123,7 +122,7 @@ impl OrgPolicy { }} } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { org_policies::table .filter(org_policies::uuid.eq(uuid)) @@ -133,7 +132,7 @@ impl OrgPolicy { }} } - pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { org_policies::table .filter(org_policies::org_uuid.eq(org_uuid)) @@ -143,7 +142,7 @@ impl OrgPolicy { }} } - pub fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { org_policies::table .inner_join( @@ -161,7 +160,7 @@ impl OrgPolicy { }} } - pub fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Option { + pub async fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Option { db_run! { conn: { org_policies::table .filter(org_policies::org_uuid.eq(org_uuid)) @@ -172,7 +171,7 @@ impl OrgPolicy { }} } - pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(org_policies::table.filter(org_policies::org_uuid.eq(org_uuid))) .execute(conn) @@ -183,12 +182,12 @@ impl OrgPolicy { /// Returns true if the user belongs to an org that has enabled the specified policy type, /// and the user is not an owner or admin of that org. This is only useful for checking /// applicability of policy types that have these particular semantics. 
- pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { + pub async fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { // TODO: Should check confirmed and accepted users - for policy in OrgPolicy::find_confirmed_by_user(user_uuid, conn) { + for policy in OrgPolicy::find_confirmed_by_user(user_uuid, conn).await { if policy.enabled && policy.has_type(policy_type) { let org_uuid = &policy.org_uuid; - if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { + if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await { if user.atype < UserOrgType::Admin { return true; } @@ -200,11 +199,11 @@ impl OrgPolicy { /// Returns true if the user belongs to an org that has enabled the `DisableHideEmail` /// option of the `Send Options` policy, and the user is not an owner or admin of that org. - pub fn is_hide_email_disabled(user_uuid: &str, conn: &DbConn) -> bool { - for policy in OrgPolicy::find_confirmed_by_user(user_uuid, conn) { + pub async fn is_hide_email_disabled(user_uuid: &str, conn: &DbConn) -> bool { + for policy in OrgPolicy::find_confirmed_by_user(user_uuid, conn).await { if policy.enabled && policy.has_type(OrgPolicyType::SendOptions) { let org_uuid = &policy.org_uuid; - if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { + if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn).await { if user.atype < UserOrgType::Admin { match serde_json::from_str::>(&policy.data) { Ok(opts) => { @@ -220,12 +219,4 @@ impl OrgPolicy { } false } - - /*pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - db_run! 
{ conn: { - diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid))) - .execute(conn) - .map_res("Error deleting twofactors") - }} - }*/ } diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 67dd5357..3a02867c 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -193,10 +193,10 @@ use crate::error::MapResult; /// Database methods impl Organization { - pub fn save(&self, conn: &DbConn) -> EmptyResult { - UserOrganization::find_by_org(&self.uuid, conn).iter().for_each(|user_org| { - User::update_uuid_revision(&user_org.user_uuid, conn); - }); + pub async fn save(&self, conn: &DbConn) -> EmptyResult { + for user_org in UserOrganization::find_by_org(&self.uuid, conn).await.iter() { + User::update_uuid_revision(&user_org.user_uuid, conn).await; + } db_run! { conn: sqlite, mysql { @@ -230,13 +230,13 @@ impl Organization { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { use super::{Cipher, Collection}; - Cipher::delete_all_by_organization(&self.uuid, conn)?; - Collection::delete_all_by_organization(&self.uuid, conn)?; - UserOrganization::delete_all_by_organization(&self.uuid, conn)?; - OrgPolicy::delete_all_by_organization(&self.uuid, conn)?; + Cipher::delete_all_by_organization(&self.uuid, conn).await?; + Collection::delete_all_by_organization(&self.uuid, conn).await?; + UserOrganization::delete_all_by_organization(&self.uuid, conn).await?; + OrgPolicy::delete_all_by_organization(&self.uuid, conn).await?; db_run! { conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) @@ -245,7 +245,7 @@ impl Organization { }} } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! 
{ conn: { organizations::table .filter(organizations::uuid.eq(uuid)) @@ -254,7 +254,7 @@ impl Organization { }} } - pub fn get_all(conn: &DbConn) -> Vec { + pub async fn get_all(conn: &DbConn) -> Vec { db_run! { conn: { organizations::table.load::(conn).expect("Error loading organizations").from_db() }} @@ -262,8 +262,8 @@ impl Organization { } impl UserOrganization { - pub fn to_json(&self, conn: &DbConn) -> Value { - let org = Organization::find_by_uuid(&self.org_uuid, conn).unwrap(); + pub async fn to_json(&self, conn: &DbConn) -> Value { + let org = Organization::find_by_uuid(&self.org_uuid, conn).await.unwrap(); json!({ "Id": self.org_uuid, @@ -322,8 +322,8 @@ impl UserOrganization { }) } - pub fn to_json_user_details(&self, conn: &DbConn) -> Value { - let user = User::find_by_uuid(&self.user_uuid, conn).unwrap(); + pub async fn to_json_user_details(&self, conn: &DbConn) -> Value { + let user = User::find_by_uuid(&self.user_uuid, conn).await.unwrap(); json!({ "Id": self.uuid, @@ -347,11 +347,12 @@ impl UserOrganization { }) } - pub fn to_json_details(&self, conn: &DbConn) -> Value { + pub async fn to_json_details(&self, conn: &DbConn) -> Value { let coll_uuids = if self.access_all { vec![] // If we have complete access, no need to fill the array } else { - let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); + let collections = + CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn).await; collections .iter() .map(|c| { @@ -376,8 +377,8 @@ impl UserOrganization { "Object": "organizationUserDetails", }) } - pub fn save(&self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.user_uuid, conn); + pub async fn save(&self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.user_uuid, conn).await; db_run! 
{ conn: sqlite, mysql { @@ -410,10 +411,10 @@ impl UserOrganization { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - User::update_uuid_revision(&self.user_uuid, conn); + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + User::update_uuid_revision(&self.user_uuid, conn).await; - CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, conn)?; + CollectionUser::delete_all_by_user_and_org(&self.user_uuid, &self.org_uuid, conn).await?; db_run! { conn: { diesel::delete(users_organizations::table.filter(users_organizations::uuid.eq(self.uuid))) @@ -422,23 +423,23 @@ impl UserOrganization { }} } - pub fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { - for user_org in Self::find_by_org(org_uuid, conn) { - user_org.delete(conn)?; + pub async fn delete_all_by_organization(org_uuid: &str, conn: &DbConn) -> EmptyResult { + for user_org in Self::find_by_org(org_uuid, conn).await { + user_org.delete(conn).await?; } Ok(()) } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for user_org in Self::find_any_state_by_user(user_uuid, conn) { - user_org.delete(conn)?; + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + for user_org in Self::find_any_state_by_user(user_uuid, conn).await { + user_org.delete(conn).await?; } Ok(()) } - pub fn find_by_email_and_org(email: &str, org_id: &str, conn: &DbConn) -> Option { - if let Some(user) = super::User::find_by_mail(email, conn) { - if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn) { + pub async fn find_by_email_and_org(email: &str, org_id: &str, conn: &DbConn) -> Option { + if let Some(user) = super::User::find_by_mail(email, conn).await { + if let Some(user_org) = UserOrganization::find_by_user_and_org(&user.uuid, org_id, conn).await { return Some(user_org); } } @@ -458,7 +459,7 @@ impl UserOrganization { (self.access_all || self.atype >= UserOrgType::Admin) && 
self.has_status(UserOrgStatus::Confirmed) } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { users_organizations::table .filter(users_organizations::uuid.eq(uuid)) @@ -467,7 +468,7 @@ impl UserOrganization { }} } - pub fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid_and_org(uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { users_organizations::table .filter(users_organizations::uuid.eq(uuid)) @@ -477,7 +478,7 @@ impl UserOrganization { }} } - pub fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_confirmed_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) @@ -487,7 +488,7 @@ impl UserOrganization { }} } - pub fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_invited_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) @@ -497,7 +498,7 @@ impl UserOrganization { }} } - pub fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_any_state_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) @@ -506,7 +507,7 @@ impl UserOrganization { }} } - pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) @@ -515,7 +516,7 @@ impl UserOrganization { }} } - pub fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { + pub async fn count_by_org(org_uuid: &str, conn: &DbConn) -> i64 { db_run! 
{ conn: { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) @@ -526,7 +527,7 @@ impl UserOrganization { }} } - pub fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Vec { + pub async fn find_by_org_and_type(org_uuid: &str, atype: i32, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) @@ -536,7 +537,7 @@ impl UserOrganization { }} } - pub fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_user_and_org(user_uuid: &str, org_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { users_organizations::table .filter(users_organizations::user_uuid.eq(user_uuid)) @@ -546,7 +547,16 @@ impl UserOrganization { }} } - pub fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec { + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + db_run! { conn: { + users_organizations::table + .filter(users_organizations::user_uuid.eq(user_uuid)) + .load::(conn) + .expect("Error loading user organizations").from_db() + }} + } + + pub async fn find_by_user_and_policy(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .inner_join( @@ -565,7 +575,7 @@ impl UserOrganization { }} } - pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) @@ -587,7 +597,7 @@ impl UserOrganization { }} } - pub fn find_by_collection_and_org(collection_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_collection_and_org(collection_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec { db_run! 
{ conn: { users_organizations::table .filter(users_organizations::org_uuid.eq(org_uuid)) diff --git a/src/db/models/send.rs b/src/db/models/send.rs index 9cfb7b1e..687571d5 100644 --- a/src/db/models/send.rs +++ b/src/db/models/send.rs @@ -1,14 +1,12 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; -use super::{Organization, User}; +use super::User; db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "sends"] #[changeset_options(treat_none_as_null="true")] - #[belongs_to(User, foreign_key = "user_uuid")] - #[belongs_to(Organization, foreign_key = "organization_uuid")] #[primary_key(uuid)] pub struct Send { pub uuid: String, @@ -103,7 +101,7 @@ impl Send { } } - pub fn creator_identifier(&self, conn: &DbConn) -> Option { + pub async fn creator_identifier(&self, conn: &DbConn) -> Option { if let Some(hide_email) = self.hide_email { if hide_email { return None; @@ -111,7 +109,7 @@ impl Send { } if let Some(user_uuid) = &self.user_uuid { - if let Some(user) = User::find_by_uuid(user_uuid, conn) { + if let Some(user) = User::find_by_uuid(user_uuid, conn).await { return Some(user.email); } } @@ -150,7 +148,7 @@ impl Send { }) } - pub fn to_json_access(&self, conn: &DbConn) -> Value { + pub async fn to_json_access(&self, conn: &DbConn) -> Value { use crate::util::format_date; let data: Value = serde_json::from_str(&self.data).unwrap_or_default(); @@ -164,7 +162,7 @@ impl Send { "File": if self.atype == SendType::File as i32 { Some(&data) } else { None }, "ExpirationDate": self.expiration_date.as_ref().map(format_date), - "CreatorIdentifier": self.creator_identifier(conn), + "CreatorIdentifier": self.creator_identifier(conn).await, "Object": "send-access", }) } @@ -176,8 +174,8 @@ use crate::api::EmptyResult; use crate::error::MapResult; impl Send { - pub fn save(&mut self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); + pub 
async fn save(&mut self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; self.revision_date = Utc::now().naive_utc(); db_run! { conn: @@ -211,8 +209,8 @@ impl Send { } } - pub fn delete(&self, conn: &DbConn) -> EmptyResult { - self.update_users_revision(conn); + pub async fn delete(&self, conn: &DbConn) -> EmptyResult { + self.update_users_revision(conn).await; if self.atype == SendType::File as i32 { std::fs::remove_dir_all(std::path::Path::new(&crate::CONFIG.sends_folder()).join(&self.uuid)).ok(); @@ -226,17 +224,17 @@ impl Send { } /// Purge all sends that are past their deletion date. - pub fn purge(conn: &DbConn) { - for send in Self::find_by_past_deletion_date(conn) { - send.delete(conn).ok(); + pub async fn purge(conn: &DbConn) { + for send in Self::find_by_past_deletion_date(conn).await { + send.delete(conn).await.ok(); } } - pub fn update_users_revision(&self, conn: &DbConn) -> Vec { + pub async fn update_users_revision(&self, conn: &DbConn) -> Vec { let mut user_uuids = Vec::new(); match &self.user_uuid { Some(user_uuid) => { - User::update_uuid_revision(user_uuid, conn); + User::update_uuid_revision(user_uuid, conn).await; user_uuids.push(user_uuid.clone()) } None => { @@ -246,14 +244,14 @@ impl Send { user_uuids } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { - for send in Self::find_by_user(user_uuid, conn) { - send.delete(conn)?; + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + for send in Self::find_by_user(user_uuid, conn).await { + send.delete(conn).await?; } Ok(()) } - pub fn find_by_access_id(access_id: &str, conn: &DbConn) -> Option { + pub async fn find_by_access_id(access_id: &str, conn: &DbConn) -> Option { use data_encoding::BASE64URL_NOPAD; use uuid::Uuid; @@ -267,10 +265,10 @@ impl Send { Err(_) => return None, }; - Self::find_by_uuid(&uuid, conn) + Self::find_by_uuid(&uuid, conn).await } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { 
+ pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! {conn: { sends::table .filter(sends::uuid.eq(uuid)) @@ -280,7 +278,7 @@ impl Send { }} } - pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! {conn: { sends::table .filter(sends::user_uuid.eq(user_uuid)) @@ -288,7 +286,7 @@ impl Send { }} } - pub fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_org(org_uuid: &str, conn: &DbConn) -> Vec { db_run! {conn: { sends::table .filter(sends::organization_uuid.eq(org_uuid)) @@ -296,7 +294,7 @@ impl Send { }} } - pub fn find_by_past_deletion_date(conn: &DbConn) -> Vec { + pub async fn find_by_past_deletion_date(conn: &DbConn) -> Vec { let now = Utc::now().naive_utc(); db_run! {conn: { sends::table diff --git a/src/db/models/two_factor.rs b/src/db/models/two_factor.rs index 6c874df1..56d7e1e7 100644 --- a/src/db/models/two_factor.rs +++ b/src/db/models/two_factor.rs @@ -2,12 +2,9 @@ use serde_json::Value; use crate::{api::EmptyResult, db::DbConn, error::MapResult}; -use super::User; - db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "twofactor"] - #[belongs_to(User, foreign_key = "user_uuid")] #[primary_key(uuid)] pub struct TwoFactor { pub uuid: String, @@ -71,7 +68,7 @@ impl TwoFactor { /// Database methods impl TwoFactor { - pub fn save(&self, conn: &DbConn) -> EmptyResult { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { db_run! { conn: sqlite, mysql { match diesel::replace_into(twofactor::table) @@ -110,7 +107,7 @@ impl TwoFactor { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: { diesel::delete(twofactor::table.filter(twofactor::uuid.eq(self.uuid))) .execute(conn) @@ -118,7 +115,7 @@ impl TwoFactor { }} } - pub fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { + pub async fn find_by_user(user_uuid: &str, conn: &DbConn) -> Vec { db_run! { conn: { twofactor::table .filter(twofactor::user_uuid.eq(user_uuid)) @@ -129,7 +126,7 @@ impl TwoFactor { }} } - pub fn find_by_user_and_type(user_uuid: &str, atype: i32, conn: &DbConn) -> Option { + pub async fn find_by_user_and_type(user_uuid: &str, atype: i32, conn: &DbConn) -> Option { db_run! { conn: { twofactor::table .filter(twofactor::user_uuid.eq(user_uuid)) @@ -140,7 +137,7 @@ impl TwoFactor { }} } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(twofactor::table.filter(twofactor::user_uuid.eq(user_uuid))) .execute(conn) @@ -148,7 +145,7 @@ impl TwoFactor { }} } - pub fn migrate_u2f_to_webauthn(conn: &DbConn) -> EmptyResult { + pub async fn migrate_u2f_to_webauthn(conn: &DbConn) -> EmptyResult { let u2f_factors = db_run! 
{ conn: { twofactor::table .filter(twofactor::atype.eq(TwoFactorType::U2f as i32)) @@ -157,7 +154,7 @@ impl TwoFactor { .from_db() }}; - use crate::api::core::two_factor::u2f::U2FRegistration; + use crate::api::core::two_factor::webauthn::U2FRegistration; use crate::api::core::two_factor::webauthn::{get_webauthn_registrations, WebauthnRegistration}; use webauthn_rs::proto::*; @@ -168,7 +165,7 @@ impl TwoFactor { continue; } - let (_, mut webauthn_regs) = get_webauthn_registrations(&u2f.user_uuid, conn)?; + let (_, mut webauthn_regs) = get_webauthn_registrations(&u2f.user_uuid, conn).await?; // If the user already has webauthn registrations saved, don't overwrite them if !webauthn_regs.is_empty() { @@ -207,10 +204,11 @@ impl TwoFactor { } u2f.data = serde_json::to_string(®s)?; - u2f.save(conn)?; + u2f.save(conn).await?; TwoFactor::new(u2f.user_uuid.clone(), TwoFactorType::Webauthn, serde_json::to_string(&webauthn_regs)?) - .save(conn)?; + .save(conn) + .await?; } Ok(()) diff --git a/src/db/models/two_factor_incomplete.rs b/src/db/models/two_factor_incomplete.rs index d58398ec..7f3021b4 100644 --- a/src/db/models/two_factor_incomplete.rs +++ b/src/db/models/two_factor_incomplete.rs @@ -2,12 +2,9 @@ use chrono::{NaiveDateTime, Utc}; use crate::{api::EmptyResult, auth::ClientIp, db::DbConn, error::MapResult, CONFIG}; -use super::User; - db_object! { - #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] + #[derive(Identifiable, Queryable, Insertable, AsChangeset)] #[table_name = "twofactor_incomplete"] - #[belongs_to(User, foreign_key = "user_uuid")] #[primary_key(user_uuid, device_uuid)] pub struct TwoFactorIncomplete { pub user_uuid: String, @@ -22,7 +19,7 @@ db_object! 
{ } impl TwoFactorIncomplete { - pub fn mark_incomplete( + pub async fn mark_incomplete( user_uuid: &str, device_uuid: &str, device_name: &str, @@ -36,7 +33,7 @@ impl TwoFactorIncomplete { // Don't update the data for an existing user/device pair, since that // would allow an attacker to arbitrarily delay notifications by // sending repeated 2FA attempts to reset the timer. - let existing = Self::find_by_user_and_device(user_uuid, device_uuid, conn); + let existing = Self::find_by_user_and_device(user_uuid, device_uuid, conn).await; if existing.is_some() { return Ok(()); } @@ -55,15 +52,15 @@ impl TwoFactorIncomplete { }} } - pub fn mark_complete(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn mark_complete(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { if CONFIG.incomplete_2fa_time_limit() <= 0 || !CONFIG.mail_enabled() { return Ok(()); } - Self::delete_by_user_and_device(user_uuid, device_uuid, conn) + Self::delete_by_user_and_device(user_uuid, device_uuid, conn).await } - pub fn find_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> Option { db_run! { conn: { twofactor_incomplete::table .filter(twofactor_incomplete::user_uuid.eq(user_uuid)) @@ -74,7 +71,7 @@ impl TwoFactorIncomplete { }} } - pub fn find_logins_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec { + pub async fn find_logins_before(dt: &NaiveDateTime, conn: &DbConn) -> Vec { db_run! 
{conn: { twofactor_incomplete::table .filter(twofactor_incomplete::login_time.lt(dt)) @@ -84,11 +81,11 @@ impl TwoFactorIncomplete { }} } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - Self::delete_by_user_and_device(&self.user_uuid, &self.device_uuid, conn) + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + Self::delete_by_user_and_device(&self.user_uuid, &self.device_uuid, conn).await } - pub fn delete_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_by_user_and_device(user_uuid: &str, device_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(twofactor_incomplete::table .filter(twofactor_incomplete::user_uuid.eq(user_uuid)) @@ -98,7 +95,7 @@ impl TwoFactorIncomplete { }} } - pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { + pub async fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> EmptyResult { db_run! { conn: { diesel::delete(twofactor_incomplete::table.filter(twofactor_incomplete::user_uuid.eq(user_uuid))) .execute(conn) diff --git a/src/db/models/user.rs b/src/db/models/user.rs index 599661e5..a8d27060 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -171,7 +171,7 @@ impl User { pub fn set_stamp_exception(&mut self, route_exception: Vec) { let stamp_exception = UserStampException { routes: route_exception, - security_stamp: self.security_stamp.to_string(), + security_stamp: self.security_stamp.clone(), expire: (Utc::now().naive_utc() + Duration::minutes(2)).timestamp(), }; self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); @@ -192,12 +192,20 @@ use crate::db::DbConn; use crate::api::EmptyResult; use crate::error::MapResult; +use futures::{stream, stream::StreamExt}; + /// Database methods impl User { - pub fn to_json(&self, conn: &DbConn) -> Value { - let orgs = UserOrganization::find_confirmed_by_user(&self.uuid, conn); - let orgs_json: Vec = orgs.iter().map(|c| 
c.to_json(conn)).collect(); - let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).is_empty(); + pub async fn to_json(&self, conn: &DbConn) -> Value { + let orgs_json = stream::iter(UserOrganization::find_confirmed_by_user(&self.uuid, conn).await) + .then(|c| async { + let c = c; // Move out this single variable + c.to_json(conn).await + }) + .collect::>() + .await; + + let twofactor_enabled = !TwoFactor::find_by_user(&self.uuid, conn).await.is_empty(); // TODO: Might want to save the status field in the DB let status = if self.password_hash.is_empty() { @@ -227,7 +235,7 @@ impl User { }) } - pub fn save(&mut self, conn: &DbConn) -> EmptyResult { + pub async fn save(&mut self, conn: &DbConn) -> EmptyResult { if self.email.trim().is_empty() { err!("User email can't be empty") } @@ -265,26 +273,26 @@ impl User { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { - for user_org in UserOrganization::find_confirmed_by_user(&self.uuid, conn) { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { + for user_org in UserOrganization::find_confirmed_by_user(&self.uuid, conn).await { if user_org.atype == UserOrgType::Owner { let owner_type = UserOrgType::Owner as i32; - if UserOrganization::find_by_org_and_type(&user_org.org_uuid, owner_type, conn).len() <= 1 { + if UserOrganization::find_by_org_and_type(&user_org.org_uuid, owner_type, conn).await.len() <= 1 { err!("Can't delete last owner") } } } - Send::delete_all_by_user(&self.uuid, conn)?; - EmergencyAccess::delete_all_by_user(&self.uuid, conn)?; - UserOrganization::delete_all_by_user(&self.uuid, conn)?; - Cipher::delete_all_by_user(&self.uuid, conn)?; - Favorite::delete_all_by_user(&self.uuid, conn)?; - Folder::delete_all_by_user(&self.uuid, conn)?; - Device::delete_all_by_user(&self.uuid, conn)?; - TwoFactor::delete_all_by_user(&self.uuid, conn)?; - TwoFactorIncomplete::delete_all_by_user(&self.uuid, conn)?; - Invitation::take(&self.email, conn); // Delete invitation if any + 
Send::delete_all_by_user(&self.uuid, conn).await?; + EmergencyAccess::delete_all_by_user(&self.uuid, conn).await?; + UserOrganization::delete_all_by_user(&self.uuid, conn).await?; + Cipher::delete_all_by_user(&self.uuid, conn).await?; + Favorite::delete_all_by_user(&self.uuid, conn).await?; + Folder::delete_all_by_user(&self.uuid, conn).await?; + Device::delete_all_by_user(&self.uuid, conn).await?; + TwoFactor::delete_all_by_user(&self.uuid, conn).await?; + TwoFactorIncomplete::delete_all_by_user(&self.uuid, conn).await?; + Invitation::take(&self.email, conn).await; // Delete invitation if any db_run! {conn: { diesel::delete(users::table.filter(users::uuid.eq(self.uuid))) @@ -293,13 +301,13 @@ impl User { }} } - pub fn update_uuid_revision(uuid: &str, conn: &DbConn) { - if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn) { + pub async fn update_uuid_revision(uuid: &str, conn: &DbConn) { + if let Err(e) = Self::_update_revision(uuid, &Utc::now().naive_utc(), conn).await { warn!("Failed to update revision for {}: {:#?}", uuid, e); } } - pub fn update_all_revisions(conn: &DbConn) -> EmptyResult { + pub async fn update_all_revisions(conn: &DbConn) -> EmptyResult { let updated_at = Utc::now().naive_utc(); db_run! {conn: { @@ -312,13 +320,13 @@ impl User { }} } - pub fn update_revision(&mut self, conn: &DbConn) -> EmptyResult { + pub async fn update_revision(&mut self, conn: &DbConn) -> EmptyResult { self.updated_at = Utc::now().naive_utc(); - Self::_update_revision(&self.uuid, &self.updated_at, conn) + Self::_update_revision(&self.uuid, &self.updated_at, conn).await } - fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &DbConn) -> EmptyResult { + async fn _update_revision(uuid: &str, date: &NaiveDateTime, conn: &DbConn) -> EmptyResult { db_run! 
{conn: { crate::util::retry(|| { diesel::update(users::table.filter(users::uuid.eq(uuid))) @@ -329,7 +337,7 @@ impl User { }} } - pub fn find_by_mail(mail: &str, conn: &DbConn) -> Option { + pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option { let lower_mail = mail.to_lowercase(); db_run! {conn: { users::table @@ -340,20 +348,20 @@ impl User { }} } - pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { + pub async fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { db_run! {conn: { users::table.filter(users::uuid.eq(uuid)).first::(conn).ok().from_db() }} } - pub fn get_all(conn: &DbConn) -> Vec { + pub async fn get_all(conn: &DbConn) -> Vec { db_run! {conn: { users::table.load::(conn).expect("Error loading users").from_db() }} } - pub fn last_active(&self, conn: &DbConn) -> Option { - match Device::find_latest_active_by_user(&self.uuid, conn) { + pub async fn last_active(&self, conn: &DbConn) -> Option { + match Device::find_latest_active_by_user(&self.uuid, conn).await { Some(device) => Some(device.updated_at), None => None, } @@ -368,7 +376,7 @@ impl Invitation { } } - pub fn save(&self, conn: &DbConn) -> EmptyResult { + pub async fn save(&self, conn: &DbConn) -> EmptyResult { if self.email.trim().is_empty() { err!("Invitation email can't be empty") } @@ -393,7 +401,7 @@ impl Invitation { } } - pub fn delete(self, conn: &DbConn) -> EmptyResult { + pub async fn delete(self, conn: &DbConn) -> EmptyResult { db_run! {conn: { diesel::delete(invitations::table.filter(invitations::email.eq(self.email))) .execute(conn) @@ -401,7 +409,7 @@ impl Invitation { }} } - pub fn find_by_mail(mail: &str, conn: &DbConn) -> Option { + pub async fn find_by_mail(mail: &str, conn: &DbConn) -> Option { let lower_mail = mail.to_lowercase(); db_run! 
{conn: { invitations::table @@ -412,9 +420,9 @@ impl Invitation { }} } - pub fn take(mail: &str, conn: &DbConn) -> bool { - match Self::find_by_mail(mail, conn) { - Some(invitation) => invitation.delete(conn).is_ok(), + pub async fn take(mail: &str, conn: &DbConn) -> bool { + match Self::find_by_mail(mail, conn).await { + Some(invitation) => invitation.delete(conn).await.is_ok(), None => false, } } diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 61234a16..a49159f2 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -42,7 +42,7 @@ table! { } table! { - devices (uuid) { + devices (uuid, user_uuid) { uuid -> Text, created_at -> Datetime, updated_at -> Datetime, diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index 855b4fbc..9fd6fd97 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -42,7 +42,7 @@ table! { } table! { - devices (uuid) { + devices (uuid, user_uuid) { uuid -> Text, created_at -> Timestamp, updated_at -> Timestamp, diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index 855b4fbc..9fd6fd97 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -42,7 +42,7 @@ table! { } table! { - devices (uuid) { + devices (uuid, user_uuid) { uuid -> Text, created_at -> Timestamp, updated_at -> Timestamp, diff --git a/src/error.rs b/src/error.rs index 8b0adace..fe42a293 100644 --- a/src/error.rs +++ b/src/error.rs @@ -24,7 +24,7 @@ macro_rules! 
make_error { } } impl std::fmt::Display for Error { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.error {$( ErrorKind::$name(e) => f.write_str(&$usr_msg_fun(e, &self.message)), )+} @@ -45,10 +45,11 @@ use lettre::transport::smtp::Error as SmtpErr; use openssl::error::ErrorStack as SSLErr; use regex::Error as RegexErr; use reqwest::Error as ReqErr; +use rocket::error::Error as RocketErr; use serde_json::{Error as SerdeErr, Value}; use std::io::Error as IoErr; use std::time::SystemTimeError as TimeErr; -use u2f::u2ferror::U2fError as U2fErr; +use tokio_tungstenite::tungstenite::Error as TungstError; use webauthn_rs::error::WebauthnError as WebauthnErr; use yubico::yubicoerror::YubicoError as YubiErr; @@ -69,7 +70,6 @@ make_error! { Json(Value): _no_source, _serialize, Db(DieselErr): _has_source, _api_error, R2d2(R2d2Err): _has_source, _api_error, - U2f(U2fErr): _has_source, _api_error, Serde(SerdeErr): _has_source, _api_error, JWt(JwtErr): _has_source, _api_error, Handlebars(HbErr): _has_source, _api_error, @@ -84,14 +84,16 @@ make_error! 
{ Address(AddrErr): _has_source, _api_error, Smtp(SmtpErr): _has_source, _api_error, OpenSSL(SSLErr): _has_source, _api_error, + Rocket(RocketErr): _has_source, _api_error, DieselCon(DieselConErr): _has_source, _api_error, DieselMig(DieselMigErr): _has_source, _api_error, Webauthn(WebauthnErr): _has_source, _api_error, + WebSocket(TungstError): _has_source, _api_error, } impl std::fmt::Debug for Error { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.source() { Some(e) => write!(f, "{}.\n[CAUSE] {:#?}", self.message, e), None => match self.error { @@ -193,8 +195,8 @@ use rocket::http::{ContentType, Status}; use rocket::request::Request; use rocket::response::{self, Responder, Response}; -impl<'r> Responder<'r> for Error { - fn respond_to(self, _: &Request) -> response::Result<'r> { +impl<'r> Responder<'r, 'static> for Error { + fn respond_to(self, _: &Request<'_>) -> response::Result<'static> { match self.error { ErrorKind::Empty(_) => {} // Don't print the error in this situation ErrorKind::Simple(_) => {} // Don't print the error in this situation @@ -202,8 +204,8 @@ impl<'r> Responder<'r> for Error { }; let code = Status::from_code(self.error_code).unwrap_or(Status::BadRequest); - - Response::build().status(code).header(ContentType::JSON).sized_body(Cursor::new(format!("{}", self))).ok() + let body = self.to_string(); + Response::build().status(code).header(ContentType::JSON).sized_body(Some(body.len()), Cursor::new(body)).ok() } } @@ -214,20 +216,20 @@ impl<'r> Responder<'r> for Error { macro_rules! err { ($msg:expr) => {{ error!("{}", $msg); - return Err(crate::error::Error::new($msg, $msg)); + return Err($crate::error::Error::new($msg, $msg)); }}; ($usr_msg:expr, $log_value:expr) => {{ error!("{}. 
{}", $usr_msg, $log_value); - return Err(crate::error::Error::new($usr_msg, $log_value)); + return Err($crate::error::Error::new($usr_msg, $log_value)); }}; } macro_rules! err_silent { ($msg:expr) => {{ - return Err(crate::error::Error::new($msg, $msg)); + return Err($crate::error::Error::new($msg, $msg)); }}; ($usr_msg:expr, $log_value:expr) => {{ - return Err(crate::error::Error::new($usr_msg, $log_value)); + return Err($crate::error::Error::new($usr_msg, $log_value)); }}; } @@ -235,11 +237,11 @@ macro_rules! err_silent { macro_rules! err_code { ($msg:expr, $err_code: expr) => {{ error!("{}", $msg); - return Err(crate::error::Error::new($msg, $msg).with_code($err_code)); + return Err($crate::error::Error::new($msg, $msg).with_code($err_code)); }}; ($usr_msg:expr, $log_value:expr, $err_code: expr) => {{ error!("{}. {}", $usr_msg, $log_value); - return Err(crate::error::Error::new($usr_msg, $log_value).with_code($err_code)); + return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code)); }}; } @@ -247,11 +249,11 @@ macro_rules! err_code { macro_rules! 
err_discard { ($msg:expr, $data:expr) => {{ std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); - return Err(crate::error::Error::new($msg, $msg)); + return Err($crate::error::Error::new($msg, $msg)); }}; ($usr_msg:expr, $log_value:expr, $data:expr) => {{ std::io::copy(&mut $data.open(), &mut std::io::sink()).ok(); - return Err(crate::error::Error::new($usr_msg, $log_value)); + return Err($crate::error::Error::new($usr_msg, $log_value)); }}; } diff --git a/src/mail.rs b/src/mail.rs index df9919d2..5cc12658 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -4,11 +4,11 @@ use chrono::NaiveDateTime; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; use lettre::{ - message::{header, Mailbox, Message, MultiPart, SinglePart}, + message::{Mailbox, Message, MultiPart}, transport::smtp::authentication::{Credentials, Mechanism as SmtpAuthMechanism}, transport::smtp::client::{Tls, TlsParameters}, transport::smtp::extension::ClientId, - Address, SmtpTransport, Transport, + Address, AsyncSmtpTransport, AsyncTransport, Tokio1Executor, }; use crate::{ @@ -21,16 +21,16 @@ use crate::{ CONFIG, }; -fn mailer() -> SmtpTransport { +fn mailer() -> AsyncSmtpTransport { use std::time::Duration; let host = CONFIG.smtp_host().unwrap(); - let smtp_client = SmtpTransport::builder_dangerous(host.as_str()) + let smtp_client = AsyncSmtpTransport::::builder_dangerous(host.as_str()) .port(CONFIG.smtp_port()) .timeout(Some(Duration::from_secs(CONFIG.smtp_timeout()))); // Determine security - let smtp_client = if CONFIG.smtp_ssl() || CONFIG.smtp_explicit_tls() { + let smtp_client = if CONFIG.smtp_security() != *"off" { let mut tls_parameters = TlsParameters::builder(host); if CONFIG.smtp_accept_invalid_hostnames() { tls_parameters = tls_parameters.dangerous_accept_invalid_hostnames(true); @@ -40,7 +40,7 @@ fn mailer() -> SmtpTransport { } let tls_parameters = tls_parameters.build().unwrap(); - if CONFIG.smtp_explicit_tls() { + if CONFIG.smtp_security() == *"force_tls" { 
smtp_client.tls(Tls::Wrapper(tls_parameters)) } else { smtp_client.tls(Tls::Required(tls_parameters)) @@ -110,7 +110,7 @@ fn get_template(template_name: &str, data: &serde_json::Value) -> Result<(String Ok((subject, body)) } -pub fn send_password_hint(address: &str, hint: Option) -> EmptyResult { +pub async fn send_password_hint(address: &str, hint: Option) -> EmptyResult { let template_name = if hint.is_some() { "email/pw_hint_some" } else { @@ -119,10 +119,10 @@ pub fn send_password_hint(address: &str, hint: Option) -> EmptyResult { let (subject, body_html, body_text) = get_text(template_name, json!({ "hint": hint, "url": CONFIG.domain() }))?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_delete_account(address: &str, uuid: &str) -> EmptyResult { +pub async fn send_delete_account(address: &str, uuid: &str) -> EmptyResult { let claims = generate_delete_claims(uuid.to_string()); let delete_token = encode_jwt(&claims); @@ -136,10 +136,10 @@ pub fn send_delete_account(address: &str, uuid: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_verify_email(address: &str, uuid: &str) -> EmptyResult { +pub async fn send_verify_email(address: &str, uuid: &str) -> EmptyResult { let claims = generate_verify_email_claims(uuid.to_string()); let verify_email_token = encode_jwt(&claims); @@ -153,10 +153,10 @@ pub fn send_verify_email(address: &str, uuid: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_welcome(address: &str) -> EmptyResult { +pub async fn send_welcome(address: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/welcome", json!({ @@ -164,10 +164,10 @@ pub fn send_welcome(address: &str) -> EmptyResult { }), )?; - send_email(address, &subject, 
body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult { +pub async fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult { let claims = generate_verify_email_claims(uuid.to_string()); let verify_email_token = encode_jwt(&claims); @@ -180,10 +180,10 @@ pub fn send_welcome_must_verify(address: &str, uuid: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_2fa_removed_from_org(address: &str, org_name: &str) -> EmptyResult { +pub async fn send_2fa_removed_from_org(address: &str, org_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/send_2fa_removed_from_org", json!({ @@ -192,10 +192,10 @@ pub fn send_2fa_removed_from_org(address: &str, org_name: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_single_org_removed_from_org(address: &str, org_name: &str) -> EmptyResult { +pub async fn send_single_org_removed_from_org(address: &str, org_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/send_single_org_removed_from_org", json!({ @@ -204,10 +204,10 @@ pub fn send_single_org_removed_from_org(address: &str, org_name: &str) -> EmptyR }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_invite( +pub async fn send_invite( address: &str, uuid: &str, org_id: Option, @@ -236,10 +236,10 @@ pub fn send_invite( }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_invite( +pub async fn send_emergency_access_invite( address: &str, uuid: &str, emer_id: Option, @@ -267,10 +267,10 @@ pub fn 
send_emergency_access_invite( }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_invite_accepted(address: &str, grantee_email: &str) -> EmptyResult { +pub async fn send_emergency_access_invite_accepted(address: &str, grantee_email: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/emergency_access_invite_accepted", json!({ @@ -279,10 +279,10 @@ pub fn send_emergency_access_invite_accepted(address: &str, grantee_email: &str) }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_invite_confirmed(address: &str, grantor_name: &str) -> EmptyResult { +pub async fn send_emergency_access_invite_confirmed(address: &str, grantor_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/emergency_access_invite_confirmed", json!({ @@ -291,10 +291,10 @@ pub fn send_emergency_access_invite_confirmed(address: &str, grantor_name: &str) }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_recovery_approved(address: &str, grantor_name: &str) -> EmptyResult { +pub async fn send_emergency_access_recovery_approved(address: &str, grantor_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/emergency_access_recovery_approved", json!({ @@ -303,10 +303,10 @@ pub fn send_emergency_access_recovery_approved(address: &str, grantor_name: &str }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_recovery_initiated( +pub async fn send_emergency_access_recovery_initiated( address: &str, grantee_name: &str, atype: &str, @@ -322,10 +322,10 @@ pub fn send_emergency_access_recovery_initiated( }), )?; - 
send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_recovery_reminder( +pub async fn send_emergency_access_recovery_reminder( address: &str, grantee_name: &str, atype: &str, @@ -341,10 +341,10 @@ pub fn send_emergency_access_recovery_reminder( }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_recovery_rejected(address: &str, grantor_name: &str) -> EmptyResult { +pub async fn send_emergency_access_recovery_rejected(address: &str, grantor_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/emergency_access_recovery_rejected", json!({ @@ -353,10 +353,10 @@ pub fn send_emergency_access_recovery_rejected(address: &str, grantor_name: &str }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_emergency_access_recovery_timed_out(address: &str, grantee_name: &str, atype: &str) -> EmptyResult { +pub async fn send_emergency_access_recovery_timed_out(address: &str, grantee_name: &str, atype: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/emergency_access_recovery_timed_out", json!({ @@ -366,10 +366,10 @@ pub fn send_emergency_access_recovery_timed_out(address: &str, grantee_name: &st }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_invite_accepted(new_user_email: &str, address: &str, org_name: &str) -> EmptyResult { +pub async fn send_invite_accepted(new_user_email: &str, address: &str, org_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/invite_accepted", json!({ @@ -379,10 +379,10 @@ pub fn send_invite_accepted(new_user_email: &str, address: &str, org_name: &str) }), )?; - send_email(address, &subject, body_html, 
body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult { +pub async fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/invite_confirmed", json!({ @@ -391,10 +391,10 @@ pub fn send_invite_confirmed(address: &str, org_name: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult { +pub async fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult { use crate::util::upcase_first; let device = upcase_first(device); @@ -409,10 +409,10 @@ pub fn send_new_device_logged_in(address: &str, ip: &str, dt: &NaiveDateTime, de }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult { +pub async fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTime, device: &str) -> EmptyResult { use crate::util::upcase_first; let device = upcase_first(device); @@ -428,10 +428,10 @@ pub fn send_incomplete_2fa_login(address: &str, ip: &str, dt: &NaiveDateTime, de }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_token(address: &str, token: &str) -> EmptyResult { +pub async fn send_token(address: &str, token: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/twofactor_email", json!({ @@ -440,10 +440,10 @@ pub fn send_token(address: &str, token: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await 
} -pub fn send_change_email(address: &str, token: &str) -> EmptyResult { +pub async fn send_change_email(address: &str, token: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/change_email", json!({ @@ -452,10 +452,10 @@ pub fn send_change_email(address: &str, token: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -pub fn send_test(address: &str) -> EmptyResult { +pub async fn send_test(address: &str) -> EmptyResult { let (subject, body_html, body_text) = get_text( "email/smtp_test", json!({ @@ -463,43 +463,19 @@ pub fn send_test(address: &str) -> EmptyResult { }), )?; - send_email(address, &subject, body_html, body_text) + send_email(address, &subject, body_html, body_text).await } -fn send_email(address: &str, subject: &str, body_html: String, body_text: String) -> EmptyResult { - let address_split: Vec<&str> = address.rsplitn(2, '@').collect(); - if address_split.len() != 2 { - err!("Invalid email address (no @)"); - } - - let domain_puny = match idna::domain_to_ascii_strict(address_split[0]) { - Ok(d) => d, - Err(_) => err!("Can't convert email domain to ASCII representation"), - }; - - let address = format!("{}@{}", address_split[1], domain_puny); - - let html = SinglePart::builder() - // We force Base64 encoding because in the past we had issues with different encodings. - .header(header::ContentTransferEncoding::Base64) - .header(header::ContentType::TEXT_HTML) - .body(body_html); - - let text = SinglePart::builder() - // We force Base64 encoding because in the past we had issues with different encodings. 
- .header(header::ContentTransferEncoding::Base64) - .header(header::ContentType::TEXT_PLAIN) - .body(body_text); - +async fn send_email(address: &str, subject: &str, body_html: String, body_text: String) -> EmptyResult { let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::>()[1]))) - .to(Mailbox::new(None, Address::from_str(&address)?)) + .to(Mailbox::new(None, Address::from_str(address)?)) .from(Mailbox::new(Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?)) .subject(subject) - .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?; + .multipart(MultiPart::alternative_plain_html(body_text, body_html))?; - match mailer().send(&email) { + match mailer().send(email).await { Ok(_) => Ok(()), // Match some common errors and make them more user friendly Err(e) => { diff --git a/src/main.rs b/src/main.rs index d7bef292..ad47f3c5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,4 +1,30 @@ -#![forbid(unsafe_code)] +#![forbid(unsafe_code, non_ascii_idents)] +#![deny( + rust_2018_idioms, + rust_2021_compatibility, + noop_method_call, + pointer_structural_match, + trivial_casts, + trivial_numeric_casts, + unused_import_braces, + clippy::cast_lossless, + clippy::clone_on_ref_ptr, + clippy::equatable_if_let, + clippy::float_cmp_const, + clippy::inefficient_to_string, + clippy::linkedlist, + clippy::macro_use_imports, + clippy::manual_assert, + clippy::match_wildcard_for_single_variants, + clippy::mem_forget, + clippy::string_add_assign, + clippy::string_to_string, + clippy::unnecessary_join, + clippy::unnecessary_self_imports, + clippy::unused_async, + clippy::verbose_file_reads, + clippy::zero_sized_map_values +)] #![cfg_attr(feature = "unstable", feature(ip))] // The recursion_limit is mainly triggered by the json!() macro. // The more key/value pairs there are the more recursion occurs. 
@@ -6,7 +32,13 @@ // If you go above 128 it will cause rust-analyzer to fail, #![recursion_limit = "87"] -extern crate openssl; +// When enabled use MiMalloc as malloc instead of the default malloc +#[cfg(feature = "enable_mimalloc")] +use mimalloc::MiMalloc; +#[cfg(feature = "enable_mimalloc")] +#[cfg_attr(feature = "enable_mimalloc", global_allocator)] +static GLOBAL: MiMalloc = MiMalloc; + #[macro_use] extern crate rocket; #[macro_use] @@ -20,8 +52,19 @@ extern crate diesel; #[macro_use] extern crate diesel_migrations; -use job_scheduler::{Job, JobScheduler}; -use std::{fs::create_dir_all, panic, path::Path, process::exit, str::FromStr, thread, time::Duration}; +use std::{ + fs::{canonicalize, create_dir_all}, + panic, + path::Path, + process::exit, + str::FromStr, + thread, +}; + +use tokio::{ + fs::File, + io::{AsyncBufReadExt, BufReader}, +}; #[macro_use] mod error; @@ -37,9 +80,11 @@ mod util; pub use config::CONFIG; pub use error::{Error, MapResult}; +use rocket::data::{Limits, ToByteUnit}; pub use util::is_running_in_docker; -fn main() { +#[rocket::main] +async fn main() -> Result<(), Error> { parse_args(); launch_info(); @@ -49,20 +94,23 @@ fn main() { let extra_debug = matches!(level, LF::Trace | LF::Debug); - check_data_folder(); + check_data_folder().await; check_rsa_keys().unwrap_or_else(|_| { error!("Error creating keys, exiting..."); exit(1); }); check_web_vault(); - create_icon_cache_folder(); + create_dir(&CONFIG.icon_cache_folder(), "icon cache"); + create_dir(&CONFIG.tmp_folder(), "tmp folder"); + create_dir(&CONFIG.sends_folder(), "sends folder"); + create_dir(&CONFIG.attachments_folder(), "attachments folder"); - let pool = create_db_pool(); - schedule_jobs(pool.clone()); - crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&pool.get().unwrap()).unwrap(); + let pool = create_db_pool().await; + schedule_jobs(pool.clone()).await; + crate::db::models::TwoFactor::migrate_u2f_to_webauthn(&pool.get().await.unwrap()).await.unwrap(); - 
launch_rocket(pool, extra_debug); // Blocks until program termination. + launch_rocket(pool, extra_debug).await // Blocks until program termination. } const HELP: &str = "\ @@ -126,13 +174,13 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { // Hide failed to close stream messages .level_for("hyper::server", log::LevelFilter::Warn) // Silence rocket logs - .level_for("_", log::LevelFilter::Off) - .level_for("launch", log::LevelFilter::Off) - .level_for("launch_", log::LevelFilter::Off) - .level_for("rocket::rocket", log::LevelFilter::Off) - .level_for("rocket::fairing", log::LevelFilter::Off) - // Never show html5ever and hyper::proto logs, too noisy - .level_for("html5ever", log::LevelFilter::Off) + .level_for("_", log::LevelFilter::Warn) + .level_for("rocket::launch", log::LevelFilter::Error) + .level_for("rocket::launch_", log::LevelFilter::Error) + .level_for("rocket::rocket", log::LevelFilter::Warn) + .level_for("rocket::server", log::LevelFilter::Warn) + .level_for("rocket::fairing::fairings", log::LevelFilter::Warn) + .level_for("rocket::shield::shield", log::LevelFilter::Warn) .level_for("hyper::proto", log::LevelFilter::Off) .level_for("hyper::client", log::LevelFilter::Off) // Prevent cookie_store logs @@ -243,11 +291,7 @@ fn create_dir(path: &str, description: &str) { create_dir_all(path).expect(&err_msg); } -fn create_icon_cache_folder() { - create_dir(&CONFIG.icon_cache_folder(), "icon cache"); -} - -fn check_data_folder() { +async fn check_data_folder() { let data_folder = &CONFIG.data_folder(); let path = Path::new(data_folder); if !path.exists() { @@ -259,6 +303,53 @@ fn check_data_folder() { } exit(1); } + + if is_running_in_docker() + && std::env::var("I_REALLY_WANT_VOLATILE_STORAGE").is_err() + && !docker_data_folder_is_persistent(data_folder).await + { + error!( + "No persistent volume!\n\ + ########################################################################################\n\ + # It looks like you did not 
configure a persistent volume! #\n\ + # This will result in permanent data loss when the container is removed or updated! #\n\ + # If you really want to use volatile storage set `I_REALLY_WANT_VOLATILE_STORAGE=true` #\n\ + ########################################################################################\n" + ); + exit(1); + } +} + +/// Detect when using Docker or Podman the DATA_FOLDER is either a bind-mount or a volume created manually. +/// If not created manually, then the data will not be persistent. +/// A none persistent volume in either Docker or Podman is represented by a 64 alphanumerical string. +/// If we detect this string, we will alert about not having a persistent self defined volume. +/// This probably means that someone forgot to add `-v /path/to/vaultwarden_data/:/data` +async fn docker_data_folder_is_persistent(data_folder: &str) -> bool { + if let Ok(mountinfo) = File::open("/proc/self/mountinfo").await { + // Since there can only be one mountpoint to the DATA_FOLDER + // We do a basic check for this mountpoint surrounded by a space. + let data_folder_match = if data_folder.starts_with('/') { + format!(" {data_folder} ") + } else { + format!(" /{data_folder} ") + }; + let mut lines = BufReader::new(mountinfo).lines(); + while let Some(line) = lines.next_line().await.unwrap_or_default() { + // Only execute a regex check if we find the base match + if line.contains(&data_folder_match) { + let re = regex::Regex::new(r"/volumes/[a-z0-9]{64}/_data /").unwrap(); + if re.is_match(&line) { + return false; + } + // If we did found a match for the mountpoint, but not the regex, then still stop searching. + break; + } + } + } + // In all other cases, just assume a true. + // This is just an informative check to try and prevent data loss. 
+ true } fn check_rsa_keys() -> Result<(), crate::error::Error> { @@ -275,7 +366,7 @@ fn check_rsa_keys() -> Result<(), crate::error::Error> { } if !util::file_exists(&pub_path) { - let rsa_key = openssl::rsa::Rsa::private_key_from_pem(&util::read_file(&priv_path)?)?; + let rsa_key = openssl::rsa::Rsa::private_key_from_pem(&std::fs::read(&priv_path)?)?; let pub_key = rsa_key.public_key_to_pem()?; crate::util::write_file(&pub_path, &pub_key)?; @@ -304,8 +395,8 @@ fn check_web_vault() { } } -fn create_db_pool() -> db::DbPool { - match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()) { +async fn create_db_pool() -> db::DbPool { + match util::retry_db(db::DbPool::from_config, CONFIG.db_connection_retries()).await { Ok(p) => p, Err(e) => { error!("Error creating database pool: {:?}", e); @@ -314,51 +405,74 @@ fn create_db_pool() -> db::DbPool { } } -fn launch_rocket(pool: db::DbPool, extra_debug: bool) { +async fn launch_rocket(pool: db::DbPool, extra_debug: bool) -> Result<(), Error> { let basepath = &CONFIG.domain_path(); + let mut config = rocket::Config::from(rocket::Config::figment()); + config.temp_dir = canonicalize(CONFIG.tmp_folder()).unwrap().into(); + config.cli_colors = false; // Make sure Rocket does not color any values for logging. 
+ config.limits = Limits::new() + .limit("json", 20.megabytes()) // 20MB should be enough for very large imports, something like 5000+ vault entries + .limit("data-form", 525.megabytes()) // This needs to match the maximum allowed file size for Send + .limit("file", 525.megabytes()); // This needs to match the maximum allowed file size for attachments + // If adding more paths here, consider also adding them to // crate::utils::LOGGED_ROUTES to make sure they appear in the log - let result = rocket::ignite() - .mount(&[basepath, "/"].concat(), api::web_routes()) - .mount(&[basepath, "/api"].concat(), api::core_routes()) - .mount(&[basepath, "/admin"].concat(), api::admin_routes()) - .mount(&[basepath, "/identity"].concat(), api::identity_routes()) - .mount(&[basepath, "/icons"].concat(), api::icons_routes()) - .mount(&[basepath, "/notifications"].concat(), api::notifications_routes()) + let instance = rocket::custom(config) + .mount([basepath, "/"].concat(), api::web_routes()) + .mount([basepath, "/api"].concat(), api::core_routes()) + .mount([basepath, "/admin"].concat(), api::admin_routes()) + .mount([basepath, "/identity"].concat(), api::identity_routes()) + .mount([basepath, "/icons"].concat(), api::icons_routes()) + .mount([basepath, "/notifications"].concat(), api::notifications_routes()) .manage(pool) .manage(api::start_notification_server()) .attach(util::AppHeaders()) .attach(util::Cors()) .attach(util::BetterLogging(extra_debug)) - .launch(); + .ignite() + .await?; - // Launch and print error if there is one - // The launch will restore the original logging level - error!("Launch error {:#?}", result); + CONFIG.set_rocket_shutdown_handle(instance.shutdown()); + ctrlc::set_handler(move || { + info!("Exiting vaultwarden!"); + CONFIG.shutdown(); + }) + .expect("Error setting Ctrl-C handler"); + + let _ = instance.launch().await?; + + info!("Vaultwarden process exited!"); + Ok(()) } -fn schedule_jobs(pool: db::DbPool) { +async fn schedule_jobs(pool: 
db::DbPool) { if CONFIG.job_poll_interval_ms() == 0 { info!("Job scheduler disabled."); return; } + + let runtime = tokio::runtime::Runtime::new().unwrap(); + thread::Builder::new() .name("job-scheduler".to_string()) .spawn(move || { + use job_scheduler_ng::{Job, JobScheduler}; + let _runtime_guard = runtime.enter(); + let mut sched = JobScheduler::new(); // Purge sends that are past their deletion date. if !CONFIG.send_purge_schedule().is_empty() { sched.add(Job::new(CONFIG.send_purge_schedule().parse().unwrap(), || { - api::purge_sends(pool.clone()); + runtime.spawn(api::purge_sends(pool.clone())); })); } // Purge trashed items that are old enough to be auto-deleted. if !CONFIG.trash_purge_schedule().is_empty() { sched.add(Job::new(CONFIG.trash_purge_schedule().parse().unwrap(), || { - api::purge_trashed_ciphers(pool.clone()); + runtime.spawn(api::purge_trashed_ciphers(pool.clone())); })); } @@ -366,7 +480,7 @@ fn schedule_jobs(pool: db::DbPool) { // indicates that a user's master password has been compromised. if !CONFIG.incomplete_2fa_schedule().is_empty() { sched.add(Job::new(CONFIG.incomplete_2fa_schedule().parse().unwrap(), || { - api::send_incomplete_2fa_notifications(pool.clone()); + runtime.spawn(api::send_incomplete_2fa_notifications(pool.clone())); })); } @@ -375,7 +489,7 @@ fn schedule_jobs(pool: db::DbPool) { // sending reminders for requests that are about to be granted anyway. if !CONFIG.emergency_request_timeout_schedule().is_empty() { sched.add(Job::new(CONFIG.emergency_request_timeout_schedule().parse().unwrap(), || { - api::emergency_request_timeout_job(pool.clone()); + runtime.spawn(api::emergency_request_timeout_job(pool.clone())); })); } @@ -383,7 +497,7 @@ fn schedule_jobs(pool: db::DbPool) { // emergency access requests. 
if !CONFIG.emergency_notification_reminder_schedule().is_empty() { sched.add(Job::new(CONFIG.emergency_notification_reminder_schedule().parse().unwrap(), || { - api::emergency_notification_reminder_job(pool.clone()); + runtime.spawn(api::emergency_notification_reminder_job(pool.clone())); })); } @@ -398,7 +512,9 @@ fn schedule_jobs(pool: db::DbPool) { // tick, the one that was added earlier will run first. loop { sched.tick(); - thread::sleep(Duration::from_millis(CONFIG.job_poll_interval_ms())); + runtime.block_on(async move { + tokio::time::sleep(tokio::time::Duration::from_millis(CONFIG.job_poll_interval_ms())).await + }); } }) .expect("Error spawning job scheduler thread"); diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 24da377a..06df70a3 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -328,6 +328,7 @@ "Type": 33, "Domains": [ "healthcare.gov", + "cuidadodesalud.gov", "cms.gov" ], "Excluded": false @@ -902,6 +903,7 @@ { "Type": 85, "Domains": [ + "proton.me", "protonmail.com", "protonvpn.com" ], @@ -922,5 +924,20 @@ "wise.com" ], "Excluded": false + }, + { + "Type": 88, + "Domains": [ + "takeaway.com", + "just-eat.dk", + "just-eat.no", + "just-eat.fr", + "just-eat.ch", + "lieferando.de", + "lieferando.at", + "thuisbezorgd.nl", + "pyszne.pl" + ], + "Excluded": false } ] \ No newline at end of file diff --git a/src/static/scripts/bootstrap-native.js b/src/static/scripts/bootstrap-native.js index 3827dfa6..bf26cef8 100644 --- a/src/static/scripts/bootstrap-native.js +++ b/src/static/scripts/bootstrap-native.js @@ -1,6 +1,6 @@ /*! 
- * Native JavaScript for Bootstrap v4.0.8 (https://thednp.github.io/bootstrap.native/) - * Copyright 2015-2021 © dnp_theme + * Native JavaScript for Bootstrap v4.2.0 (https://thednp.github.io/bootstrap.native/) + * Copyright 2015-2022 © dnp_theme * Licensed under MIT (https://github.com/thednp/bootstrap.native/blob/master/LICENSE) */ (function (global, factory) { @@ -9,157 +9,637 @@ (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.BSN = factory()); })(this, (function () { 'use strict'; - const transitionEndEvent = 'webkitTransition' in document.head.style ? 'webkitTransitionEnd' : 'transitionend'; + /** @type {Record} */ + const EventRegistry = {}; - const supportTransition = 'webkitTransition' in document.head.style || 'transition' in document.head.style; + /** + * The global event listener. + * + * @type {EventListener} + * @this {EventTarget} + */ + function globalListener(e) { + const that = this; + const { type } = e; + + [...EventRegistry[type]].forEach((elementsMap) => { + const [element, listenersMap] = elementsMap; + /* istanbul ignore else */ + if (element === that) { + [...listenersMap].forEach((listenerMap) => { + const [listener, options] = listenerMap; + listener.apply(element, [e]); + + if (options && options.once) { + removeListener(element, type, listener, options); + } + }); + } + }); + } + + /** + * Register a new listener with its options and attach the `globalListener` + * to the target if this is the first listener. + * + * @type {Listener.ListenerAction} + */ + const addListener = (element, eventType, listener, options) => { + // get element listeners first + if (!EventRegistry[eventType]) { + EventRegistry[eventType] = new Map(); + } + const oneEventMap = EventRegistry[eventType]; - const transitionDuration = 'webkitTransition' in document.head.style ? 
'webkitTransitionDuration' : 'transitionDuration'; + if (!oneEventMap.has(element)) { + oneEventMap.set(element, new Map()); + } + const oneElementMap = oneEventMap.get(element); - const transitionProperty = 'webkitTransition' in document.head.style ? 'webkitTransitionProperty' : 'transitionProperty'; + // get listeners size + const { size } = oneElementMap; - function getElementTransitionDuration(element) { + // register listener with its options + oneElementMap.set(listener, options); + + // add listener last + if (!size) { + element.addEventListener(eventType, globalListener, options); + } + }; + + /** + * Remove a listener from registry and detach the `globalListener` + * if no listeners are found in the registry. + * + * @type {Listener.ListenerAction} + */ + const removeListener = (element, eventType, listener, options) => { + // get listener first + const oneEventMap = EventRegistry[eventType]; + const oneElementMap = oneEventMap && oneEventMap.get(element); + const savedOptions = oneElementMap && oneElementMap.get(listener); + + // also recover initial options + const { options: eventOptions } = savedOptions !== undefined + ? savedOptions + : { options }; + + // unsubscribe second, remove from registry + if (oneElementMap && oneElementMap.has(listener)) oneElementMap.delete(listener); + if (oneEventMap && (!oneElementMap || !oneElementMap.size)) oneEventMap.delete(element); + if (!oneEventMap || !oneEventMap.size) delete EventRegistry[eventType]; + + // remove listener last + /* istanbul ignore else */ + if (!oneElementMap || !oneElementMap.size) { + element.removeEventListener(eventType, globalListener, eventOptions); + } + }; + + /** + * Advanced event listener based on subscribe / publish pattern. 
+ * @see https://www.patterns.dev/posts/classic-design-patterns/#observerpatternjavascript + * @see https://gist.github.com/shystruk/d16c0ee7ac7d194da9644e5d740c8338#file-subpub-js + * @see https://hackernoon.com/do-you-still-register-window-event-listeners-in-each-component-react-in-example-31a4b1f6f1c8 + */ + const Listener = { + on: addListener, + off: removeListener, + globalListener, + registry: EventRegistry, + }; + + /** + * A global namespace for `click` event. + * @type {string} + */ + const mouseclickEvent = 'click'; + + /** + * A global namespace for 'transitionend' string. + * @type {string} + */ + const transitionEndEvent = 'transitionend'; + + /** + * A global namespace for 'transitionDelay' string. + * @type {string} + */ + const transitionDelay = 'transitionDelay'; + + /** + * A global namespace for `transitionProperty` string for modern browsers. + * + * @type {string} + */ + const transitionProperty = 'transitionProperty'; + + /** + * Shortcut for `window.getComputedStyle(element).propertyName` + * static method. + * + * * If `element` parameter is not an `HTMLElement`, `getComputedStyle` + * throws a `ReferenceError`. + * + * @param {HTMLElement} element target + * @param {string} property the css property + * @return {string} the css property value + */ + function getElementStyle(element, property) { const computedStyle = getComputedStyle(element); - const propertyValue = computedStyle[transitionProperty]; - const durationValue = computedStyle[transitionDuration]; - const durationScale = durationValue.includes('ms') ? 1 : 1000; - const duration = supportTransition && propertyValue && propertyValue !== 'none' + + // must use camelcase strings, + // or non-camelcase strings with `getPropertyValue` + return property.includes('--') + ? computedStyle.getPropertyValue(property) + : computedStyle[property]; + } + + /** + * Utility to get the computed `transitionDelay` + * from Element in miliseconds. 
+ * + * @param {HTMLElement} element target + * @return {number} the value in miliseconds + */ + function getElementTransitionDelay(element) { + const propertyValue = getElementStyle(element, transitionProperty); + const delayValue = getElementStyle(element, transitionDelay); + const delayScale = delayValue.includes('ms') ? /* istanbul ignore next */1 : 1000; + const duration = propertyValue && propertyValue !== 'none' + ? parseFloat(delayValue) * delayScale : 0; + + return !Number.isNaN(duration) ? duration : /* istanbul ignore next */0; + } + + /** + * A global namespace for 'transitionDuration' string. + * @type {string} + */ + const transitionDuration = 'transitionDuration'; + + /** + * Utility to get the computed `transitionDuration` + * from Element in miliseconds. + * + * @param {HTMLElement} element target + * @return {number} the value in miliseconds + */ + function getElementTransitionDuration(element) { + const propertyValue = getElementStyle(element, transitionProperty); + const durationValue = getElementStyle(element, transitionDuration); + const durationScale = durationValue.includes('ms') ? /* istanbul ignore next */1 : 1000; + const duration = propertyValue && propertyValue !== 'none' ? parseFloat(durationValue) * durationScale : 0; - return !Number.isNaN(duration) ? duration : 0; + return !Number.isNaN(duration) ? duration : /* istanbul ignore next */0; } + /** + * Shortcut for the `Element.dispatchEvent(Event)` method. + * + * @param {HTMLElement} element is the target + * @param {Event} event is the `Event` object + */ + const dispatchEvent = (element, event) => element.dispatchEvent(event); + + /** + * Utility to make sure callbacks are consistently + * called when transition ends. 
+ * + * @param {HTMLElement} element target + * @param {EventListener} handler `transitionend` callback + */ function emulateTransitionEnd(element, handler) { let called = 0; const endEvent = new Event(transitionEndEvent); const duration = getElementTransitionDuration(element); + const delay = getElementTransitionDelay(element); if (duration) { - element.addEventListener(transitionEndEvent, function transitionEndWrapper(e) { + /** + * Wrap the handler in on -> off callback + * @type {EventListener} e Event object + */ + const transitionEndWrapper = (e) => { + /* istanbul ignore else */ if (e.target === element) { handler.apply(element, [e]); element.removeEventListener(transitionEndEvent, transitionEndWrapper); called = 1; } - }); + }; + element.addEventListener(transitionEndEvent, transitionEndWrapper); setTimeout(() => { - if (!called) element.dispatchEvent(endEvent); - }, duration + 17); + /* istanbul ignore next */ + if (!called) dispatchEvent(element, endEvent); + }, duration + delay + 17); } else { handler.apply(element, [endEvent]); } } - function queryElement(selector, parent) { - const lookUp = parent && parent instanceof Element ? parent : document; - return selector instanceof Element ? selector : lookUp.querySelector(selector); - } - + /** + * Checks if an object is a `Node`. + * + * @param {any} node the target object + * @returns {boolean} the query result + */ + const isNode = (element) => (element && [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11] + .some((x) => +element.nodeType === x)) || false; + + /** + * Check if a target object is `Window`. + * => equivalent to `object instanceof Window` + * + * @param {any} object the target object + * @returns {boolean} the query result + */ + const isWindow = (object) => (object && object.constructor.name === 'Window') || false; + + /** + * Checks if an object is a `Document`. 
+ * @see https://dom.spec.whatwg.org/#node + * + * @param {any} object the target object + * @returns {boolean} the query result + */ + const isDocument = (object) => (object && object.nodeType === 9) || false; + + /** + * Returns the `document` or the `#document` element. + * @see https://github.com/floating-ui/floating-ui + * @param {(Node | Window)=} node + * @returns {Document} + */ + function getDocument(node) { + // node instanceof Document + if (isDocument(node)) return node; + // node instanceof Node + if (isNode(node)) return node.ownerDocument; + // node instanceof Window + if (isWindow(node)) return node.document; + // node is undefined | NULL + return window.document; + } + + /** + * Utility to check if target is typeof `HTMLElement`, `Element`, `Node` + * or find one that matches a selector. + * + * @param {Node | string} selector the input selector or target element + * @param {ParentNode=} parent optional node to look into + * @return {HTMLElement?} the `HTMLElement` or `querySelector` result + */ + function querySelector(selector, parent) { + if (isNode(selector)) { + return selector; + } + const lookUp = isNode(parent) ? parent : getDocument(); + + return lookUp.querySelector(selector); + } + + /** + * Shortcut for `HTMLElement.closest` method which also works + * with children of `ShadowRoot`. The order of the parameters + * is intentional since they're both required. + * + * @see https://stackoverflow.com/q/54520554/803358 + * + * @param {HTMLElement} element Element to look into + * @param {string} selector the selector name + * @return {HTMLElement?} the query result + */ + function closest(element, selector) { + return element ? (element.closest(selector) + // break out of `ShadowRoot` + || closest(element.getRootNode().host, selector)) : null; + } + + /** + * Shortcut for `Object.assign()` static method. 
+ * @param {Record} obj a target object + * @param {Record} source a source object + */ + const ObjectAssign = (obj, source) => Object.assign(obj, source); + + /** + * Check class in `HTMLElement.classList`. + * + * @param {HTMLElement} element target + * @param {string} classNAME to check + * @returns {boolean} + */ function hasClass(element, classNAME) { return element.classList.contains(classNAME); } + /** + * Remove class from `HTMLElement.classList`. + * + * @param {HTMLElement} element target + * @param {string} classNAME to remove + * @returns {void} + */ function removeClass(element, classNAME) { element.classList.remove(classNAME); } - const addEventListener = 'addEventListener'; + /** + * Checks if an element is an `HTMLElement`. + * @see https://dom.spec.whatwg.org/#node + * + * @param {any} element the target object + * @returns {boolean} the query result + */ + const isHTMLElement = (element) => (element && element.nodeType === 1) || false; + + /** @type {Map>>} */ + const componentData = new Map(); + /** + * An interface for web components background data. + * @see https://github.com/thednp/bootstrap.native/blob/master/src/components/base-component.js + */ + const Data = { + /** + * Sets web components data. + * @param {HTMLElement} element target element + * @param {string} component the component's name or a unique key + * @param {Record} instance the component instance + */ + set: (element, component, instance) => { + if (!isHTMLElement(element)) return; + + /* istanbul ignore else */ + if (!componentData.has(component)) { + componentData.set(component, new Map()); + } + + const instanceMap = componentData.get(component); + // not undefined, but defined right above + instanceMap.set(element, instance); + }, + + /** + * Returns all instances for specified component. 
+ * @param {string} component the component's name or a unique key + * @returns {Map>?} all the component instances + */ + getAllFor: (component) => { + const instanceMap = componentData.get(component); + + return instanceMap || null; + }, + + /** + * Returns the instance associated with the target. + * @param {HTMLElement} element target element + * @param {string} component the component's name or a unique key + * @returns {Record?} the instance + */ + get: (element, component) => { + if (!isHTMLElement(element) || !component) return null; + const allForC = Data.getAllFor(component); + const instance = element && allForC && allForC.get(element); + + return instance || null; + }, + + /** + * Removes web components data. + * @param {HTMLElement} element target element + * @param {string} component the component's name or a unique key + */ + remove: (element, component) => { + const instanceMap = componentData.get(component); + if (!instanceMap || !isHTMLElement(element)) return; + + instanceMap.delete(element); + + /* istanbul ignore else */ + if (instanceMap.size === 0) { + componentData.delete(component); + } + }, + }; + + /** + * An alias for `Data.get()`. + * @type {SHORTY.getInstance} + */ + const getInstance = (target, component) => Data.get(target, component); + + /** + * Checks if an object is an `Object`. + * + * @param {any} obj the target object + * @returns {boolean} the query result + */ + const isObject = (obj) => (typeof obj === 'object') || false; + + /** + * Returns a namespaced `CustomEvent` specific to each component. 
+ * @param {string} EventType Event.type + * @param {Record=} config Event.options | Event.properties + * @returns {SHORTY.OriginalEvent} a new namespaced event + */ + function OriginalEvent(EventType, config) { + const OriginalCustomEvent = new CustomEvent(EventType, { + cancelable: true, bubbles: true, + }); - const removeEventListener = 'removeEventListener'; + /* istanbul ignore else */ + if (isObject(config)) { + ObjectAssign(OriginalCustomEvent, config); + } + return OriginalCustomEvent; + } + /** + * Global namespace for most components `fade` class. + */ const fadeClass = 'fade'; + /** + * Global namespace for most components `show` class. + */ const showClass = 'show'; + /** + * Global namespace for most components `dismiss` option. + */ const dataBsDismiss = 'data-bs-dismiss'; - function bootstrapCustomEvent(namespacedEventType, eventProperties) { - const OriginalCustomEvent = new CustomEvent(namespacedEventType, { cancelable: true }); + /** @type {string} */ + const alertString = 'alert'; - if (eventProperties instanceof Object) { - Object.keys(eventProperties).forEach((key) => { - Object.defineProperty(OriginalCustomEvent, key, { - value: eventProperties[key], - }); - }); - } - return OriginalCustomEvent; - } + /** @type {string} */ + const alertComponent = 'Alert'; + /** + * Shortcut for `HTMLElement.getAttribute()` method. + * @param {HTMLElement} element target element + * @param {string} attribute attribute name + * @returns {string?} attribute value + */ + const getAttribute = (element, attribute) => element.getAttribute(attribute); + + /** + * The raw value or a given component option. 
+ * + * @typedef {string | HTMLElement | Function | number | boolean | null} niceValue + */ + + /** + * Utility to normalize component options + * + * @param {any} value the input value + * @return {niceValue} the normalized value + */ function normalizeValue(value) { - if (value === 'true') { + if (['true', true].includes(value)) { // boolean + // if ('true' === value) { // boolean return true; } - if (value === 'false') { + if (['false', false].includes(value)) { // boolean + // if ('false' === value) { // boolean return false; } - if (!Number.isNaN(+value)) { - return +value; + if (value === '' || value === 'null') { // null + return null; } - if (value === '' || value === 'null') { - return null; + if (value !== '' && !Number.isNaN(+value)) { // number + return +value; } - // string / function / Element / Object + // string / function / HTMLElement / object return value; } + /** + * Shortcut for `Object.keys()` static method. + * @param {Record} obj a target object + * @returns {string[]} + */ + const ObjectKeys = (obj) => Object.keys(obj); + + /** + * Shortcut for `String.toLowerCase()`. + * + * @param {string} source input string + * @returns {string} lowercase output string + */ + const toLowerCase = (source) => source.toLowerCase(); + + /** + * Utility to normalize component options. + * + * @param {HTMLElement} element target + * @param {Record} defaultOps component default options + * @param {Record} inputOps component instance options + * @param {string=} ns component namespace + * @return {Record} normalized component options object + */ function normalizeOptions(element, defaultOps, inputOps, ns) { + const data = { ...element.dataset }; + /** @type {Record} */ const normalOps = {}; + /** @type {Record} */ const dataOps = {}; - const data = { ...element.dataset }; + const title = 'title'; - Object.keys(data) - .forEach((k) => { - const key = k.includes(ns) - ? 
k.replace(ns, '').replace(/[A-Z]/, (match) => match.toLowerCase()) - : k; + ObjectKeys(data).forEach((k) => { + const key = ns && k.includes(ns) + ? k.replace(ns, '').replace(/[A-Z]/, (match) => toLowerCase(match)) + : k; - dataOps[key] = normalizeValue(data[k]); - }); + dataOps[key] = normalizeValue(data[k]); + }); - Object.keys(inputOps) - .forEach((k) => { - inputOps[k] = normalizeValue(inputOps[k]); - }); + ObjectKeys(inputOps).forEach((k) => { + inputOps[k] = normalizeValue(inputOps[k]); + }); - Object.keys(defaultOps) - .forEach((k) => { - if (k in inputOps) { - normalOps[k] = inputOps[k]; - } else if (k in dataOps) { - normalOps[k] = dataOps[k]; - } else { - normalOps[k] = defaultOps[k]; - } - }); + ObjectKeys(defaultOps).forEach((k) => { + /* istanbul ignore else */ + if (k in inputOps) { + normalOps[k] = inputOps[k]; + } else if (k in dataOps) { + normalOps[k] = dataOps[k]; + } else { + normalOps[k] = k === title + ? getAttribute(element, title) + : defaultOps[k]; + } + }); return normalOps; } + var version = "4.2.0"; + + const Version = version; + /* Native JavaScript for Bootstrap 5 | Base Component ----------------------------------------------------- */ + /** Returns a new `BaseComponent` instance. 
*/ class BaseComponent { - constructor(name, target, defaults, config) { + /** + * @param {HTMLElement | string} target `Element` or selector string + * @param {BSN.ComponentOptions=} config component instance options + */ + constructor(target, config) { const self = this; - const element = queryElement(target); + const element = querySelector(target); + + if (!element) { + throw Error(`${self.name} Error: "${target}" is not a valid selector.`); + } - if (element[name]) element[name].dispose(); + /** @static @type {BSN.ComponentOptions} */ + self.options = {}; + + const prevInstance = Data.get(element, self.name); + if (prevInstance) prevInstance.dispose(); + + /** @type {HTMLElement} */ self.element = element; - if (defaults && Object.keys(defaults).length) { - self.options = normalizeOptions(element, defaults, (config || {}), 'bs'); + /* istanbul ignore else */ + if (self.defaults && ObjectKeys(self.defaults).length) { + self.options = normalizeOptions(element, self.defaults, (config || {}), 'bs'); } - element[name] = self; + + Data.set(element, self.name, self); } - dispose(name) { + /* eslint-disable */ + /* istanbul ignore next */ + /** @static */ + get version() { return Version; } + + /* eslint-enable */ + /* istanbul ignore next */ + /** @static */ + get name() { return this.constructor.name; } + + /* istanbul ignore next */ + /** @static */ + get defaults() { return this.constructor.defaults; } + + /** + * Removes component from target element; + */ + dispose() { const self = this; - self.element[name] = null; - Object.keys(self).forEach((prop) => { self[prop] = null; }); + Data.remove(self.element, self.name); + ObjectKeys(self).forEach((prop) => { self[prop] = null; }); } } @@ -168,24 +648,39 @@ // ALERT PRIVATE GC // ================ - const alertString = 'alert'; - const alertComponent = 'Alert'; const alertSelector = `.${alertString}`; const alertDismissSelector = `[${dataBsDismiss}="${alertString}"]`; + /** + * Static method which returns an existing 
`Alert` instance associated + * to a target `Element`. + * + * @type {BSN.GetInstance} + */ + const getAlertInstance = (element) => getInstance(element, alertComponent); + + /** + * An `Alert` initialization callback. + * @type {BSN.InitCallback} + */ + const alertInitCallback = (element) => new Alert(element); + // ALERT CUSTOM EVENTS // =================== - const closeAlertEvent = bootstrapCustomEvent(`close.bs.${alertString}`); - const closedAlertEvent = bootstrapCustomEvent(`closed.bs.${alertString}`); + const closeAlertEvent = OriginalEvent(`close.bs.${alertString}`); + const closedAlertEvent = OriginalEvent(`closed.bs.${alertString}`); - // ALERT EVENT HANDLERS - // ==================== + // ALERT EVENT HANDLER + // =================== + /** + * Alert `transitionend` callback. + * @param {Alert} self target Alert instance + */ function alertTransitionEnd(self) { - const { element, relatedTarget } = self; + const { element } = self; toggleAlertHandler(self); - if (relatedTarget) closedAlertEvent.relatedTarget = relatedTarget; - element.dispatchEvent(closedAlertEvent); + dispatchEvent(element, closedAlertEvent); self.dispose(); element.remove(); @@ -193,16 +688,25 @@ // ALERT PRIVATE METHOD // ==================== + /** + * Toggle on / off the `click` event listener. + * @param {Alert} self the target alert instance + * @param {boolean=} add when `true`, event listener is added + */ function toggleAlertHandler(self, add) { - const action = add ? addEventListener : removeEventListener; - if (self.dismiss) self.dismiss[action]('click', self.close); + const action = add ? addListener : removeListener; + const { dismiss } = self; + /* istanbul ignore else */ + if (dismiss) action(dismiss, mouseclickEvent, self.close); } // ALERT DEFINITION // ================ + /** Creates a new Alert instance. 
*/ class Alert extends BaseComponent { + /** @param {HTMLElement | string} target element or selector */ constructor(target) { - super(alertComponent, target); + super(target); // bind const self = this; @@ -210,28 +714,37 @@ const { element } = self; // the dismiss button - self.dismiss = queryElement(alertDismissSelector, element); - self.relatedTarget = null; + /** @static @type {HTMLElement?} */ + self.dismiss = querySelector(alertDismissSelector, element); // add event listener - toggleAlertHandler(self, 1); + toggleAlertHandler(self, true); } + /* eslint-disable */ + /** + * Returns component name string. + */ + get name() { return alertComponent; } + /* eslint-enable */ + // ALERT PUBLIC METHODS // ==================== + /** + * Public method that hides the `.alert` element from the user, + * disposes the instance once animation is complete, then + * removes the element from the DOM. + * + * @param {Event=} e most likely the `click` event + * @this {Alert} the `Alert` instance or `EventTarget` + */ close(e) { - const target = e ? e.target : null; - const self = e - ? e.target.closest(alertSelector)[alertComponent] - : this; + const self = e ? getAlertInstance(closest(this, alertSelector)) : this; const { element } = self; - if (self && element && hasClass(element, showClass)) { - if (target) { - closeAlertEvent.relatedTarget = target; - self.relatedTarget = target; - } - element.dispatchEvent(closeAlertEvent); + /* istanbul ignore else */ + if (element && hasClass(element, showClass)) { + dispatchEvent(element, closeAlertEvent); if (closeAlertEvent.defaultPrevented) return; removeClass(element, showClass); @@ -242,123 +755,423 @@ } } + /** Remove the component from target element. 
*/ dispose() { toggleAlertHandler(this); - super.dispose(alertComponent); + super.dispose(); } } - Alert.init = { - component: alertComponent, + ObjectAssign(Alert, { selector: alertSelector, - constructor: Alert, - }; + init: alertInitCallback, + getInstance: getAlertInstance, + }); + + /** + * A global namespace for aria-pressed. + * @type {string} + */ + const ariaPressed = 'aria-pressed'; + /** + * Shortcut for `HTMLElement.setAttribute()` method. + * @param {HTMLElement} element target element + * @param {string} attribute attribute name + * @param {string} value attribute value + * @returns {void} + */ + const setAttribute = (element, attribute, value) => element.setAttribute(attribute, value); + + /** + * Add class to `HTMLElement.classList`. + * + * @param {HTMLElement} element target + * @param {string} classNAME to add + * @returns {void} + */ function addClass(element, classNAME) { element.classList.add(classNAME); } + /** + * Global namespace for most components active class. + */ const activeClass = 'active'; + /** + * Global namespace for most components `toggle` option. + */ const dataBsToggle = 'data-bs-toggle'; + /** @type {string} */ + const buttonString = 'button'; + + /** @type {string} */ + const buttonComponent = 'Button'; + /* Native JavaScript for Bootstrap 5 | Button ---------------------------------------------*/ // BUTTON PRIVATE GC // ================= - const buttonString = 'button'; - const buttonComponent = 'Button'; const buttonSelector = `[${dataBsToggle}="${buttonString}"]`; - const ariaPressed = 'aria-pressed'; + + /** + * Static method which returns an existing `Button` instance associated + * to a target `Element`. + * + * @type {BSN.GetInstance