From a631fc0077035fede429ca89e8d231b24ef004ad Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Sat, 18 Dec 2021 16:14:31 -0800 Subject: [PATCH 01/25] Sync global_domains.json to bitwarden/server@2f518fb (Ubisoft) --- src/static/global_domains.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 24879a54..7105407f 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -905,5 +905,13 @@ "protonvpn.com" ], "Excluded": false + }, + { + "Type": 86, + "Domains": [ + "ubisoft.com", + "ubi.com" + ], + "Excluded": false } ] \ No newline at end of file From d8869adf5225f2bca2dac3e73224b5949a0f3e7b Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Sat, 18 Dec 2021 16:15:07 -0800 Subject: [PATCH 02/25] Sync global_domains.json to bitwarden/server@224bfb6 (Wells Fargo) --- src/static/global_domains.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 7105407f..27b426dd 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -47,7 +47,8 @@ "Type": 5, "Domains": [ "wellsfargo.com", - "wf.com" + "wf.com", + "wellsfargoadvisors.com" ], "Excluded": false }, From 2f9ac61a4e86c272b8029226ad4b7b3fa7171088 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Mon, 20 Dec 2021 01:34:31 -0800 Subject: [PATCH 03/25] Add support for external icon services If an external icon service is configured, icon requests return an HTTP redirect to the corresponding icon at the external service. An external service may be useful for various reasons, such as if: * The Vaultwarden instance has no external network connectivity. * The Vaultwarden instance has trouble handling large bursts of icon requests. * There are concerns that an attacker may probe the instance to try to detect whether icons for certain sites have been cached, which would suggest that the instance contains entries for those sites. * The external icon service does a better job of providing icons than the built-in fetcher. --- .env.template | 20 ++++++++++++++++--- src/api/icons.rs | 50 +++++++++++++++++++++++++++++++++++++++++++++--- src/config.rs | 30 ++++++++++++++++++++++++++--- 3 files changed, 91 insertions(+), 9 deletions(-) diff --git a/.env.template b/.env.template index 6af6b53b..ca6962b4 100644 --- a/.env.template +++ b/.env.template @@ -129,10 +129,24 @@ ## Number of times to retry the database connection during startup, with 1 second delay between each retry, set to 0 to retry indefinitely # DB_CONNECTION_RETRIES=15 +## Icon service +## The predefined icon services are: internal, bitwarden, duckduckgo, google. +## To specify a custom icon service, set a URL template with exactly one instance of `{}`, +## which is replaced with the domain. For example: `https://icon.example.com/domain/{}`. +## +## `internal` refers to Vaultwarden's built-in icon fetching implementation. +## If an external service is set, an icon request to Vaultwarden will return an HTTP 307 +## redirect to the corresponding icon at the external service. An external service may +## be useful if your Vaultwarden instance has no external network connectivity, or if +## you are concerned that someone may probe your instance to try to detect whether icons +## for certain sites have been cached. 
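(Illustrative aside, not part of the patch: the URL-template behaviour described above can be sketched in a few lines of Rust. The helper names `validate_icon_service` and `icon_url`, and the example service URL and domain, are hypothetical; the validation rules and the `{}` substitution mirror what this patch adds to src/config.rs and src/api/icons.rs.)

// Minimal sketch of the `{}` URL-template handling described above.
fn validate_icon_service(value: &str) -> Result<(), String> {
    match value {
        // The predefined services need no further checks.
        "internal" | "bitwarden" | "duckduckgo" | "google" => Ok(()),
        // Anything else must be an http(s) URL template containing exactly one `{}`.
        t if !t.starts_with("http") => Err(format!("`{}` must start with \"http\"", t)),
        t => match t.matches("{}").count() {
            1 => Ok(()),
            n => Err(format!("expected exactly one `{{}}` placeholder, found {}", n)),
        },
    }
}

fn icon_url(template: &str, domain: &str) -> String {
    // e.g. "https://icon.example.com/domain/{}" + "example.org"
    //   -> "https://icon.example.com/domain/example.org"
    template.replace("{}", domain)
}

fn main() {
    assert!(validate_icon_service("https://icon.example.com/domain/{}").is_ok());
    println!("{}", icon_url("https://icon.example.com/domain/{}", "example.org"));
}

An icon request to Vaultwarden then answers with an HTTP 307 redirect to the expanded URL instead of fetching the icon itself, as described above.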
+# ICON_SERVICE=internal + ## Disable icon downloading -## Set to true to disable icon downloading, this would still serve icons from $ICON_CACHE_FOLDER, -## but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0, -## otherwise it will delete them and they won't be downloaded again. +## Set to true to disable icon downloading in the internal icon service. +## This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external +## network requests. $ICON_CACHE_TTL must also be set to 0; otherwise, the existing icons +## will be deleted eventually, but won't be downloaded again. # DISABLE_ICON_DOWNLOAD=false ## Icon download timeout diff --git a/src/api/icons.rs b/src/api/icons.rs index 675ba43d..ff71cd57 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -10,7 +10,11 @@ use std::{ use once_cell::sync::Lazy; use regex::Regex; use reqwest::{blocking::Client, blocking::Response, header}; -use rocket::{http::ContentType, response::Content, Route}; +use rocket::{ + http::ContentType, + response::{Content, Redirect}, + Route, +}; use crate::{ error::Error, @@ -19,7 +23,13 @@ use crate::{ }; pub fn routes() -> Vec { - routes![icon] + match CONFIG.icon_service().as_str() { + "internal" => routes![icon_internal], + "bitwarden" => routes![icon_bitwarden], + "duckduckgo" => routes![icon_duckduckgo], + "google" => routes![icon_google], + _ => routes![icon_custom], + } } static CLIENT: Lazy = Lazy::new(|| { @@ -50,8 +60,42 @@ static ICON_SIZE_REGEX: Lazy = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+ // Special HashMap which holds the user defined Regex to speedup matching the regex. static ICON_BLACKLIST_REGEX: Lazy>> = Lazy::new(|| RwLock::new(HashMap::new())); +fn icon_redirect(domain: &str, template: &str) -> Option { + if !is_valid_domain(domain) { + warn!("Invalid domain: {}", domain); + return None; + } + + if is_domain_blacklisted(domain) { + return None; + } + + let url = template.replace("{}", domain); + Some(Redirect::temporary(url)) +} + +#[get("//icon.png")] +fn icon_custom(domain: String) -> Option { + icon_redirect(&domain, &CONFIG.icon_service()) +} + +#[get("//icon.png")] +fn icon_bitwarden(domain: String) -> Option { + icon_redirect(&domain, "https://icons.bitwarden.net/{}/icon.png") +} + +#[get("//icon.png")] +fn icon_duckduckgo(domain: String) -> Option { + icon_redirect(&domain, "https://icons.duckduckgo.com/ip3/{}.ico") +} + +#[get("//icon.png")] +fn icon_google(domain: String) -> Option { + icon_redirect(&domain, "https://www.google.com/s2/favicons?domain={}&sz=32") +} + #[get("//icon.png")] -fn icon(domain: String) -> Cached>> { +fn icon_internal(domain: String) -> Cached>> { const FALLBACK_ICON: &[u8] = include_bytes!("../static/images/fallback-icon.png"); if !is_valid_domain(&domain) { diff --git a/src/config.rs b/src/config.rs index 17b39e04..9639b3c4 100644 --- a/src/config.rs +++ b/src/config.rs @@ -406,9 +406,10 @@ make_config! { /// This setting applies globally to all users. incomplete_2fa_time_limit: i64, true, def, 3; - /// Disable icon downloads |> Set to true to disable icon downloading, this would still serve icons from - /// $ICON_CACHE_FOLDER, but it won't produce any external network request. Needs to set $ICON_CACHE_TTL to 0, - /// otherwise it will delete them and they won't be downloaded again. + /// Disable icon downloads |> Set to true to disable icon downloading in the internal icon service. 
+ /// This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external + /// network requests. $ICON_CACHE_TTL must also be set to 0; otherwise, the existing icons + /// will be deleted eventually, but won't be downloaded again. disable_icon_download: bool, true, def, false; /// Allow new signups |> Controls whether new users can register. Users can be invited by the vaultwarden admin even if this is disabled signups_allowed: bool, true, def, true; @@ -449,6 +450,13 @@ make_config! { ip_header: String, true, def, "X-Real-IP".to_string(); /// Internal IP header property, used to avoid recomputing each time _ip_header_enabled: bool, false, gen, |c| &c.ip_header.trim().to_lowercase() != "none"; + /// Icon service |> The predefined icon services are: internal, bitwarden, duckduckgo, google. + /// To specify a custom icon service, set a URL template with exactly one instance of `{}`, + /// which is replaced with the domain. For example: `https://icon.example.com/domain/{}`. + /// `internal` refers to Vaultwarden's built-in icon fetching implementation. If an external + /// service is set, an icon request to Vaultwarden will return an HTTP 307 redirect to the + /// corresponding icon at the external service. + icon_service: String, false, def, "internal".to_string(); /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded icon_cache_ttl: u64, true, def, 2_592_000; /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again. @@ -659,6 +667,22 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { } } + // Check if the icon service is valid + let icon_service = cfg.icon_service.as_str(); + match icon_service { + "internal" | "bitwarden" | "duckduckgo" | "google" => (), + _ => { + if !icon_service.starts_with("http") { + err!(format!("Icon service URL `{}` must start with \"http\"", icon_service)) + } + match icon_service.matches("{}").count() { + 1 => (), // nominal + 0 => err!(format!("Icon service URL `{}` has no placeholder \"{{}}\"", icon_service)), + _ => err!(format!("Icon service URL `{}` has more than one placeholder \"{{}}\"", icon_service)), + } + } + } + Ok(()) } From 5529264c3f35215e58758c25c9682e9ef38957ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Wed, 22 Dec 2021 21:48:49 +0100 Subject: [PATCH 04/25] Basic ratelimit for user login (including 2FA) and admin login --- Cargo.lock | 79 +++++++++++++++++++++++++++++++++++++++++++-- Cargo.toml | 1 + src/api/admin.rs | 4 +++ src/api/identity.rs | 3 ++ src/config.rs | 10 ++++++ src/main.rs | 1 + src/ratelimit.rs | 38 ++++++++++++++++++++++ 7 files changed, 134 insertions(+), 2 deletions(-) create mode 100644 src/ratelimit.rs diff --git a/Cargo.lock b/Cargo.lock index df139857..8d5d1b68 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "ahash" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217" + [[package]] name = "aho-corasick" version = "0.7.18" @@ -412,6 +418,16 @@ dependencies = [ "subtle", ] +[[package]] +name = "dashmap" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c" +dependencies = [ + "cfg-if 1.0.0", + "num_cpus", +] + [[package]] name = "data-encoding" version = "2.3.2" @@ -731,6 +747,12 @@ version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dabf1872aaab32c886832f2276d2f5399887e2bd613698a02359e4ea83f8de12" +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + [[package]] name = "futures-util" version = "0.3.18" @@ -802,6 +824,23 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" +[[package]] +name = "governor" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06c5d2f987ee8f6dff3fa1a352058dc59b990e447e4c7846aa7d804971314f7b" +dependencies = [ + "dashmap", + "futures", + "futures-timer", + "no-std-compat", + "nonzero_ext", + "parking_lot 0.11.2", + "quanta", + "rand 0.8.4", + "smallvec 1.7.0", +] + [[package]] name = "h2" version = "0.3.7" @@ -842,6 +881,16 @@ dependencies = [ "walkdir", ] +[[package]] +name = "hashbrown" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b62f79061a0bc2e046024cb7ba44b08419ed238ecbd9adbd787434b9e8c25" +dependencies = [ + "ahash", + "autocfg", +] + [[package]] name = "hashbrown" version = "0.11.2" @@ -1042,7 +1091,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.11.2", ] [[package]] @@ -1480,6 +1529,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" +dependencies = [ + "hashbrown 0.8.2", +] + [[package]] name = "nom" version = "4.1.1" @@ -1500,6 +1558,12 @@ dependencies = [ "version_check 0.9.3", ] +[[package]] +name = "nonzero_ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44a1290799eababa63ea60af0cbc3f03363e328e58f32fb0294798ed3e85f444" + [[package]] name = "ntapi" version = "0.3.6" @@ -1966,11 +2030,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "292972edad6bbecc137ab84c5e36421a4a6c979ea31d3cc73540dd04315b33e1" dependencies = [ "byteorder", - "hashbrown", + "hashbrown 0.11.2", "idna 0.2.3", "psl-types", ] +[[package]] +name = "quanta" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98dc777a7a39b76b1a26ae9d3f691f4c1bc0455090aa0b64dfa8cb7fc34c135" +dependencies = [ + "libc", + "winapi 0.3.9", +] + [[package]] name = "quick-error" version = "1.2.3" @@ -3213,6 +3287,7 @@ dependencies = [ "diesel_migrations", "dotenv", "fern", + "governor", "handlebars", "html5ever", "idna 0.2.3", diff --git a/Cargo.toml b/Cargo.toml index 5d4617ce..5f8e16b0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -139,6 +139,7 @@ backtrace = "0.3.63" # Macro ident concatenation paste = "1.0.6" +governor = "0.3.2" [patch.crates-io] # Use newest ring diff --git a/src/api/admin.rs 
b/src/api/admin.rs index 74fd6d8a..60f6aad4 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -166,6 +166,10 @@ fn post_admin_login( ) -> Result> { let data = data.into_inner(); + if crate::ratelimit::check_limit_admin(&ip.ip).is_err() { + return Err(Flash::error(Redirect::to(admin_url(referer)), "Too many requests, try again later.")); + } + // If the token is invalid, redirect to login page if !_validate_token(&data.token) { error!("Invalid admin token. IP: {}", ip.ip); diff --git a/src/api/identity.rs b/src/api/identity.rs index 356364b1..3cb26ba3 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -84,6 +84,9 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult err!("Scope not supported") } + // Ratelimit the login + crate::ratelimit::check_limit_login(&ip.ip)?; + // Get the user let username = data.username.as_ref().unwrap(); let user = match User::find_by_mail(username, &conn) { diff --git a/src/config.rs b/src/config.rs index 9639b3c4..7312a6c2 100644 --- a/src/config.rs +++ b/src/config.rs @@ -511,6 +511,16 @@ make_config! { /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets allowed_iframe_ancestors: String, true, def, String::new(); + + /// Seconds between login requests |> Number of seconds, on average, between login requests before rate limiting kicks in. Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2 + login_ratelimit_seconds: u64, false, def, 60; + /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `login_ratelimit_seconds` + login_ratelimit_max_burst: u32, false, def, 10; + + /// Seconds between admin requests |> Number of seconds, on average, between admin requests before rate limiting kicks in + admin_ratelimit_seconds: u64, false, def, 300; + /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `admin_ratelimit_seconds` + admin_ratelimit_max_burst: u32, false, def, 3; }, /// Yubikey settings diff --git a/src/main.rs b/src/main.rs index e23b2e4c..dd9fa51e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -32,6 +32,7 @@ mod crypto; #[macro_use] mod db; mod mail; +mod ratelimit; mod util; pub use config::CONFIG; diff --git a/src/ratelimit.rs b/src/ratelimit.rs new file mode 100644 index 00000000..c85ce7ad --- /dev/null +++ b/src/ratelimit.rs @@ -0,0 +1,38 @@ +use once_cell::sync::Lazy; +use std::{net::IpAddr, num::NonZeroU32, time::Duration}; + +use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter}; + +use crate::{Error, CONFIG}; + +type Limiter = RateLimiter, DefaultClock>; + +static LIMITER_LOGIN: Lazy = Lazy::new(|| { + let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds()); + let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst"); + RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst)) +}); + +static LIMITER_ADMIN: Lazy = Lazy::new(|| { + let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds()); + let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst"); + RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst)) +}); + +pub fn check_limit_login(ip: 
&IpAddr) -> Result<(), Error> { + match LIMITER_LOGIN.check_key(ip) { + Ok(_) => Ok(()), + Err(_e) => { + err_code!("Too many login requests", 429); + } + } +} + +pub fn check_limit_admin(ip: &IpAddr) -> Result<(), Error> { + match LIMITER_ADMIN.check_key(ip) { + Ok(_) => Ok(()), + Err(_e) => { + err_code!("Too many admin requests", 429); + } + } +} From 4bd8eae07e7e76fc35a0adf02f01bb3df5e446f5 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Fri, 24 Dec 2021 17:59:12 +0100 Subject: [PATCH 05/25] Fixed #2151 --- src/api/core/organizations.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 6b6d4547..fa79c39c 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -235,7 +235,7 @@ fn get_user_collections(headers: Headers, conn: DbConn) -> Json { } #[get("/organizations//collections")] -fn get_org_collections(org_id: String, _headers: AdminHeaders, conn: DbConn) -> Json { +fn get_org_collections(org_id: String, _headers: ManagerHeadersLoose, conn: DbConn) -> Json { Json(json!({ "Data": Collection::find_by_organization(&org_id, &conn) From 2c94ea075c3074a7930766f670fcf27eb97e4495 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Fri, 24 Dec 2021 18:24:25 +0100 Subject: [PATCH 06/25] Small changes to icon log messages. As requested in #2136, some small changes on the type of log messages and wording used. Resolves #2136 --- src/api/icons.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/api/icons.rs b/src/api/icons.rs index ff71cd57..8d87b10a 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -294,7 +294,7 @@ fn is_domain_blacklisted(domain: &str) -> bool { // Use the pre-generate Regex stored in a Lazy HashMap. 
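(Illustrative aside on the rate limiting introduced in the patches above, not part of this hunk: a minimal, self-contained sketch of how the governor quota behaves per client IP. The helper name `build_login_limiter`, the hard-coded 60/10 values (the documented defaults), and the sample IP address are assumptions for illustration; the governor calls themselves mirror the src/ratelimit.rs module added above.)

use std::{net::IpAddr, num::NonZeroU32, time::Duration};

use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter};

type Limiter = RateLimiter<IpAddr, DashMapStateStore<IpAddr>, DefaultClock>;

fn build_login_limiter() -> Limiter {
    let period = Duration::from_secs(60); // LOGIN_RATELIMIT_SECONDS default
    let burst = NonZeroU32::new(10).unwrap(); // LOGIN_RATELIMIT_MAX_BURST default
    RateLimiter::keyed(Quota::with_period(period).unwrap().allow_burst(burst))
}

fn main() {
    let limiter = build_login_limiter();
    let ip: IpAddr = "203.0.113.7".parse().unwrap();
    // Each check_key() call consumes one permit for this IP. The burst of 10 is
    // available immediately; after that, one new permit accrues per 60 seconds.
    let allowed = (0..11).filter(|_| limiter.check_key(&ip).is_ok()).count();
    println!("{} of 11 attempts allowed", allowed); // expected: 10
}

This is also why the documentation recommends a login burst size of at least 2: a single sign-in with 2FA issues two requests to the login endpoint in quick succession.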
if regex.is_match(domain) { - warn!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); + debug!("Blacklisted domain: {} matched ICON_BLACKLIST_REGEX", domain); is_blacklisted = true; } } @@ -330,7 +330,7 @@ fn get_icon(domain: &str) -> Option<(Vec, String)> { Some((icon, icon_type.unwrap_or("x-icon").to_string())) } Err(e) => { - error!("Error downloading icon: {:?}", e); + warn!("Unable to download icon: {:?}", e); let miss_indicator = path + ".miss"; save_icon(&miss_indicator, &[]); None @@ -599,7 +599,7 @@ fn get_page(url: &str) -> Result { fn get_page_with_referer(url: &str, referer: &str) -> Result { if is_domain_blacklisted(url::Url::parse(url).unwrap().host_str().unwrap_or_default()) { - err!("Favicon resolves to a blacklisted domain or IP!", url); + warn!("Favicon '{}' resolves to a blacklisted domain or IP!", url); } let mut client = CLIENT.get(url); @@ -757,10 +757,10 @@ fn save_icon(path: &str, icon: &[u8]) { f.write_all(icon).expect("Error writing icon file"); } Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => { - create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache"); + create_dir_all(&CONFIG.icon_cache_folder()).expect("Error creating icon cache folder"); } Err(e) => { - warn!("Icon save error: {:?}", e); + warn!("Unable to save icon: {:?}", e); } } } From 605419ae1b187c96fe0306b293d9526f2cbee1ab Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Fri, 24 Dec 2021 13:22:10 -0800 Subject: [PATCH 07/25] Sync global_domains.json to bitwarden/server@5a8f334 (TransferWise) --- src/static/global_domains.json | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/static/global_domains.json b/src/static/global_domains.json index 27b426dd..24da377a 100644 --- a/src/static/global_domains.json +++ b/src/static/global_domains.json @@ -914,5 +914,13 @@ "ubi.com" ], "Excluded": false + }, + { + "Type": 87, + "Domains": [ + "transferwise.com", + "wise.com" + ], + "Excluded": false } ] \ No newline at end of file From d4eb21c2d9735e05041ecfc984974aaaec941123 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Sat, 25 Dec 2021 01:10:21 +0100 Subject: [PATCH 08/25] Better document the new rate limiting --- .env.template | 11 +++++++++++ src/config.rs | 6 +++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.env.template b/.env.template index ca6962b4..7fcbbfcb 100644 --- a/.env.template +++ b/.env.template @@ -268,6 +268,17 @@ ## Multiple values must be separated with a whitespace. # ALLOWED_IFRAME_ANCESTORS= +## Number of seconds, on average, between login requests from the same IP address before rate limiting kicks in. +# LOGIN_RATELIMIT_SECONDS=60 +## Allow a burst of requests of up to this size, while maintaining the average indicated by `LOGIN_RATELIMIT_SECONDS`. +## Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2. +# LOGIN_RATELIMIT_MAX_BURST=10 + +## Number of seconds, on average, between admin requests from the same IP address before rate limiting kicks in. +# ADMIN_RATELIMIT_SECONDS=300 +## Allow a burst of requests of up to this size, while maintaining the average indicated by `ADMIN_RATELIMIT_SECONDS`. +# ADMIN_RATELIMIT_MAX_BURST=3 + ## Yubico (Yubikey) Settings ## Set your Client ID and Secret Key for Yubikey OTP ## You can generate it here: https://upgrade.yubico.com/getapikey/ diff --git a/src/config.rs b/src/config.rs index 7312a6c2..5bbe8575 100644 --- a/src/config.rs +++ b/src/config.rs @@ -512,12 +512,12 @@ make_config! 
{ /// Allowed iframe ancestors (Know the risks!) |> Allows other domains to embed the web vault into an iframe, useful for embedding into secure intranets allowed_iframe_ancestors: String, true, def, String::new(); - /// Seconds between login requests |> Number of seconds, on average, between login requests before rate limiting kicks in. Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2 + /// Seconds between login requests |> Number of seconds, on average, between login and 2FA requests from the same IP address before rate limiting kicks in login_ratelimit_seconds: u64, false, def, 60; - /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `login_ratelimit_seconds` + /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `login_ratelimit_seconds`. Note that this applies to both the login and the 2FA, so it's recommended to allow a burst size of at least 2 login_ratelimit_max_burst: u32, false, def, 10; - /// Seconds between admin requests |> Number of seconds, on average, between admin requests before rate limiting kicks in + /// Seconds between admin requests |> Number of seconds, on average, between admin requests from the same IP address before rate limiting kicks in admin_ratelimit_seconds: u64, false, def, 300; /// Max burst size for login requests |> Allow a burst of requests of up to this size, while maintaining the average indicated by `admin_ratelimit_seconds` admin_ratelimit_max_burst: u32, false, def, 3; From 5b430f22bc9b829fdb5543c4a227a31ff8fb2cfe Mon Sep 17 00:00:00 2001 From: BlackDex Date: Sun, 26 Dec 2021 12:40:12 +0100 Subject: [PATCH 09/25] Support all DB's for Alpine and Debian - Using my own rust-musl build containers we now support all database types for both Debian and Alpine. - Added new Alpine containers for armv6 and arm64/aarch64 - The Debian builds can also be done wihout dpkg magic stuff, probably some fixes in Rust regarding linking (Or maybe OpenSSL or Diesel), in any case, it works now without hacking dpkg and apt. 
- Updated toolchain and crates --- Cargo.lock | 235 +++++++++++++------------- Cargo.toml | 14 +- docker/Dockerfile.j2 | 91 ++++------ docker/amd64/Dockerfile | 15 +- docker/amd64/Dockerfile.alpine | 21 ++- docker/amd64/Dockerfile.buildx | 15 +- docker/amd64/Dockerfile.buildx.alpine | 21 ++- docker/arm64/Dockerfile | 50 ++---- docker/arm64/Dockerfile.alpine | 125 ++++++++++++++ docker/arm64/Dockerfile.buildx | 50 ++---- docker/arm64/Dockerfile.buildx.alpine | 125 ++++++++++++++ docker/armv6/Dockerfile | 50 ++---- docker/armv6/Dockerfile.alpine | 125 ++++++++++++++ docker/armv6/Dockerfile.buildx | 50 ++---- docker/armv6/Dockerfile.buildx.alpine | 125 ++++++++++++++ docker/armv7/Dockerfile | 50 ++---- docker/armv7/Dockerfile.alpine | 21 ++- docker/armv7/Dockerfile.buildx | 50 ++---- docker/armv7/Dockerfile.buildx.alpine | 21 ++- hooks/arches.sh | 5 - rust-toolchain | 2 +- src/error.rs | 2 + 22 files changed, 830 insertions(+), 433 deletions(-) create mode 100644 docker/arm64/Dockerfile.alpine create mode 100644 docker/arm64/Dockerfile.buildx.alpine create mode 100644 docker/armv6/Dockerfile.alpine create mode 100644 docker/armv6/Dockerfile.buildx.alpine diff --git a/Cargo.lock b/Cargo.lock index df139857..243f6b98 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -57,13 +57,13 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.51" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -283,13 +283,13 @@ dependencies = [ [[package]] name = "chrono-tz" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c01c1c607d25c71bbaa67c113d6c6b36c434744b4fd66691d711b5b1bc0c8b" +checksum = "58549f1842da3080ce63002102d5bc954c7bc843d4f47818e642abdc36253552" dependencies = [ "chrono", "chrono-tz-build", - "phf 0.10.0", + "phf 0.10.1", ] [[package]] @@ -299,7 +299,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db058d493fb2f65f41861bfed7e3fe6335264a9f0f92710cab5bdf01fef09069" dependencies = [ "parse-zoneinfo", - "phf 0.10.0", + "phf 0.10.1", "phf_codegen 0.10.0", ] @@ -451,9 +451,9 @@ version = "0.3.0" source = "git+https://github.com/SergioBenitez/Devise.git?rev=e58b3ac9a#e58b3ac9afc3b6ff10a8aaf02a3e768a8f530089" dependencies = [ "bitflags", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -479,9 +479,9 @@ version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -532,9 +532,9 @@ checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" [[package]] name = "encoding_rs" -version = "0.8.29" +version = "0.8.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a74ea89a0a1b98f6332de42c95baff457ada66d1cb4030f9ff151b2041a1c746" +checksum = "7896dc8abb250ffdda33912550faa54c88ec8b998dec0b2c55ab224921ce11df" dependencies = [ "cfg-if 1.0.0", ] @@ -546,9 +546,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7c5f0096a91d210159eceb2ff5e1c4da18388a170e1e3ce948aac9c8fdbbf595" dependencies = [ "heck", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -568,9 +568,9 @@ checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" [[package]] name = "fastrand" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b394ed3d285a429378d3b384b9eb1285267e7df4b166df24b7a6939a04dc392e" +checksum = "779d043b6a0b90cc4c0ed7ee380a6504394cee7efd7db050e3774eee387324b2" dependencies = [ "instant", ] @@ -662,9 +662,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd0210d8c325c245ff06fd95a3b13689a1a276ac8cfa8e8720cb840bfb84b9e" +checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" dependencies = [ "futures-channel", "futures-core", @@ -677,9 +677,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc8cd39e3dbf865f7340dce6a2d401d24fd37c6fe6c4f0ee0de8bfca2252d27" +checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" dependencies = [ "futures-core", "futures-sink", @@ -687,15 +687,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "629316e42fe7c2a0b9a65b47d159ceaa5453ab14e8f0a3c5eedbb8cd55b4a445" +checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" [[package]] name = "futures-executor" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b808bf53348a36cab739d7e04755909b9fcaaa69b7d7e588b37b6ec62704c97" +checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" dependencies = [ "futures-core", "futures-task", @@ -704,38 +704,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e481354db6b5c353246ccf6a728b0c5511d752c08da7260546fc0933869daa11" +checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" [[package]] name = "futures-macro" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89f17b21645bc4ed773c69af9c9a0effd4a3f1a3876eadd453469f8854e7fdd" +checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] name = "futures-sink" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "996c6442437b62d21a32cd9906f9c41e7dc1e19a9579843fad948696769305af" +checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" [[package]] name = "futures-task" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dabf1872aaab32c886832f2276d2f5399887e2bd613698a02359e4ea83f8de12" +checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" [[package]] name = "futures-util" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "41d22213122356472061ac0f1ab2cee28d2bac8491410fd68c2af53d1cedb83e" +checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" dependencies = [ "futures-channel", "futures-core", @@ -804,9 +804,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "h2" -version = "0.3.7" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fd819562fcebdac5afc5c113c3ec36f902840b70fd4fc458799c8ce4607ae55" +checksum = "8f072413d126e57991455e0a922b31e4c8ba7c2ffbebf6b78b4f8521397d65cd" dependencies = [ "bytes 1.1.0", "fnv", @@ -829,9 +829,9 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "handlebars" -version = "4.1.5" +version = "4.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ad84da8f63da982543fc85fcabaee2ad1fdd809d99d64a48887e2e942ddfe46" +checksum = "167fa173496c9eadd8749cca6f8339ac88e248f3ad2442791d0b743318a94fc0" dependencies = [ "log 0.4.14", "pest", @@ -906,9 +906,9 @@ dependencies = [ "log 0.4.14", "mac", "markup5ever", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -919,7 +919,7 @@ checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" dependencies = [ "bytes 1.1.0", "fnv", - "itoa", + "itoa 0.4.8", ] [[package]] @@ -966,9 +966,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.15" +version = "0.14.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436ec0091e4f20e655156a30a0df3770fe2900aa301e548e08446ec794b6953c" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" dependencies = [ "bytes 1.1.0", "futures-channel", @@ -979,7 +979,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa", + "itoa 0.4.8", "pin-project-lite", "socket2 0.4.2", "tokio", @@ -1007,7 +1007,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.1.0", - "hyper 0.14.15", + "hyper 0.14.16", "native-tls", "tokio", "tokio-native-tls", @@ -1087,6 +1087,12 @@ version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + [[package]] name = "job_scheduler" version = "1.2.1" @@ -1171,9 +1177,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.108" +version = "0.2.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119" +checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125" [[package]] name = "libsqlite3-sys" @@ -1306,9 +1312,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" dependencies = [ "migrations_internals", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -1526,9 +1532,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 
1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -1552,9 +1558,9 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" dependencies = [ "hermit-abi", "libc", @@ -1571,9 +1577,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" [[package]] name = "opaque-debug" @@ -1609,18 +1615,18 @@ checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" [[package]] name = "openssl-src" -version = "300.0.2+3.0.0" +version = "111.17.0+1.1.1m" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14a760a11390b1a5daf72074d4f6ff1a6e772534ae191f999f57e9ee8146d1fb" +checksum = "05d6a336abd10814198f66e2a91ccd7336611f30334119ca8ce300536666fcf4" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.71" +version = "0.9.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7df13d165e607909b363a4757a6f133f8a818a74e9d3a98d09c6128e15fa4c73" +checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" dependencies = [ "autocfg", "cc", @@ -1791,9 +1797,9 @@ checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -1818,9 +1824,9 @@ dependencies = [ [[package]] name = "phf" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fc3db1018c4b59d7d582a739436478b6035138b6aecbce989fc91c3e98409f" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" dependencies = [ "phf_shared 0.10.0", ] @@ -1904,9 +1910,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.22" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12295df4f294471248581bc09bef3c38a5e46f1e36d6a37353621a0c6c357e1f" +checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" [[package]] name = "ppv-lite86" @@ -1946,18 +1952,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.32" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43" +checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1" dependencies = [ "unicode-xid 0.2.2", ] [[package]] name = "psl-types" -version = "2.0.9" +version = "2.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4af8f675df9e68626b5059f8909ae261b8f5c3e8ab14813ad7f6cc7a134dcafb" +checksum = "e8eda7c62d9ecaafdf8b62374c006de0adf61666ae96a96ba74a37134aa4e470" [[package]] name = "publicsuffix" @@ -1998,7 +2004,7 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" dependencies = [ - 
"proc-macro2 1.0.32", + "proc-macro2 1.0.34", ] [[package]] @@ -2183,9 +2189,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.7" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bea77bc708afa10e59905c3d4af7c8fd43c9214251673095ff8b14345fcbc5" +checksum = "7c4e0a76dc12a116108933f6301b95e83634e0c47b0afbed6abbaa0601e99258" dependencies = [ "async-compression", "base64 0.13.0", @@ -2197,7 +2203,7 @@ dependencies = [ "futures-util", "http", "http-body", - "hyper 0.14.15", + "hyper 0.14.16", "hyper-tls", "ipnet", "js-sys", @@ -2361,9 +2367,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9613b5a66ab9ba26415184cfc41156594925a9cf3a2057e57f31ff145f6568" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" [[package]] name = "safemem" @@ -2455,9 +2461,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.130" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" +checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008" dependencies = [ "serde_derive", ] @@ -2483,22 +2489,22 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.130" +version = "1.0.132" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" +checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] name = "serde_json" -version = "1.0.72" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0ffa0837f2dfa6fb90868c2b5468cad482e175f7dad97e7421951e663f2b527" +checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" dependencies = [ - "itoa", + "itoa 1.0.1", "ryu", "serde", ] @@ -2510,7 +2516,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" dependencies = [ "form_urlencoded", - "itoa", + "itoa 0.4.8", "ryu", "serde", ] @@ -2665,11 +2671,11 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", "serde", "serde_derive", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -2679,13 +2685,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" dependencies = [ "base-x", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", "serde", "serde_derive", "serde_json", "sha1", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -2716,7 +2722,7 @@ checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" dependencies = [ "phf_generator 0.8.0", "phf_shared 0.8.0", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", ] @@ -2739,11 +2745,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.82" +version = "1.0.84" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59" +checksum = "ecb2e6da8ee5eb9a61068762a32fa9619cc591ceb055b3687f4cd4051ec2e06b" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", "unicode-xid 0.2.2", ] @@ -2800,9 +2806,9 @@ version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -2857,10 +2863,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", "standback", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -2880,11 +2886,10 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e992e41e0d2fb9f755b37446f20900f64446ef54874f40a60c78f021ac6144" +checksum = "fbbf1c778ec206785635ce8ad57fe52b3009ae9e0c9f574a728f3049d3e55838" dependencies = [ - "autocfg", "bytes 1.1.0", "libc", "memchr", @@ -2976,9 +2981,9 @@ version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", ] [[package]] @@ -3064,9 +3069,9 @@ checksum = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" [[package]] name = "typenum" -version = "1.14.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" +checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "u2f" @@ -3321,9 +3326,9 @@ dependencies = [ "bumpalo", "lazy_static", "log 0.4.14", - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", "wasm-bindgen-shared", ] @@ -3355,9 +3360,9 @@ version = "0.2.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7803e0eea25835f8abdc585cd3021b3deb11543c6fe226dcd30b228857c5c5ab" dependencies = [ - "proc-macro2 1.0.32", + "proc-macro2 1.0.34", "quote 1.0.10", - "syn 1.0.82", + "syn 1.0.84", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3380,12 +3385,11 @@ dependencies = [ [[package]] name = "webauthn-rs" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5275a4ed4cd88814475b5ec51b84886eb17691fd3171f565581eca91d3489a10" +checksum = "261310333d5941ec47038eed295ee0ed6f5cee36b7575dc6c4b63ee8e7fe590c" dependencies = [ "base64 0.13.0", - "log 0.4.14", "nom 4.1.1", "openssl", "rand 0.8.4", @@ -3395,6 +3399,7 @@ dependencies = [ "serde_derive", "serde_json", "thiserror", + "tracing", "url 2.2.2", ] diff --git a/Cargo.toml b/Cargo.toml index 5d4617ce..f0615fc2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,7 +34,7 @@ rocket = { version = "=0.5.0-dev", features = ["tls"], default-features = false rocket_contrib = "=0.5.0-dev" # HTTP client -reqwest = { version = "0.11.7", features = ["blocking", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } +reqwest = { version = "0.11.8", features 
= ["blocking", "json", "gzip", "brotli", "socks", "cookies", "trust-dns"] } # Used for custom short lived cookie jar cookie = "0.15.1" @@ -55,8 +55,8 @@ rmpv = "1.0.0" chashmap = "2.2.2" # A generic serialization/deserialization framework -serde = { version = "1.0.130", features = ["derive"] } -serde_json = "1.0.72" +serde = { version = "1.0.132", features = ["derive"] } +serde_json = "1.0.73" # Logging log = "0.4.14" @@ -78,7 +78,7 @@ uuid = { version = "0.8.2", features = ["v4"] } # Date and time libraries chrono = { version = "0.4.19", features = ["serde"] } -chrono-tz = "0.6.0" +chrono-tz = "0.6.1" time = "0.2.27" # Job scheduler @@ -95,7 +95,7 @@ jsonwebtoken = "7.2.0" # U2F library u2f = "0.2.0" -webauthn-rs = "0.3.0" +webauthn-rs = "0.3.1" # Yubico Library yubico = { version = "0.10.0", features = ["online-tokio"], default-features = false } @@ -104,7 +104,7 @@ yubico = { version = "0.10.0", features = ["online-tokio"], default-features = f dotenv = { version = "0.15.0", default-features = false } # Lazy initialization -once_cell = "1.8.0" +once_cell = "1.9.0" # Numerical libraries num-traits = "0.2.14" @@ -115,7 +115,7 @@ tracing = { version = "0.1.29", features = ["log"] } # Needed to have lettre tra lettre = { version = "0.10.0-rc.4", features = ["smtp-transport", "builder", "serde", "native-tls", "hostname", "tracing"], default-features = false } # Template library -handlebars = { version = "4.1.5", features = ["dir_source"] } +handlebars = { version = "4.1.6", features = ["dir_source"] } # For favicon extraction from main website html5ever = "0.25.1" diff --git a/docker/Dockerfile.j2 b/docker/Dockerfile.j2 index 6039e06f..792d660f 100644 --- a/docker/Dockerfile.j2 +++ b/docker/Dockerfile.j2 @@ -3,16 +3,24 @@ # This file was generated using a Jinja2 template. # Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. 
-{% set build_stage_base_image = "rust:1.55-buster" %} +{% set build_stage_base_image = "rust:1.57-buster" %} {% if "alpine" in target_file %} {% if "amd64" in target_file %} -{% set build_stage_base_image = "clux/muslrust:nightly-2021-10-23" %} -{% set runtime_stage_base_image = "alpine:3.14" %} +{% set build_stage_base_image = "blackdex/rust-musl:x86_64-musl-nightly-2021-12-25" %} +{% set runtime_stage_base_image = "alpine:3.15" %} {% set package_arch_target = "x86_64-unknown-linux-musl" %} {% elif "armv7" in target_file %} -{% set build_stage_base_image = "messense/rust-musl-cross:armv7-musleabihf" %} -{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.14" %} +{% set build_stage_base_image = "blackdex/rust-musl:armv7-musleabihf-nightly-2021-12-25" %} +{% set runtime_stage_base_image = "balenalib/armv7hf-alpine:3.15" %} {% set package_arch_target = "armv7-unknown-linux-musleabihf" %} +{% elif "armv6" in target_file %} +{% set build_stage_base_image = "blackdex/rust-musl:arm-musleabi-nightly-2021-12-25" %} +{% set runtime_stage_base_image = "balenalib/rpi-alpine:3.15" %} +{% set package_arch_target = "arm-unknown-linux-musleabi" %} +{% elif "arm64" in target_file %} +{% set build_stage_base_image = "blackdex/rust-musl:aarch64-musl-nightly-2021-12-25" %} +{% set runtime_stage_base_image = "balenalib/aarch64-alpine:3.15" %} +{% set package_arch_target = "aarch64-unknown-linux-musl" %} {% endif %} {% elif "amd64" in target_file %} {% set runtime_stage_base_image = "debian:buster-slim" %} @@ -75,22 +83,7 @@ FROM vaultwarden/web-vault@{{ vault_image_digest }} as vault ########################## BUILD IMAGE ########################## FROM {{ build_stage_base_image }} as build -{% if "alpine" in target_file %} -{% if "amd64" in target_file %} -# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time. -ARG DB=sqlite,postgresql -{% set features = "sqlite,postgresql" %} -{% else %} -# Alpine-based ARM (musl) only supports sqlite during compile time. -# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed. -ARG DB=sqlite,vendored_openssl -{% set features = "sqlite" %} -{% endif %} -{% else %} -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql -{% set features = "sqlite,mysql,postgresql" %} -{% endif %} + # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -118,51 +111,32 @@ ENV RUSTFLAGS='-C link-arg=-s' ENV CFLAGS_armv7_unknown_linux_musleabihf="-mfpu=vfpv3-d16" {% endif %} {% elif "arm" in target_file %} -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the {{ package_arch_prefix }} version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for {{ package_arch_name }} architecture. 
-# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture {{ package_arch_name }} \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture {{ package_arch_name }} \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev{{ package_arch_prefix }} \ libc6-dev{{ package_arch_prefix }} \ libpq5{{ package_arch_prefix }} \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev{{ package_arch_prefix }} \ + libmariadb3{{ package_arch_prefix }} \ libmariadb-dev{{ package_arch_prefix }} \ libmariadb-dev-compat{{ package_arch_prefix }} \ gcc-{{ package_cross_compiler }} \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5{{ package_arch_prefix }} package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. - && ln -sfnr /usr/lib/{{ package_cross_compiler }}/libpq.so.5 /usr/lib/{{ package_cross_compiler }}/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.{{ package_arch_target }}]' >> "${CARGO_HOME}/config" \ && echo 'linker = "{{ package_cross_compiler }}-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/{{ package_cross_compiler }}"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_compiler }}-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/{{ package_cross_compiler }}" -ENV OPENSSL_LIB_DIR="/usr/lib/{{ package_cross_compiler }}" +ENV CC_{{ package_arch_target | replace("-", "_") }}="/usr/bin/{{ package_cross_compiler }}-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/{{ package_cross_compiler }}" \ + OPENSSL_LIB_DIR="/usr/lib/{{ package_cross_compiler }}" {% elif "amd64" in target_file %} # Install DB packages @@ -188,6 +162,9 @@ COPY ./build.rs ./build.rs RUN {{ mount_rust_cache -}} rustup target add {{ package_arch_target }} {% endif %} +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -203,6 +180,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN {{ mount_rust_cache -}} cargo build --features ${DB} --release{{ package_arch_target_param }} {% if "alpine" in target_file %} {% if "armv7" in target_file %} @@ -216,13 +194,14 @@ RUN musl-strip target/{{ package_arch_target }}/release/vaultwarden # because we already have a binary built FROM {{ runtime_stage_base_image }} -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 -{% if "alpine" in runtime_stage_base_image %} -ENV SSL_CERT_DIR=/etc/ssl/certs +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 +{%- if 
"alpine" in runtime_stage_base_image %} \ + SSL_CERT_DIR=/etc/ssl/certs {% endif %} + {% if "amd64" not in target_file %} # hadolint ignore=DL3059 RUN [ "cross-build-start" ] @@ -236,12 +215,6 @@ RUN mkdir /data \ tzdata \ curl \ dumb-init \ -{% if "mysql" in features %} - mariadb-connector-c \ -{% endif %} -{% if "postgresql" in features %} - postgresql-libs \ -{% endif %} ca-certificates {% else %} && apt-get update && apt-get install -y \ diff --git a/docker/amd64/Dockerfile b/docker/amd64/Dockerfile index a18552b4..c6bf80b7 100644 --- a/docker/amd64/Dockerfile +++ b/docker/amd64/Dockerfile @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -64,6 +63,9 @@ COPY ./rust-toolchain ./rust-toolchain COPY ./build.rs ./build.rs +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -79,6 +81,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release ######################## RUNTIME IMAGE ######################## @@ -86,9 +89,9 @@ RUN cargo build --features ${DB} --release # because we already have a binary built FROM debian:buster-slim -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # Create data folder and Install needed libraries diff --git a/docker/amd64/Dockerfile.alpine b/docker/amd64/Dockerfile.alpine index 780944d0..a483c833 100644 --- a/docker/amd64/Dockerfile.alpine +++ b/docker/amd64/Dockerfile.alpine @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM clux/muslrust:nightly-2021-10-23 as build +FROM blackdex/rust-musl:x86_64-musl-nightly-2021-12-25 as build + -# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time. -ARG DB=sqlite,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. 
ENV DEBIAN_FRONTEND=noninteractive \ @@ -58,6 +57,9 @@ COPY ./build.rs ./build.rs RUN rustup target add x86_64-unknown-linux-musl +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -73,17 +75,19 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM alpine:3.14 +FROM alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 -ENV SSL_CERT_DIR=/etc/ssl/certs # Create data folder and Install needed libraries @@ -93,7 +97,6 @@ RUN mkdir /data \ tzdata \ curl \ dumb-init \ - postgresql-libs \ ca-certificates diff --git a/docker/amd64/Dockerfile.buildx b/docker/amd64/Dockerfile.buildx index c9634b89..b7193704 100644 --- a/docker/amd64/Dockerfile.buildx +++ b/docker/amd64/Dockerfile.buildx @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -64,6 +63,9 @@ COPY ./rust-toolchain ./rust-toolchain COPY ./build.rs ./build.rs +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -79,6 +81,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release ######################## RUNTIME IMAGE ######################## @@ -86,9 +89,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. # because we already have a binary built FROM debian:buster-slim -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # Create data folder and Install needed libraries diff --git a/docker/amd64/Dockerfile.buildx.alpine b/docker/amd64/Dockerfile.buildx.alpine index 9e4f69ea..649d83de 100644 --- a/docker/amd64/Dockerfile.buildx.alpine +++ b/docker/amd64/Dockerfile.buildx.alpine @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM clux/muslrust:nightly-2021-10-23 as build +FROM blackdex/rust-musl:x86_64-musl-nightly-2021-12-25 as build + -# Alpine-based AMD64 (musl) does not support mysql/mariadb during compile time. -ARG DB=sqlite,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. 
ENV DEBIAN_FRONTEND=noninteractive \ @@ -58,6 +57,9 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add x86_64-unknown-linux-musl +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -73,17 +75,19 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=x86_64-unknown-linux-musl ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM alpine:3.14 +FROM alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 -ENV SSL_CERT_DIR=/etc/ssl/certs # Create data folder and Install needed libraries @@ -93,7 +97,6 @@ RUN mkdir /data \ tzdata \ curl \ dumb-init \ - postgresql-libs \ ca-certificates diff --git a/docker/arm64/Dockerfile b/docker/arm64/Dockerfile index 77aa63c3..7873e21e 100644 --- a/docker/arm64/Dockerfile +++ b/docker/arm64/Dockerfile @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :arm64 version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for arm64 architecture. -# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture arm64 \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture arm64 \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:arm64 \ libc6-dev:arm64 \ libpq5:arm64 \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:arm64 \ + libmariadb3:arm64 \ libmariadb-dev:arm64 \ libmariadb-dev-compat:arm64 \ gcc-aarch64-linux-gnu \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. 
- # The libpq5:arm64 package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. - && ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \ && echo 'linker = "aarch64-linux-gnu-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/aarch64-linux-gnu"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" -ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu" +ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \ + OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN rustup target add aarch64-unknown-linux-gnu +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu # because we already have a binary built FROM balenalib/aarch64-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/arm64/Dockerfile.alpine b/docker/arm64/Dockerfile.alpine new file mode 100644 index 00000000..15a76ee7 --- /dev/null +++ b/docker/arm64/Dockerfile.alpine @@ -0,0 +1,125 @@ +# syntax=docker/dockerfile:1 + +# This file was generated using a Jinja2 template. +# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. + +# Using multistage build: +# https://docs.docker.com/develop/develop-images/multistage-build/ +# https://whitfin.io/speeding-up-rust-docker-builds/ +####################### VAULT BUILD IMAGE ####################### +# The web-vault digest specifies a particular web-vault build on Docker Hub. +# Using the digest instead of the tag name provides better security, +# as the digest of an image is immutable, whereas a tag name can later +# be changed to point to a malicious image. +# +# To verify the current digest for a given tag name: +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, +# click the tag name to view the digest of the image it currently points to. 
+# - From the command line: +# $ docker pull vaultwarden/web-vault:v2.25.0 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.0 +# [vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527] +# +# - Conversely, to get the tag name from the digest: +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 +# [vaultwarden/web-vault:v2.25.0] +# +FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault + +########################## BUILD IMAGE ########################## +FROM blackdex/rust-musl:aarch64-musl-nightly-2021-12-25 as build + + + +# Build time options to avoid dpkg warnings and help with reproducible builds. +ENV DEBIAN_FRONTEND=noninteractive \ + LANG=C.UTF-8 \ + TZ=UTC \ + TERM=xterm-256color \ + CARGO_HOME="/root/.cargo" \ + USER="root" + + +# Create CARGO_HOME folder and don't download rust docs +RUN mkdir -pv "${CARGO_HOME}" \ + && rustup set profile minimal + +ENV RUSTFLAGS='-C link-arg=-s' + +# Creates a dummy project used to grab dependencies +RUN USER=root cargo new --bin /app +WORKDIR /app + +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./ +COPY ./rust-toolchain ./rust-toolchain +COPY ./build.rs ./build.rs + +RUN rustup target add aarch64-unknown-linux-musl + +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + +# Builds your dependencies and removes the +# dummy project, except the target folder +# This folder contains the compiled dependencies +RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl \ + && find . -not -path "./target*" -delete + +# Copies the complete project +# To avoid copying unneeded files, use .dockerignore +COPY . . + +# Make sure that we actually build the project +RUN touch src/main.rs + +# Builds again, this time it'll just be +# your actual source files being built +# hadolint ignore=DL3059 +RUN cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl + +######################## RUNTIME IMAGE ######################## +# Create a new stage with a minimal image +# because we already have a binary built +FROM balenalib/aarch64-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs + + +# hadolint ignore=DL3059 +RUN [ "cross-build-start" ] + +# Create data folder and Install needed libraries +RUN mkdir /data \ + && apk add --no-cache \ + openssl \ + tzdata \ + curl \ + dumb-init \ + ca-certificates + +# hadolint ignore=DL3059 +RUN [ "cross-build-end" ] + +VOLUME /data +EXPOSE 80 +EXPOSE 3012 + +# Copies the files from the context (Rocket.toml file and web-vault) +# and the binary from the "build" stage to the current stage +WORKDIR / +COPY Rocket.toml . +COPY --from=vault /web-vault ./web-vault +COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden . + +COPY docker/healthcheck.sh /healthcheck.sh +COPY docker/start.sh /start.sh + +HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] + +# Configures the startup! 
+ENTRYPOINT ["/usr/bin/dumb-init", "--"] +CMD ["/start.sh"] diff --git a/docker/arm64/Dockerfile.buildx b/docker/arm64/Dockerfile.buildx index b0a82c93..aa6009d1 100644 --- a/docker/arm64/Dockerfile.buildx +++ b/docker/arm64/Dockerfile.buildx @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :arm64 version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for arm64 architecture. -# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture arm64 \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture arm64 \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:arm64 \ libc6-dev:arm64 \ libpq5:arm64 \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:arm64 \ + libmariadb3:arm64 \ libmariadb-dev:arm64 \ libmariadb-dev-compat:arm64 \ gcc-aarch64-linux-gnu \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5:arm64 package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. 
- && ln -sfnr /usr/lib/aarch64-linux-gnu/libpq.so.5 /usr/lib/aarch64-linux-gnu/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.aarch64-unknown-linux-gnu]' >> "${CARGO_HOME}/config" \ && echo 'linker = "aarch64-linux-gnu-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/aarch64-linux-gnu"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" -ENV OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu" +ENV CC_aarch64_unknown_linux_gnu="/usr/bin/aarch64-linux-gnu-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/aarch64-linux-gnu" \ + OPENSSL_LIB_DIR="/usr/lib/aarch64-linux-gnu" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add aarch64-unknown-linux-gnu +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-gnu ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. # because we already have a binary built FROM balenalib/aarch64-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/arm64/Dockerfile.buildx.alpine b/docker/arm64/Dockerfile.buildx.alpine new file mode 100644 index 00000000..34f2c8ba --- /dev/null +++ b/docker/arm64/Dockerfile.buildx.alpine @@ -0,0 +1,125 @@ +# syntax=docker/dockerfile:1 + +# This file was generated using a Jinja2 template. +# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. + +# Using multistage build: +# https://docs.docker.com/develop/develop-images/multistage-build/ +# https://whitfin.io/speeding-up-rust-docker-builds/ +####################### VAULT BUILD IMAGE ####################### +# The web-vault digest specifies a particular web-vault build on Docker Hub. +# Using the digest instead of the tag name provides better security, +# as the digest of an image is immutable, whereas a tag name can later +# be changed to point to a malicious image. +# +# To verify the current digest for a given tag name: +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, +# click the tag name to view the digest of the image it currently points to. 
+# - From the command line: +# $ docker pull vaultwarden/web-vault:v2.25.0 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.0 +# [vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527] +# +# - Conversely, to get the tag name from the digest: +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 +# [vaultwarden/web-vault:v2.25.0] +# +FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault + +########################## BUILD IMAGE ########################## +FROM blackdex/rust-musl:aarch64-musl-nightly-2021-12-25 as build + + + +# Build time options to avoid dpkg warnings and help with reproducible builds. +ENV DEBIAN_FRONTEND=noninteractive \ + LANG=C.UTF-8 \ + TZ=UTC \ + TERM=xterm-256color \ + CARGO_HOME="/root/.cargo" \ + USER="root" + + +# Create CARGO_HOME folder and don't download rust docs +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ + && rustup set profile minimal + +ENV RUSTFLAGS='-C link-arg=-s' + +# Creates a dummy project used to grab dependencies +RUN USER=root cargo new --bin /app +WORKDIR /app + +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./ +COPY ./rust-toolchain ./rust-toolchain +COPY ./build.rs ./build.rs + +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add aarch64-unknown-linux-musl + +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + +# Builds your dependencies and removes the +# dummy project, except the target folder +# This folder contains the compiled dependencies +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl \ + && find . -not -path "./target*" -delete + +# Copies the complete project +# To avoid copying unneeded files, use .dockerignore +COPY . . + +# Make sure that we actually build the project +RUN touch src/main.rs + +# Builds again, this time it'll just be +# your actual source files being built +# hadolint ignore=DL3059 +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=aarch64-unknown-linux-musl + +######################## RUNTIME IMAGE ######################## +# Create a new stage with a minimal image +# because we already have a binary built +FROM balenalib/aarch64-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs + + +# hadolint ignore=DL3059 +RUN [ "cross-build-start" ] + +# Create data folder and Install needed libraries +RUN mkdir /data \ + && apk add --no-cache \ + openssl \ + tzdata \ + curl \ + dumb-init \ + ca-certificates + +# hadolint ignore=DL3059 +RUN [ "cross-build-end" ] + +VOLUME /data +EXPOSE 80 +EXPOSE 3012 + +# Copies the files from the context (Rocket.toml file and web-vault) +# and the binary from the "build" stage to the current stage +WORKDIR / +COPY Rocket.toml . +COPY --from=vault /web-vault ./web-vault +COPY --from=build /app/target/aarch64-unknown-linux-musl/release/vaultwarden . 
+ +COPY docker/healthcheck.sh /healthcheck.sh +COPY docker/start.sh /start.sh + +HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] + +# Configures the startup! +ENTRYPOINT ["/usr/bin/dumb-init", "--"] +CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile b/docker/armv6/Dockerfile index b1194687..1509bc5d 100644 --- a/docker/armv6/Dockerfile +++ b/docker/armv6/Dockerfile @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armel version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for armel architecture. -# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture armel \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture armel \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:armel \ libc6-dev:armel \ libpq5:armel \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:armel \ + libmariadb3:armel \ libmariadb-dev:armel \ libmariadb-dev-compat:armel \ gcc-arm-linux-gnueabi \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5:armel package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. 
- && ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \ && echo 'linker = "arm-linux-gnueabi-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabi"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" -ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi" +ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \ + OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN rustup target add arm-unknown-linux-gnueabi +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi # because we already have a binary built FROM balenalib/rpi-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/armv6/Dockerfile.alpine b/docker/armv6/Dockerfile.alpine new file mode 100644 index 00000000..796f9ad0 --- /dev/null +++ b/docker/armv6/Dockerfile.alpine @@ -0,0 +1,125 @@ +# syntax=docker/dockerfile:1 + +# This file was generated using a Jinja2 template. +# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. + +# Using multistage build: +# https://docs.docker.com/develop/develop-images/multistage-build/ +# https://whitfin.io/speeding-up-rust-docker-builds/ +####################### VAULT BUILD IMAGE ####################### +# The web-vault digest specifies a particular web-vault build on Docker Hub. +# Using the digest instead of the tag name provides better security, +# as the digest of an image is immutable, whereas a tag name can later +# be changed to point to a malicious image. +# +# To verify the current digest for a given tag name: +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, +# click the tag name to view the digest of the image it currently points to. 
+# - From the command line: +# $ docker pull vaultwarden/web-vault:v2.25.0 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.0 +# [vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527] +# +# - Conversely, to get the tag name from the digest: +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 +# [vaultwarden/web-vault:v2.25.0] +# +FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault + +########################## BUILD IMAGE ########################## +FROM blackdex/rust-musl:arm-musleabi-nightly-2021-12-25 as build + + + +# Build time options to avoid dpkg warnings and help with reproducible builds. +ENV DEBIAN_FRONTEND=noninteractive \ + LANG=C.UTF-8 \ + TZ=UTC \ + TERM=xterm-256color \ + CARGO_HOME="/root/.cargo" \ + USER="root" + + +# Create CARGO_HOME folder and don't download rust docs +RUN mkdir -pv "${CARGO_HOME}" \ + && rustup set profile minimal + +ENV RUSTFLAGS='-C link-arg=-s' + +# Creates a dummy project used to grab dependencies +RUN USER=root cargo new --bin /app +WORKDIR /app + +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./ +COPY ./rust-toolchain ./rust-toolchain +COPY ./build.rs ./build.rs + +RUN rustup target add arm-unknown-linux-musleabi + +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + +# Builds your dependencies and removes the +# dummy project, except the target folder +# This folder contains the compiled dependencies +RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi \ + && find . -not -path "./target*" -delete + +# Copies the complete project +# To avoid copying unneeded files, use .dockerignore +COPY . . + +# Make sure that we actually build the project +RUN touch src/main.rs + +# Builds again, this time it'll just be +# your actual source files being built +# hadolint ignore=DL3059 +RUN cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi + +######################## RUNTIME IMAGE ######################## +# Create a new stage with a minimal image +# because we already have a binary built +FROM balenalib/rpi-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs + + +# hadolint ignore=DL3059 +RUN [ "cross-build-start" ] + +# Create data folder and Install needed libraries +RUN mkdir /data \ + && apk add --no-cache \ + openssl \ + tzdata \ + curl \ + dumb-init \ + ca-certificates + +# hadolint ignore=DL3059 +RUN [ "cross-build-end" ] + +VOLUME /data +EXPOSE 80 +EXPOSE 3012 + +# Copies the files from the context (Rocket.toml file and web-vault) +# and the binary from the "build" stage to the current stage +WORKDIR / +COPY Rocket.toml . +COPY --from=vault /web-vault ./web-vault +COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden . + +COPY docker/healthcheck.sh /healthcheck.sh +COPY docker/start.sh /start.sh + +HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] + +# Configures the startup! 
+ENTRYPOINT ["/usr/bin/dumb-init", "--"] +CMD ["/start.sh"] diff --git a/docker/armv6/Dockerfile.buildx b/docker/armv6/Dockerfile.buildx index 45cb955f..343a4c54 100644 --- a/docker/armv6/Dockerfile.buildx +++ b/docker/armv6/Dockerfile.buildx @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armel version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for armel architecture. -# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture armel \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture armel \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:armel \ libc6-dev:armel \ libpq5:armel \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:armel \ + libmariadb3:armel \ libmariadb-dev:armel \ libmariadb-dev-compat:armel \ gcc-arm-linux-gnueabi \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5:armel package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. 
- && ln -sfnr /usr/lib/arm-linux-gnueabi/libpq.so.5 /usr/lib/arm-linux-gnueabi/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.arm-unknown-linux-gnueabi]' >> "${CARGO_HOME}/config" \ && echo 'linker = "arm-linux-gnueabi-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabi"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" -ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi" +ENV CC_arm_unknown_linux_gnueabi="/usr/bin/arm-linux-gnueabi-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabi" \ + OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabi" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add arm-unknown-linux-gnueabi +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-gnueabi ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. # because we already have a binary built FROM balenalib/rpi-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/armv6/Dockerfile.buildx.alpine b/docker/armv6/Dockerfile.buildx.alpine new file mode 100644 index 00000000..baa4f753 --- /dev/null +++ b/docker/armv6/Dockerfile.buildx.alpine @@ -0,0 +1,125 @@ +# syntax=docker/dockerfile:1 + +# This file was generated using a Jinja2 template. +# Please make your changes in `Dockerfile.j2` and then `make` the individual Dockerfiles. + +# Using multistage build: +# https://docs.docker.com/develop/develop-images/multistage-build/ +# https://whitfin.io/speeding-up-rust-docker-builds/ +####################### VAULT BUILD IMAGE ####################### +# The web-vault digest specifies a particular web-vault build on Docker Hub. +# Using the digest instead of the tag name provides better security, +# as the digest of an image is immutable, whereas a tag name can later +# be changed to point to a malicious image. +# +# To verify the current digest for a given tag name: +# - From https://hub.docker.com/r/vaultwarden/web-vault/tags, +# click the tag name to view the digest of the image it currently points to. 
+# - From the command line: +# $ docker pull vaultwarden/web-vault:v2.25.0 +# $ docker image inspect --format "{{.RepoDigests}}" vaultwarden/web-vault:v2.25.0 +# [vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527] +# +# - Conversely, to get the tag name from the digest: +# $ docker image inspect --format "{{.RepoTags}}" vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 +# [vaultwarden/web-vault:v2.25.0] +# +FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault + +########################## BUILD IMAGE ########################## +FROM blackdex/rust-musl:arm-musleabi-nightly-2021-12-25 as build + + + +# Build time options to avoid dpkg warnings and help with reproducible builds. +ENV DEBIAN_FRONTEND=noninteractive \ + LANG=C.UTF-8 \ + TZ=UTC \ + TERM=xterm-256color \ + CARGO_HOME="/root/.cargo" \ + USER="root" + + +# Create CARGO_HOME folder and don't download rust docs +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ + && rustup set profile minimal + +ENV RUSTFLAGS='-C link-arg=-s' + +# Creates a dummy project used to grab dependencies +RUN USER=root cargo new --bin /app +WORKDIR /app + +# Copies over *only* your manifests and build files +COPY ./Cargo.* ./ +COPY ./rust-toolchain ./rust-toolchain +COPY ./build.rs ./build.rs + +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add arm-unknown-linux-musleabi + +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + +# Builds your dependencies and removes the +# dummy project, except the target folder +# This folder contains the compiled dependencies +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi \ + && find . -not -path "./target*" -delete + +# Copies the complete project +# To avoid copying unneeded files, use .dockerignore +COPY . . + +# Make sure that we actually build the project +RUN touch src/main.rs + +# Builds again, this time it'll just be +# your actual source files being built +# hadolint ignore=DL3059 +RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=arm-unknown-linux-musleabi + +######################## RUNTIME IMAGE ######################## +# Create a new stage with a minimal image +# because we already have a binary built +FROM balenalib/rpi-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs + + +# hadolint ignore=DL3059 +RUN [ "cross-build-start" ] + +# Create data folder and Install needed libraries +RUN mkdir /data \ + && apk add --no-cache \ + openssl \ + tzdata \ + curl \ + dumb-init \ + ca-certificates + +# hadolint ignore=DL3059 +RUN [ "cross-build-end" ] + +VOLUME /data +EXPOSE 80 +EXPOSE 3012 + +# Copies the files from the context (Rocket.toml file and web-vault) +# and the binary from the "build" stage to the current stage +WORKDIR / +COPY Rocket.toml . +COPY --from=vault /web-vault ./web-vault +COPY --from=build /app/target/arm-unknown-linux-musleabi/release/vaultwarden . 
+ +COPY docker/healthcheck.sh /healthcheck.sh +COPY docker/start.sh /start.sh + +HEALTHCHECK --interval=60s --timeout=10s CMD ["/healthcheck.sh"] + +# Configures the startup! +ENTRYPOINT ["/usr/bin/dumb-init", "--"] +CMD ["/start.sh"] diff --git a/docker/armv7/Dockerfile b/docker/armv7/Dockerfile index c39500ee..b3deeade 100644 --- a/docker/armv7/Dockerfile +++ b/docker/armv7/Dockerfile @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armhf version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for armhf architecture. -# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture armhf \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture armhf \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:armhf \ libc6-dev:armhf \ libpq5:armhf \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:armhf \ + libmariadb3:armhf \ libmariadb-dev:armhf \ libmariadb-dev-compat:armhf \ gcc-arm-linux-gnueabihf \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5:armhf package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. 
- && ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \ && echo 'linker = "arm-linux-gnueabihf-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabihf"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" -ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf" +ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \ + OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN rustup target add armv7-unknown-linux-gnueabihf +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabih # because we already have a binary built FROM balenalib/armv7hf-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/armv7/Dockerfile.alpine b/docker/armv7/Dockerfile.alpine index 0b5fef71..0d9ffa27 100644 --- a/docker/armv7/Dockerfile.alpine +++ b/docker/armv7/Dockerfile.alpine @@ -27,11 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM messense/rust-musl-cross:armv7-musleabihf as build +FROM blackdex/rust-musl:armv7-musleabihf-nightly-2021-12-25 as build + -# Alpine-based ARM (musl) only supports sqlite during compile time. -# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed. -ARG DB=sqlite,vendored_openssl # Build time options to avoid dpkg warnings and help with reproducible builds. 
ENV DEBIAN_FRONTEND=noninteractive \ @@ -60,6 +58,9 @@ COPY ./build.rs ./build.rs RUN rustup target add armv7-unknown-linux-musleabihf +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -75,6 +76,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf # hadolint ignore=DL3059 RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden @@ -82,12 +84,13 @@ RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-alpine:3.14 +FROM balenalib/armv7hf-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 -ENV SSL_CERT_DIR=/etc/ssl/certs # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/armv7/Dockerfile.buildx b/docker/armv7/Dockerfile.buildx index e513f4e0..79b10b29 100644 --- a/docker/armv7/Dockerfile.buildx +++ b/docker/armv7/Dockerfile.buildx @@ -27,10 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM rust:1.55-buster as build +FROM rust:1.57-buster as build + -# Debian-based builds support multidb -ARG DB=sqlite,mysql,postgresql # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -45,51 +44,32 @@ ENV DEBIAN_FRONTEND=noninteractive \ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry mkdir -pv "${CARGO_HOME}" \ && rustup set profile minimal -# NOTE: Any apt-get/dpkg after this stage will fail because of broken dependencies. -# For Diesel-RS migrations_macros to compile with MySQL/MariaDB we need to do some magic. -# We at least need libmariadb3:amd64 installed for the x86_64 version of libmariadb.so (client) -# We also need the libmariadb-dev-compat:amd64 but it can not be installed together with the :armhf version. -# What we can do is a force install, because nothing important is overlapping each other. # # Install required build libs for armhf architecture. 
-# To compile both mysql and postgresql we need some extra packages for both host arch and target arch -RUN sed 's/^deb/deb-src/' /etc/apt/sources.list > /etc/apt/sources.list.d/deb-src.list \ - && dpkg --add-architecture armhf \ +# hadolint ignore=DL3059 +RUN dpkg --add-architecture armhf \ && apt-get update \ && apt-get install -y \ --no-install-recommends \ libssl-dev:armhf \ libc6-dev:armhf \ libpq5:armhf \ - libpq-dev \ - libmariadb3:amd64 \ + libpq-dev:armhf \ + libmariadb3:armhf \ libmariadb-dev:armhf \ libmariadb-dev-compat:armhf \ gcc-arm-linux-gnueabihf \ # - # Manual install libmariadb-dev-compat:amd64 ( After this broken dependencies will break apt ) - && apt-get download libmariadb-dev-compat:amd64 \ - && dpkg --force-all -i ./libmariadb-dev-compat*.deb \ - && rm -rvf ./libmariadb-dev-compat*.deb \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - # - # For Diesel-RS migrations_macros to compile with PostgreSQL we need to do some magic. - # The libpq5:armhf package seems to not provide a symlink to libpq.so.5 with the name libpq.so. - # This is only provided by the libpq-dev package which can't be installed for both arch at the same time. - # Without this specific file the ld command will fail and compilation fails with it. - && ln -sfnr /usr/lib/arm-linux-gnueabihf/libpq.so.5 /usr/lib/arm-linux-gnueabihf/libpq.so \ - # # Make sure cargo has the right target config && echo '[target.armv7-unknown-linux-gnueabihf]' >> "${CARGO_HOME}/config" \ && echo 'linker = "arm-linux-gnueabihf-gcc"' >> "${CARGO_HOME}/config" \ && echo 'rustflags = ["-L/usr/lib/arm-linux-gnueabihf"]' >> "${CARGO_HOME}/config" # Set arm specific environment values -ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" -ENV CROSS_COMPILE="1" -ENV OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" -ENV OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf" +ENV CC_armv7_unknown_linux_gnueabihf="/usr/bin/arm-linux-gnueabihf-gcc" \ + CROSS_COMPILE="1" \ + OPENSSL_INCLUDE_DIR="/usr/include/arm-linux-gnueabihf" \ + OPENSSL_LIB_DIR="/usr/lib/arm-linux-gnueabihf" # Creates a dummy project used to grab dependencies @@ -103,6 +83,9 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add armv7-unknown-linux-gnueabihf +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -118,6 +101,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-gnueabihf ######################## RUNTIME IMAGE ######################## @@ -125,9 +109,9 @@ RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/. 
# because we already have a binary built FROM balenalib/armv7hf-debian:buster -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/docker/armv7/Dockerfile.buildx.alpine b/docker/armv7/Dockerfile.buildx.alpine index bd7a6b1c..9875decc 100644 --- a/docker/armv7/Dockerfile.buildx.alpine +++ b/docker/armv7/Dockerfile.buildx.alpine @@ -27,11 +27,9 @@ FROM vaultwarden/web-vault@sha256:0df389deac9e83c739a1f4ff595f12f493b6c27cb4a22bb8fcaba9dc49b9b527 as vault ########################## BUILD IMAGE ########################## -FROM messense/rust-musl-cross:armv7-musleabihf as build +FROM blackdex/rust-musl:armv7-musleabihf-nightly-2021-12-25 as build + -# Alpine-based ARM (musl) only supports sqlite during compile time. -# We now also need to add vendored_openssl, because the current base image we use to build has OpenSSL removed. -ARG DB=sqlite,vendored_openssl # Build time options to avoid dpkg warnings and help with reproducible builds. ENV DEBIAN_FRONTEND=noninteractive \ @@ -60,6 +58,9 @@ COPY ./build.rs ./build.rs RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry rustup target add armv7-unknown-linux-musleabihf +# Configure the DB ARG as late as possible to not invalidate the cached layers above +ARG DB=sqlite,mysql,postgresql + # Builds your dependencies and removes the # dummy project, except the target folder # This folder contains the compiled dependencies @@ -75,6 +76,7 @@ RUN touch src/main.rs # Builds again, this time it'll just be # your actual source files being built +# hadolint ignore=DL3059 RUN --mount=type=cache,target=/root/.cargo/git --mount=type=cache,target=/root/.cargo/registry cargo build --features ${DB} --release --target=armv7-unknown-linux-musleabihf # hadolint ignore=DL3059 RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden @@ -82,12 +84,13 @@ RUN musl-strip target/armv7-unknown-linux-musleabihf/release/vaultwarden ######################## RUNTIME IMAGE ######################## # Create a new stage with a minimal image # because we already have a binary built -FROM balenalib/armv7hf-alpine:3.14 +FROM balenalib/armv7hf-alpine:3.15 + +ENV ROCKET_ENV="staging" \ + ROCKET_PORT=80 \ + ROCKET_WORKERS=10 \ + SSL_CERT_DIR=/etc/ssl/certs -ENV ROCKET_ENV "staging" -ENV ROCKET_PORT=80 -ENV ROCKET_WORKERS=10 -ENV SSL_CERT_DIR=/etc/ssl/certs # hadolint ignore=DL3059 RUN [ "cross-build-start" ] diff --git a/hooks/arches.sh b/hooks/arches.sh index 01a9e991..c27adf31 100644 --- a/hooks/arches.sh +++ b/hooks/arches.sh @@ -7,10 +7,5 @@ arches=( ) if [[ "${DOCKER_TAG}" == *alpine ]]; then - # The Alpine image build currently only works for certain arches. 
distro_suffix=.alpine
-    arches=(
-        amd64
-        armv7
-    )
 fi
diff --git a/rust-toolchain b/rust-toolchain
index a619163a..979286d9 100644
--- a/rust-toolchain
+++ b/rust-toolchain
@@ -1 +1 @@
-nightly-2021-11-05
+nightly-2021-12-25
diff --git a/src/error.rs b/src/error.rs
index 4d6b6207..8b0adace 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -119,11 +119,13 @@ impl Error {
         Empty {}.into()
     }
 
+    #[must_use]
     pub fn with_msg<M: Into<String>>(mut self, msg: M) -> Self {
         self.message = msg.into();
         self
     }
 
+    #[must_use]
     pub const fn with_code(mut self, code: u16) -> Self {
         self.error_code = code;
         self

From 4584cfe3c18820d8235628cdf629655abd601bc7 Mon Sep 17 00:00:00 2001
From: Jake Howard
Date: Tue, 28 Dec 2021 16:24:42 +0000
Subject: [PATCH 10/25] Additionally set expires header when caching responses

Browsers are rather smart, but also dumb. This uses the `Expires` header
alongside `cache-control` to better prompt the browser to actually cache.
Unfortunately, firefox still tries to "race" its own cache, in an attempt to
respond to requests faster, so still ends up making a bunch of requests which
could have been cached. Doesn't appear there's any way around this.
---
 Cargo.lock       |  1 +
 Cargo.toml       |  1 +
 src/api/icons.rs |  9 ++++++--
 src/api/web.rs   | 53 +++++++++++++++++++++++++-----------------------
 src/util.rs      | 51 ++++++++++++++++++++++++++++++++++------------
 5 files changed, 75 insertions(+), 40 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 8af4f0ea..1a048db5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3295,6 +3295,7 @@ dependencies = [
  "governor",
  "handlebars",
  "html5ever",
+ "httpdate",
  "idna 0.2.3",
  "job_scheduler",
  "jsonwebtoken",
diff --git a/Cargo.toml b/Cargo.toml
index f6ff7f68..82c96386 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -80,6 +80,7 @@ uuid = { version = "0.8.2", features = ["v4"] }
 chrono = { version = "0.4.19", features = ["serde"] }
 chrono-tz = "0.6.1"
 time = "0.2.27"
+httpdate = "1.0"
 
 # Job scheduler
 job_scheduler = "1.2.1"
diff --git a/src/api/icons.rs b/src/api/icons.rs
index 8d87b10a..3d1de094 100644
--- a/src/api/icons.rs
+++ b/src/api/icons.rs
@@ -103,14 +103,19 @@ fn icon_internal(domain: String) -> Cached<Content<Vec<u8>>> {
         return Cached::ttl(
             Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
             CONFIG.icon_cache_negttl(),
+            true,
         );
     }
 
     match get_icon(&domain) {
         Some((icon, icon_type)) => {
-            Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl())
+            Cached::ttl(Content(ContentType::new("image", icon_type), icon), CONFIG.icon_cache_ttl(), true)
         }
-        _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()),
+        _ => Cached::ttl(
+            Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()),
+            CONFIG.icon_cache_negttl(),
+            true,
+        ),
     }
 }
 
diff --git a/src/api/web.rs b/src/api/web.rs
index 9c960c27..154dc2cf 100644
--- a/src/api/web.rs
+++ b/src/api/web.rs
@@ -22,41 +22,44 @@ pub fn routes() -> Vec<Route> {
 
 #[get("/")]
 fn web_index() -> Cached<Option<NamedFile>> {
-    Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok())
+    Cached::short(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join("index.html")).ok(), false)
 }
 
 #[get("/app-id.json")]
 fn app_id() -> Cached<Content<Json<Value>>> {
     let content_type = ContentType::new("application", "fido.trusted-apps+json");
 
-    Cached::long(Content(
-        content_type,
-        Json(json!({
-            "trustedFacets": [
-                {
-                    "version": { "major": 1, "minor": 0 },
-                    "ids": [
-                        // Per :
-                        //
-                        // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
-                        // of the web page triggering the FIDO operation, written as
-                        // a URI with an empty path. Default ports are omitted and any
-                        // path component is ignored."
-                        //
-                        // This leaves it unclear as to whether the path must be empty,
-                        // or whether it can be non-empty and will be ignored. To be on
-                        // the safe side, use a proper web origin (with empty path).
-                        &CONFIG.domain_origin(),
-                        "ios:bundle-id:com.8bit.bitwarden",
-                        "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
-                }]
-        })),
-    ))
+    Cached::long(
+        Content(
+            content_type,
+            Json(json!({
+                "trustedFacets": [
+                    {
+                        "version": { "major": 1, "minor": 0 },
+                        "ids": [
+                            // Per :
+                            //
+                            // "In the Web case, the FacetID MUST be the Web Origin [RFC6454]
+                            // of the web page triggering the FIDO operation, written as
+                            // a URI with an empty path. Default ports are omitted and any
+                            // path component is ignored."
+                            //
+                            // This leaves it unclear as to whether the path must be empty,
+                            // or whether it can be non-empty and will be ignored. To be on
+                            // the safe side, use a proper web origin (with empty path).
+                            &CONFIG.domain_origin(),
+                            "ios:bundle-id:com.8bit.bitwarden",
+                            "android:apk-key-hash:dUGFzUzf3lmHSLBDBIv+WaFyZMI" ]
+                    }]
+            })),
+        ),
+        true,
+    )
 }
 
 #[get("/<p..>", rank = 10)] // Only match this if the other routes don't match
 fn web_files(p: PathBuf) -> Cached<Option<NamedFile>> {
-    Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok())
+    Cached::long(NamedFile::open(Path::new(&CONFIG.web_vault_folder()).join(p)).ok(), true)
 }
 
 #[get("/attachments/<uuid>/<file_id>")]
diff --git a/src/util.rs b/src/util.rs
index 2e47077b..aacdd868 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -11,6 +11,10 @@ use rocket::{
     Data, Request, Response, Rocket,
 };
 
+use httpdate::HttpDate;
+use std::thread::sleep;
+use std::time::{Duration, SystemTime};
+
 use crate::CONFIG;
 
 pub struct AppHeaders();
@@ -99,29 +103,52 @@ impl Fairing for Cors {
     }
 }
 
-pub struct Cached(R, String);
+pub struct Cached<R> {
+    response: R,
+    is_immutable: bool,
+    ttl: u64,
+}
 
 impl<R> Cached<R> {
-    pub fn long(r: R) -> Cached<R> {
-        // 7 days
-        Self::ttl(r, 604800)
+    pub fn long(response: R, is_immutable: bool) -> Cached<R> {
+        Self {
+            response,
+            is_immutable,
+            ttl: 604800, // 7 days
+        }
     }
 
-    pub fn short(r: R) -> Cached<R> {
-        // 10 minutes
-        Self(r, String::from("public, max-age=600"))
+    pub fn short(response: R, is_immutable: bool) -> Cached<R> {
+        Self {
+            response,
+            is_immutable,
+            ttl: 600, // 10 minutes
+        }
     }
 
-    pub fn ttl(r: R, ttl: u64) -> Cached<R> {
-        Self(r, format!("public, immutable, max-age={}", ttl))
+    pub fn ttl(response: R, ttl: u64, is_immutable: bool) -> Cached<R> {
+        Self {
+            response,
+            is_immutable,
+            ttl: ttl,
+        }
     }
 }
 
 impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
     fn respond_to(self, req: &Request) -> response::Result<'r> {
-        match self.0.respond_to(req) {
+        let cache_control_header = if self.is_immutable {
+            format!("public, immutable, max-age={}", self.ttl)
+        } else {
+            format!("public, max-age={}", self.ttl)
+        };
+
+        let time_now = SystemTime::now();
+
+        match self.response.respond_to(req) {
             Ok(mut res) => {
-                res.set_raw_header("Cache-Control", self.1);
+                res.set_raw_header("Cache-Control", cache_control_header);
+                res.set_raw_header("Expires", HttpDate::from(time_now + Duration::from_secs(self.ttl)).to_string());
                 Ok(res)
             }
             e @ Err(_) => e,
@@ -551,8 +578,6 @@ where
     }
 }
 
-use std::{thread::sleep, time::Duration};
-
 pub fn retry_db<F, T, E>(func: F, max_tries: u32) -> Result<T, E>
 where
     F: Fn() -> Result<T, E>,

From 248e7dabc2e389ac67e77c1c6078259c29b7f997 Mon Sep 17 00:00:00 2001
From: Jake Howard
Date: Tue, 28 Dec 2021 21:54:09 +0000
Subject: [PATCH 11/25] Collapse field name definition
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Daniel García
---
 src/util.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/util.rs b/src/util.rs
index aacdd868..1d70e097 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -130,7 +130,7 @@ impl<R> Cached<R> {
         Self {
             response,
             is_immutable,
-            ttl: ttl,
+            ttl,
         }
     }
 }

From 690d0ed1bb3c9d3b22e3b50aebbcd4bb3b3764d0 Mon Sep 17 00:00:00 2001
From: Jake Howard
Date: Wed, 29 Dec 2021 16:17:38 +0000
Subject: [PATCH 12/25] Add our own HTTP date formatter

---
 Cargo.lock  |  1 -
 Cargo.toml  |  1 -
 src/util.rs | 19 +++++++++++++++----
 3 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 1a048db5..8af4f0ea 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3295,7 +3295,6 @@ dependencies = [
  "governor",
  "handlebars",
  "html5ever",
- "httpdate",
  "idna 0.2.3",
  "job_scheduler",
  "jsonwebtoken",
diff --git a/Cargo.toml b/Cargo.toml
index 82c96386..f6ff7f68 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -80,7 +80,6 @@ uuid = { version = "0.8.2", features = ["v4"] }
 chrono = { version = "0.4.19", features = ["serde"] }
 chrono-tz = "0.6.1"
 time = "0.2.27"
-httpdate = "1.0"
 
 # Job scheduler
 job_scheduler = "1.2.1"
diff --git a/src/util.rs b/src/util.rs
index 1d70e097..0b287ccf 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -11,9 +11,8 @@ use rocket::{
     Data, Request, Response, Rocket,
 };
 
-use httpdate::HttpDate;
 use std::thread::sleep;
-use std::time::{Duration, SystemTime};
+use std::time::Duration;
 
 use crate::CONFIG;
 
@@ -143,12 +142,13 @@ impl<'r, R: Responder<'r>> Responder<'r> for Cached<R> {
             format!("public, max-age={}", self.ttl)
         };
 
-        let time_now = SystemTime::now();
+        let time_now = chrono::Local::now();
 
         match self.response.respond_to(req) {
             Ok(mut res) => {
                 res.set_raw_header("Cache-Control", cache_control_header);
-                res.set_raw_header("Expires", HttpDate::from(time_now + Duration::from_secs(self.ttl)).to_string());
+                let expiry_time = time_now + chrono::Duration::seconds(self.ttl.try_into().unwrap());
+                res.set_raw_header("Expires", format_datetime_http(&expiry_time));
                 Ok(res)
             }
             e @ Err(_) => e,
@@ -436,6 +436,17 @@ pub fn format_naive_datetime_local(dt: &NaiveDateTime, fmt: &str) -> String {
     format_datetime_local(&Local.from_utc_datetime(dt), fmt)
 }
 
+/// Formats a `DateTime<Local>` as required for HTTP
+///
+/// https://httpwg.org/specs/rfc7231.html#http.date
+pub fn format_datetime_http(dt: &DateTime<Local>) -> String {
+    let expiry_time: chrono::DateTime<chrono::Utc> = chrono::DateTime::from_utc(dt.naive_utc(), chrono::Utc);
+
+    // HACK: HTTP expects the date to always be GMT (UTC) rather than giving an
+    // offset (which would always be 0 in UTC anyway)
+    return expiry_time.to_rfc2822().replace("+0000", "GMT");
+}
+
 //
 // Deployment environment methods
 //

From 6ddbe84bde04c7ec8b52e9641b490adedd8b22a2 Mon Sep 17 00:00:00 2001
From: Jake Howard
Date: Wed, 29 Dec 2021 16:29:42 +0000
Subject: [PATCH 13/25] Remove unnecessary return

---
 src/util.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/util.rs b/src/util.rs
index 0b287ccf..1a5e674b 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -444,7 +444,7 @@ pub fn format_datetime_http(dt: &DateTime<Local>) -> String {
 
     // HACK: HTTP expects the date to always be GMT (UTC) rather than giving an
     // offset (which would always be 0 in UTC anyway)
-    return expiry_time.to_rfc2822().replace("+0000", "GMT");
+    expiry_time.to_rfc2822().replace("+0000", "GMT")
 }
 
 //

From 
b7eedbcddc5bc52391472329fa6a43a2c9ae0a94 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Wed, 29 Dec 2021 18:01:32 -0800 Subject: [PATCH 14/25] Add config option to set the HTTP redirect code for external icons The default code is 307 (temporary) to make it easier to test different icon services, but once a service has been decided on, users should ideally switch to using permanent redirects for cacheability. --- .env.template | 9 ++++++++- src/api/icons.rs | 9 ++++++++- src/config.rs | 13 ++++++++++++- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/.env.template b/.env.template index 7fcbbfcb..fecac220 100644 --- a/.env.template +++ b/.env.template @@ -135,13 +135,20 @@ ## which is replaced with the domain. For example: `https://icon.example.com/domain/{}`. ## ## `internal` refers to Vaultwarden's built-in icon fetching implementation. -## If an external service is set, an icon request to Vaultwarden will return an HTTP 307 +## If an external service is set, an icon request to Vaultwarden will return an HTTP ## redirect to the corresponding icon at the external service. An external service may ## be useful if your Vaultwarden instance has no external network connectivity, or if ## you are concerned that someone may probe your instance to try to detect whether icons ## for certain sites have been cached. # ICON_SERVICE=internal +## Icon redirect code +## The HTTP status code to use for redirects to an external icon service. +## The supported codes are 307 (temporary) and 308 (permanent). +## Temporary redirects are useful while testing different icon services, but once a service +## has been decided on, consider using permanent redirects for cacheability. +# ICON_REDIRECT_CODE=307 + ## Disable icon downloading ## Set to true to disable icon downloading in the internal icon service. ## This still serves existing icons from $ICON_CACHE_FOLDER, without generating any external diff --git a/src/api/icons.rs b/src/api/icons.rs index 3d1de094..4e8c753a 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -71,7 +71,14 @@ fn icon_redirect(domain: &str, template: &str) -> Option { } let url = template.replace("{}", domain); - Some(Redirect::temporary(url)) + match CONFIG.icon_redirect_code() { + 308 => Some(Redirect::permanent(url)), + 307 => Some(Redirect::temporary(url)), + _ => { + error!("Unexpected redirect code {}", CONFIG.icon_redirect_code()); + None + } + } } #[get("//icon.png")] diff --git a/src/config.rs b/src/config.rs index 5bbe8575..9554aee3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -454,9 +454,14 @@ make_config! { /// To specify a custom icon service, set a URL template with exactly one instance of `{}`, /// which is replaced with the domain. For example: `https://icon.example.com/domain/{}`. /// `internal` refers to Vaultwarden's built-in icon fetching implementation. If an external - /// service is set, an icon request to Vaultwarden will return an HTTP 307 redirect to the + /// service is set, an icon request to Vaultwarden will return an HTTP redirect to the /// corresponding icon at the external service. icon_service: String, false, def, "internal".to_string(); + /// Icon redirect code |> The HTTP status code to use for redirects to an external icon service. + /// The supported codes are 307 (temporary) and 308 (permanent). + /// Temporary redirects are useful while testing different icon services, but once a service + /// has been decided on, consider using permanent redirects for cacheability. 
+ icon_redirect_code: u32, true, def, 307; /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded icon_cache_ttl: u64, true, def, 2_592_000; /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again. @@ -693,6 +698,12 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { } } + // Check if the icon redirect code is valid + match cfg.icon_redirect_code { + 307 | 308 => (), + _ => err!("Only HTTP 307/308 redirects are supported"), + } + Ok(()) } From 1fa178d1d3124aad5a11635297f13ff33d2fc062 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Fri, 31 Dec 2021 11:53:21 +0100 Subject: [PATCH 15/25] Fixed issue #2154 For emergency access invitations we need to check if invites are allowed, not if sign-ups are allowed. --- src/api/core/emergency_access.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api/core/emergency_access.rs b/src/api/core/emergency_access.rs index 6459829f..8ad1fdd1 100644 --- a/src/api/core/emergency_access.rs +++ b/src/api/core/emergency_access.rs @@ -182,7 +182,7 @@ fn send_invite(data: JsonUpcase, headers: Headers, co let grantee_user = match User::find_by_mail(&email, &conn) { None => { - if !CONFIG.signups_allowed() { + if !CONFIG.invitations_allowed() { err!(format!("Grantee user does not exist: {}", email)) } From bf5aefd12976cef55276e00db770710b22d7b62f Mon Sep 17 00:00:00 2001 From: BlackDex Date: Fri, 31 Dec 2021 15:59:58 +0100 Subject: [PATCH 16/25] Fix issue with Bitwarden CLI. The CLI seems to send a String instead of an Integer for the maximum access count. It now accepts both types and converts it to an i32 in all cases. Fixes #2196 --- src/api/core/sends.rs | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/src/api/core/sends.rs b/src/api/core/sends.rs index aa4fd792..72437f15 100644 --- a/src/api/core/sends.rs +++ b/src/api/core/sends.rs @@ -7,7 +7,7 @@ use rocket_contrib::json::Json; use serde_json::Value; use crate::{ - api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, UpdateType}, + api::{ApiResult, EmptyResult, JsonResult, JsonUpcase, Notify, NumberOrString, UpdateType}, auth::{Headers, Host}, db::{models::*, DbConn, DbPool}, util::SafeString, @@ -42,21 +42,21 @@ pub fn purge_sends(pool: DbPool) { #[derive(Deserialize)] #[allow(non_snake_case)] -pub struct SendData { - pub Type: i32, - pub Key: String, - pub Password: Option, - pub MaxAccessCount: Option, - pub ExpirationDate: Option>, - pub DeletionDate: DateTime, - pub Disabled: bool, - pub HideEmail: Option, +struct SendData { + Type: i32, + Key: String, + Password: Option, + MaxAccessCount: Option, + ExpirationDate: Option>, + DeletionDate: DateTime, + Disabled: bool, + HideEmail: Option, // Data field - pub Name: String, - pub Notes: Option, - pub Text: Option, - pub File: Option, + Name: String, + Notes: Option, + Text: Option, + File: Option, } /// Enforces the `Disable Send` policy. 
A non-owner/admin user belonging to @@ -119,7 +119,10 @@ fn create_send(data: SendData, user_uuid: String) -> ApiResult { let mut send = Send::new(data.Type, data.Name, data_str, data.Key, data.DeletionDate.naive_utc()); send.user_uuid = Some(user_uuid); send.notes = data.Notes; - send.max_access_count = data.MaxAccessCount; + send.max_access_count = match data.MaxAccessCount { + Some(m) => Some(m.into_i32()?), + _ => None, + }; send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); send.disabled = data.Disabled; send.hide_email = data.HideEmail; @@ -414,7 +417,10 @@ fn put_send(id: String, data: JsonUpcase, headers: Headers, conn: DbCo send.akey = data.Key; send.deletion_date = data.DeletionDate.naive_utc(); send.notes = data.Notes; - send.max_access_count = data.MaxAccessCount; + send.max_access_count = match data.MaxAccessCount { + Some(m) => Some(m.into_i32()?), + _ => None, + }; send.expiration_date = data.ExpirationDate.map(|d| d.naive_utc()); send.hide_email = data.HideEmail; send.disabled = data.Disabled; From 76b7de15de7211684e05deacbbaa3e3bfd3ee051 Mon Sep 17 00:00:00 2001 From: BlackDex Date: Fri, 7 Jan 2022 18:55:48 +0100 Subject: [PATCH 17/25] Fix emergency access invites for new users If a new user gets invited it should check if the user is invited via emergency access, if so, allow that user to register. --- src/api/core/accounts.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index e3ebcde0..66de801f 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -87,14 +87,11 @@ fn register(data: JsonUpcase, conn: DbConn) -> EmptyResult { user_org.status = UserOrgStatus::Accepted as i32; user_org.save(&conn)?; } - + user + } else if EmergencyAccess::find_invited_by_grantee_email(&email, &conn).is_some() { user } else if CONFIG.is_signup_allowed(&email) { - // check if it's invited by emergency contact - match EmergencyAccess::find_invited_by_grantee_email(&data.Email, &conn) { - Some(_) => user, - _ => err!("Account with this email already exists"), - } + err!("Account with this email already exists") } else { err!("Registration not allowed or user already exists") } From a16c65677006f1b7b50d508a173a71d94bbf8753 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Sat, 8 Jan 2022 23:40:35 -0800 Subject: [PATCH 18/25] Add support for legacy HTTP 301/302 redirects for external icons At least on Android, it seems the Bitwarden mobile client responds to HTTP 307, but not to HTTP 308 for some reason. --- .env.template | 7 ++++--- src/api/icons.rs | 4 +++- src/config.rs | 11 ++++++----- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/.env.template b/.env.template index fecac220..1f4c937f 100644 --- a/.env.template +++ b/.env.template @@ -144,10 +144,11 @@ ## Icon redirect code ## The HTTP status code to use for redirects to an external icon service. -## The supported codes are 307 (temporary) and 308 (permanent). +## The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent). ## Temporary redirects are useful while testing different icon services, but once a service -## has been decided on, consider using permanent redirects for cacheability. -# ICON_REDIRECT_CODE=307 +## has been decided on, consider using permanent redirects for cacheability. The legacy codes +## are currently better supported by the Bitwarden clients. 
+# ICON_REDIRECT_CODE=302 ## Disable icon downloading ## Set to true to disable icon downloading in the internal icon service. diff --git a/src/api/icons.rs b/src/api/icons.rs index 4e8c753a..a2e5cc3a 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -72,8 +72,10 @@ fn icon_redirect(domain: &str, template: &str) -> Option { let url = template.replace("{}", domain); match CONFIG.icon_redirect_code() { - 308 => Some(Redirect::permanent(url)), + 301 => Some(Redirect::moved(url)), // legacy permanent redirect + 302 => Some(Redirect::found(url)), // legacy temporary redirect 307 => Some(Redirect::temporary(url)), + 308 => Some(Redirect::permanent(url)), _ => { error!("Unexpected redirect code {}", CONFIG.icon_redirect_code()); None diff --git a/src/config.rs b/src/config.rs index 9554aee3..92fe8b9d 100644 --- a/src/config.rs +++ b/src/config.rs @@ -458,10 +458,11 @@ make_config! { /// corresponding icon at the external service. icon_service: String, false, def, "internal".to_string(); /// Icon redirect code |> The HTTP status code to use for redirects to an external icon service. - /// The supported codes are 307 (temporary) and 308 (permanent). + /// The supported codes are 301 (legacy permanent), 302 (legacy temporary), 307 (temporary), and 308 (permanent). /// Temporary redirects are useful while testing different icon services, but once a service - /// has been decided on, consider using permanent redirects for cacheability. - icon_redirect_code: u32, true, def, 307; + /// has been decided on, consider using permanent redirects for cacheability. The legacy codes + /// are currently better supported by the Bitwarden clients. + icon_redirect_code: u32, true, def, 302; /// Positive icon cache expiry |> Number of seconds to consider that an already cached icon is fresh. After this period, the icon will be redownloaded icon_cache_ttl: u64, true, def, 2_592_000; /// Negative icon cache expiry |> Number of seconds before trying to download an icon that failed again. 
@@ -700,8 +701,8 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { // Check if the icon redirect code is valid match cfg.icon_redirect_code { - 307 | 308 => (), - _ => err!("Only HTTP 307/308 redirects are supported"), + 301 | 302 | 307 | 308 => (), + _ => err!("Only HTTP 301/302 and 307/308 redirects are supported"), } Ok(()) From e4e16ed50f7e51a42cb1d6811be3f93ee04d3e45 Mon Sep 17 00:00:00 2001 From: iamdoubz <4871781+iamdoubz@users.noreply.github.com> Date: Tue, 11 Jan 2022 10:39:02 -0600 Subject: [PATCH 19/25] Upgrade Feature-Policy to Permissions-Policy Convert old, soon to be defunct, Feature-Policy with its replacement Permissions-Policy --- src/util.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.rs b/src/util.rs index 1a5e674b..4defc63f 100644 --- a/src/util.rs +++ b/src/util.rs @@ -27,7 +27,7 @@ impl Fairing for AppHeaders { } fn on_response(&self, _req: &Request, res: &mut Response) { - res.set_raw_header("Feature-Policy", "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; encrypted-media 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture 'none'; sync-xhr 'self' https://haveibeenpwned.com https://2fa.directory; usb 'none'; vr 'none'"); + res.set_raw_header("Permissions-Policy", "accelerometer=(), ambient-light-sensor=(), autoplay=(), camera=(), encrypted-media=(), fullscreen=(), geolocation=(), gyroscope=(), magnetometer=(), microphone=(), midi=(), payment=(), picture-in-picture=(), sync-xhr=(self \"https://haveibeenpwned.com\" \"https://2fa.directory\"), usb=(), vr=()"); res.set_raw_header("Referrer-Policy", "same-origin"); res.set_raw_header("X-Frame-Options", "SAMEORIGIN"); res.set_raw_header("X-Content-Type-Options", "nosniff"); From 69ee4a70b453b81b96347fb299bf5c14f1038156 Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Wed, 19 Jan 2022 02:51:26 -0800 Subject: [PATCH 20/25] Add support for API keys This is mainly useful for CLI-based login automation. 
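With this change, a client can authenticate non-interactively by sending a `client_credentials` grant with `user.<uuid>` as the `client_id` and the generated API key as the `client_secret`. The following is an illustrative sketch only, not part of this patch set: it assumes the token endpoint is mounted at `/identity/connect/token` as in upstream Bitwarden, that `reqwest` (blocking + json features) and `serde_json` are available, and that the `device_*` values can be arbitrary placeholders.

    // Illustrative sketch (assumptions noted above): exchange a personal API key
    // for an access token via the client_credentials grant added in this patch.
    use std::collections::HashMap;

    fn login_with_api_key(
        base_url: &str,
        client_id: &str,     // e.g. "user.<user uuid>"
        client_secret: &str, // the 30-character API key
    ) -> Result<serde_json::Value, reqwest::Error> {
        let mut form = HashMap::new();
        form.insert("grant_type", "client_credentials");
        form.insert("scope", "api");
        form.insert("client_id", client_id);
        form.insert("client_secret", client_secret);
        // Placeholder device fields; real clients send their own identifiers.
        form.insert("device_type", "7");
        form.insert("device_identifier", "00000000-0000-0000-0000-000000000000");
        form.insert("device_name", "api-key-example");

        reqwest::blocking::Client::new()
            .post(&format!("{}/identity/connect/token", base_url))
            .form(&form)
            .send()?
            .error_for_status()?
            .json()
    }

On success, the JSON body carries `access_token`, `expires_in`, `Key`, and `PrivateKey`, matching the fields assembled in `_api_key_login` below.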
--- .../2022-01-17-234911_add_api_key/down.sql | 0 .../2022-01-17-234911_add_api_key/up.sql | 2 + .../2022-01-17-234911_add_api_key/down.sql | 0 .../2022-01-17-234911_add_api_key/up.sql | 2 + .../2022-01-17-234911_add_api_key/down.sql | 0 .../2022-01-17-234911_add_api_key/up.sql | 2 + src/api/core/accounts.rs | 39 ++++++++- src/api/identity.rs | 86 ++++++++++++++++++- src/crypto.rs | 28 ++++++ src/db/models/user.rs | 9 +- src/db/schemas/mysql/schema.rs | 1 + src/db/schemas/postgresql/schema.rs | 1 + src/db/schemas/sqlite/schema.rs | 1 + 13 files changed, 164 insertions(+), 7 deletions(-) create mode 100644 migrations/mysql/2022-01-17-234911_add_api_key/down.sql create mode 100644 migrations/mysql/2022-01-17-234911_add_api_key/up.sql create mode 100644 migrations/postgresql/2022-01-17-234911_add_api_key/down.sql create mode 100644 migrations/postgresql/2022-01-17-234911_add_api_key/up.sql create mode 100644 migrations/sqlite/2022-01-17-234911_add_api_key/down.sql create mode 100644 migrations/sqlite/2022-01-17-234911_add_api_key/up.sql diff --git a/migrations/mysql/2022-01-17-234911_add_api_key/down.sql b/migrations/mysql/2022-01-17-234911_add_api_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/mysql/2022-01-17-234911_add_api_key/up.sql b/migrations/mysql/2022-01-17-234911_add_api_key/up.sql new file mode 100644 index 00000000..85ac377b --- /dev/null +++ b/migrations/mysql/2022-01-17-234911_add_api_key/up.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +ADD COLUMN api_key VARCHAR(255); diff --git a/migrations/postgresql/2022-01-17-234911_add_api_key/down.sql b/migrations/postgresql/2022-01-17-234911_add_api_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/postgresql/2022-01-17-234911_add_api_key/up.sql b/migrations/postgresql/2022-01-17-234911_add_api_key/up.sql new file mode 100644 index 00000000..b1117556 --- /dev/null +++ b/migrations/postgresql/2022-01-17-234911_add_api_key/up.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +ADD COLUMN api_key TEXT; diff --git a/migrations/sqlite/2022-01-17-234911_add_api_key/down.sql b/migrations/sqlite/2022-01-17-234911_add_api_key/down.sql new file mode 100644 index 00000000..e69de29b diff --git a/migrations/sqlite/2022-01-17-234911_add_api_key/up.sql b/migrations/sqlite/2022-01-17-234911_add_api_key/up.sql new file mode 100644 index 00000000..b1117556 --- /dev/null +++ b/migrations/sqlite/2022-01-17-234911_add_api_key/up.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +ADD COLUMN api_key TEXT; diff --git a/src/api/core/accounts.rs b/src/api/core/accounts.rs index e3ebcde0..1b16ae64 100644 --- a/src/api/core/accounts.rs +++ b/src/api/core/accounts.rs @@ -34,6 +34,8 @@ pub fn routes() -> Vec { password_hint, prelogin, verify_password, + api_key, + rotate_api_key, ] } @@ -647,15 +649,17 @@ fn prelogin(data: JsonUpcase, conn: DbConn) -> Json { "KdfIterations": kdf_iter })) } + +// https://github.com/bitwarden/server/blob/master/src/Api/Models/Request/Accounts/SecretVerificationRequestModel.cs #[derive(Deserialize)] #[allow(non_snake_case)] -struct VerifyPasswordData { +struct SecretVerificationRequest { MasterPasswordHash: String, } #[post("/accounts/verify-password", data = "")] -fn verify_password(data: JsonUpcase, headers: Headers) -> EmptyResult { - let data: VerifyPasswordData = data.into_inner().data; +fn verify_password(data: JsonUpcase, headers: Headers) -> EmptyResult { + let data: SecretVerificationRequest = data.into_inner().data; let user = headers.user; if 
!user.check_valid_password(&data.MasterPasswordHash) { @@ -664,3 +668,32 @@ fn verify_password(data: JsonUpcase, headers: Headers) -> Em Ok(()) } + +fn _api_key(data: JsonUpcase, rotate: bool, headers: Headers, conn: DbConn) -> JsonResult { + let data: SecretVerificationRequest = data.into_inner().data; + let mut user = headers.user; + + if !user.check_valid_password(&data.MasterPasswordHash) { + err!("Invalid password") + } + + if rotate || user.api_key.is_none() { + user.api_key = Some(crypto::generate_api_key()); + user.save(&conn).expect("Error saving API key"); + } + + Ok(Json(json!({ + "ApiKey": user.api_key, + "Object": "apiKey", + }))) +} + +#[post("/accounts/api-key", data = "")] +fn api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + _api_key(data, false, headers, conn) +} + +#[post("/accounts/rotate-api-key", data = "")] +fn rotate_api_key(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonResult { + _api_key(data, true, headers, conn) +} diff --git a/src/api/identity.rs b/src/api/identity.rs index 3cb26ba3..49310930 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -43,6 +43,13 @@ fn login(data: Form, conn: DbConn, ip: ClientIp) -> JsonResult { _password_login(data, conn, &ip) } + "client_credentials" => { + _check_is_some(&data.client_id, "client_id cannot be blank")?; + _check_is_some(&data.client_secret, "client_secret cannot be blank")?; + _check_is_some(&data.scope, "scope cannot be blank")?; + + _api_key_login(data, conn, &ip) + } t => err!("Invalid type", t), } } @@ -178,6 +185,75 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult Ok(Json(result)) } +fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult { + // Validate scope + let scope = data.scope.as_ref().unwrap(); + if scope != "api" { + err!("Scope not supported") + } + + // Ratelimit the login + crate::ratelimit::check_limit_login(&ip.ip)?; + + // Get the user via the client_id + let client_id = data.client_id.as_ref().unwrap(); + let user_uuid = match client_id.strip_prefix("user.") { + Some(uuid) => uuid, + None => err!("Malformed client_id", format!("IP: {}.", ip.ip)), + }; + let user = match User::find_by_uuid(user_uuid, &conn) { + Some(user) => user, + None => err!("Invalid client_id", format!("IP: {}.", ip.ip)), + }; + + // Check if the user is disabled + if !user.enabled { + err!("This user has been disabled (API key login)", format!("IP: {}. Username: {}.", ip.ip, user.email)) + } + + // Check API key. Note that API key logins bypass 2FA. + let client_secret = data.client_secret.as_ref().unwrap(); + if !user.check_valid_api_key(client_secret) { + err!("Incorrect client_secret", format!("IP: {}. Username: {}.", ip.ip, user.email)) + } + + let (mut device, new_device) = get_device(&data, &conn, &user); + + if CONFIG.mail_enabled() && new_device { + let now = Utc::now().naive_utc(); + if let Err(e) = mail::send_new_device_logged_in(&user.email, &ip.ip.to_string(), &now, &device.name) { + error!("Error sending new device email: {:#?}", e); + + if CONFIG.require_device_email() { + err!("Could not send login notification email. Please contact your administrator.") + } + } + } + + // Common + let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); + + let (access_token, expires_in) = device.refresh_tokens(&user, orgs); + device.save(&conn)?; + + info!("User {} logged in successfully via API key. 
IP: {}", user.email, ip.ip); + + Ok(Json(json!({ + "access_token": access_token, + "expires_in": expires_in, + "token_type": "Bearer", + "refresh_token": device.refresh_token, + "Key": user.akey, + "PrivateKey": user.private_key, + + "Kdf": user.client_kdf_type, + "KdfIterations": user.client_kdf_iter, + "ResetMasterPassword": false, // TODO: Same as above + "scope": "api", + "unofficialServer": true, + }))) +} + /// Retrieves an existing device or creates a new device from ConnectData and the User fn get_device(data: &ConnectData, conn: &DbConn, user: &User) -> (Device, bool) { // On iOS, device_type sends "iOS", on others it sends a number @@ -374,17 +450,20 @@ fn _json_err_twofactor(providers: &[i32], user_uuid: &str, conn: &DbConn) -> Api Ok(result) } +// https://github.com/bitwarden/jslib/blob/master/common/src/models/request/tokenRequest.ts // https://github.com/bitwarden/mobile/blob/master/src/Core/Models/Request/TokenRequest.cs #[derive(Debug, Clone, Default)] #[allow(non_snake_case)] struct ConnectData { - grant_type: String, // refresh_token, password + // refresh_token, password, client_credentials (API key) + grant_type: String, // Needed for grant_type="refresh_token" refresh_token: Option, - // Needed for grant_type="password" - client_id: Option, // web, cli, desktop, browser, mobile + // Needed for grant_type = "password" | "client_credentials" + client_id: Option, // web, cli, desktop, browser, mobile + client_secret: Option, // API key login (cli only) password: Option, scope: Option, username: Option, @@ -414,6 +493,7 @@ impl<'f> FromForm<'f> for ConnectData { "granttype" => form.grant_type = value, "refreshtoken" => form.refresh_token = Some(value), "clientid" => form.client_id = Some(value), + "clientsecret" => form.client_secret = Some(value), "password" => form.password = Some(value), "scope" => form.scope = Some(value), "username" => form.username = Some(value), diff --git a/src/crypto.rs b/src/crypto.rs index 61e55649..e30439fc 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -51,6 +51,28 @@ pub fn get_random(mut array: Vec) -> Vec { array } +/// Generates a random string over a specified alphabet. +pub fn get_random_string(alphabet: &[u8], num_chars: usize) -> String { + // Ref: https://rust-lang-nursery.github.io/rust-cookbook/algorithms/randomness.html + use rand::Rng; + let mut rng = rand::thread_rng(); + + (0..num_chars) + .map(|_| { + let i = rng.gen_range(0..alphabet.len()); + alphabet[i] as char + }) + .collect() +} + +/// Generates a random alphanumeric string. +pub fn get_random_string_alphanum(num_chars: usize) -> String { + const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + 0123456789"; + get_random_string(ALPHABET, num_chars) +} + pub fn generate_id(num_bytes: usize) -> String { HEXLOWER.encode(&get_random(vec![0; num_bytes])) } @@ -84,6 +106,12 @@ pub fn generate_token(token_size: u32) -> Result { Ok(token) } +/// Generates a personal API key. +/// Upstream uses 30 chars, which is ~178 bits of entropy. +pub fn generate_api_key() -> String { + get_random_string_alphanum(30) +} + // // Constant time compare // diff --git a/src/db/models/user.rs b/src/db/models/user.rs index 0197535b..599661e5 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -44,8 +44,9 @@ db_object! 
{ pub client_kdf_type: i32, pub client_kdf_iter: i32, - } + pub api_key: Option, + } #[derive(Identifiable, Queryable, Insertable)] #[table_name = "invitations"] @@ -110,6 +111,8 @@ impl User { client_kdf_type: Self::CLIENT_KDF_TYPE_DEFAULT, client_kdf_iter: Self::CLIENT_KDF_ITER_DEFAULT, + + api_key: None, } } @@ -130,6 +133,10 @@ impl User { } } + pub fn check_valid_api_key(&self, key: &str) -> bool { + matches!(self.api_key, Some(ref api_key) if crate::crypto::ct_eq(api_key, key)) + } + /// Set the password hash generated /// And resets the security_stamp. Based upon the allow_next_route the security_stamp will be different. /// diff --git a/src/db/schemas/mysql/schema.rs b/src/db/schemas/mysql/schema.rs index 8bfeae4c..61234a16 100644 --- a/src/db/schemas/mysql/schema.rs +++ b/src/db/schemas/mysql/schema.rs @@ -178,6 +178,7 @@ table! { excluded_globals -> Text, client_kdf_type -> Integer, client_kdf_iter -> Integer, + api_key -> Nullable, } } diff --git a/src/db/schemas/postgresql/schema.rs b/src/db/schemas/postgresql/schema.rs index 06939ab6..855b4fbc 100644 --- a/src/db/schemas/postgresql/schema.rs +++ b/src/db/schemas/postgresql/schema.rs @@ -178,6 +178,7 @@ table! { excluded_globals -> Text, client_kdf_type -> Integer, client_kdf_iter -> Integer, + api_key -> Nullable, } } diff --git a/src/db/schemas/sqlite/schema.rs b/src/db/schemas/sqlite/schema.rs index 06939ab6..855b4fbc 100644 --- a/src/db/schemas/sqlite/schema.rs +++ b/src/db/schemas/sqlite/schema.rs @@ -178,6 +178,7 @@ table! { excluded_globals -> Text, client_kdf_type -> Integer, client_kdf_iter -> Integer, + api_key -> Nullable, } } From 8f7900759fdd2f2578461ac8aa38bbcd854a6b0f Mon Sep 17 00:00:00 2001 From: Jeremy Lin Date: Thu, 20 Jan 2022 21:50:58 -0800 Subject: [PATCH 21/25] Fix `scope` and `refresh_token` for API key logins API key logins use a scope of `api`, not `api offline_access`. Since `offline_access` is not requested, no `refresh_token` is returned either. 
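In practice, a client that logs in with an API key cannot fall back to the `refresh_token` grant and is expected to repeat the `client_credentials` request once its access token lapses. A minimal client-side sketch of that behaviour (an assumption about client usage, not part of this patch):

    // Track expiry of an access token obtained via client_credentials. Because
    // the "api" scope carries no offline_access, there is no refresh_token; an
    // expired token is simply replaced by re-running the API-key login.
    use std::time::{Duration, Instant};

    struct ApiKeyToken {
        access_token: String,
        expires_at: Instant,
    }

    impl ApiKeyToken {
        fn new(access_token: String, expires_in_secs: u64) -> Self {
            Self {
                access_token,
                expires_at: Instant::now() + Duration::from_secs(expires_in_secs),
            }
        }

        // Some(token) while still valid; None means "log in again with the API key".
        fn get(&self) -> Option<&str> {
            if Instant::now() < self.expires_at {
                Some(self.access_token.as_str())
            } else {
                None
            }
        }
    }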
--- src/api/identity.rs | 27 +++++++++++++++------------ src/db/models/device.rs | 9 +++++++-- 2 files changed, 22 insertions(+), 14 deletions(-) diff --git a/src/api/identity.rs b/src/api/identity.rs index 49310930..0adc542f 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -61,13 +61,15 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { // Get device by refresh token let mut device = Device::find_by_refresh_token(&token, &conn).map_res("Invalid refresh token")?; - // COMMON + let scope = "api offline_access"; + let scope_vec = vec!["api".into(), "offline_access".into()]; + + // Common let user = User::find_by_uuid(&device.user_uuid, &conn).unwrap(); let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); - - let (access_token, expires_in) = device.refresh_tokens(&user, orgs); - + let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); device.save(&conn)?; + Ok(Json(json!({ "access_token": access_token, "expires_in": expires_in, @@ -79,7 +81,7 @@ fn _refresh_login(data: ConnectData, conn: DbConn) -> JsonResult { "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false, // TODO: according to official server seems something like: user.password_hash.is_empty(), but would need testing - "scope": "api offline_access", + "scope": scope, "unofficialServer": true, }))) } @@ -90,6 +92,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult if scope != "api offline_access" { err!("Scope not supported") } + let scope_vec = vec!["api".into(), "offline_access".into()]; // Ratelimit the login crate::ratelimit::check_limit_login(&ip.ip)?; @@ -157,8 +160,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult // Common let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); - - let (access_token, expires_in) = device.refresh_tokens(&user, orgs); + let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); device.save(&conn)?; let mut result = json!({ @@ -173,7 +175,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above - "scope": "api offline_access", + "scope": scope, "unofficialServer": true, }); @@ -191,6 +193,7 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult if scope != "api" { err!("Scope not supported") } + let scope_vec = vec!["api".into()]; // Ratelimit the login crate::ratelimit::check_limit_login(&ip.ip)?; @@ -232,24 +235,24 @@ fn _api_key_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult // Common let orgs = UserOrganization::find_confirmed_by_user(&user.uuid, &conn); - - let (access_token, expires_in) = device.refresh_tokens(&user, orgs); + let (access_token, expires_in) = device.refresh_tokens(&user, orgs, scope_vec); device.save(&conn)?; info!("User {} logged in successfully via API key. IP: {}", user.email, ip.ip); + // Note: No refresh_token is returned. The CLI just repeats the + // client_credentials login flow when the existing token expires. 
Ok(Json(json!({ "access_token": access_token, "expires_in": expires_in, "token_type": "Bearer", - "refresh_token": device.refresh_token, "Key": user.akey, "PrivateKey": user.private_key, "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false, // TODO: Same as above - "scope": "api", + "scope": scope, "unofficialServer": true, }))) } diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 2fbdea01..05955c04 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -60,7 +60,12 @@ impl Device { self.twofactor_remember = None; } - pub fn refresh_tokens(&mut self, user: &super::User, orgs: Vec) -> (String, i64) { + pub fn refresh_tokens( + &mut self, + user: &super::User, + orgs: Vec, + scope: Vec, + ) -> (String, i64) { // If there is no refresh token, we create one if self.refresh_token.is_empty() { use crate::crypto; @@ -98,7 +103,7 @@ impl Device { sstamp: user.security_stamp.to_string(), device: self.uuid.to_string(), - scope: vec!["api".into(), "offline_access".into()], + scope, amr: vec!["Application".into()], }; From f4a9645b54f1c7361799c394360fdc079df9f502 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20Garc=C3=ADa?= Date: Sun, 23 Jan 2022 23:40:59 +0100 Subject: [PATCH 22/25] Remove references to "bwrs" #2195 Squashed commit of the following: MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit commit 1bdf1c7954e0731c95703d10118f3874ab5155d3 Merge: 8ba6e61 7257251 Author: Daniel García Date: Sun Jan 23 23:40:17 2022 +0100 Merge branch 'remove-bwrs' of https://github.com/RealOrangeOne/vaultwarden into RealOrangeOne-remove-bwrs commit 7257251ecf23af18deb894e8f2e5519a15360c76 Author: Jake Howard Date: Thu Jan 6 17:48:18 2022 +0000 Use `or_else` to save potentially unnecessary function call commit 40ae81dd3c43a596375d5bfdcc00053e786328cc Author: Jake Howard Date: Wed Jan 5 21:18:24 2022 +0000 Move $BWRS_VERSION fallback into build.rs commit 743ef74b307a662960f3ca1b9636f3608506516d Author: Jake Howard Date: Sat Jan 1 23:08:27 2022 +0000 Revert "Add feature to enable use of `Option::or` in const context" This reverts commit fe8e043b8aaf77c083747bf11760f29b53df0bba. We want to run on stable soon, where these features are not supported commit a1f0da638c8b6ba32209318b105bde1efdd47082 Author: Jake Howard Date: Sat Jan 1 13:04:47 2022 +0000 Rename web vault version file https://github.com/dani-garcia/bw_web_builds/pull/58 commit fe8e043b8aaf77c083747bf11760f29b53df0bba Author: Jake Howard Date: Sat Jan 1 12:56:44 2022 +0000 Add feature to enable use of `Option::or` in const context commit 687435c8b2b995e90bf6f0ee619bc305e37bc183 Author: Jake Howard Date: Sat Jan 1 12:27:28 2022 +0000 Continue to allow using `$BWRS_VERSION` commit 8e2f708e5037db8071251c582ebaf1a97d8e5923 Author: Jake Howard Date: Fri Dec 31 11:41:34 2021 +0000 Remove references to "bwrs" The only remaining one is getting the version of the web vault, which requires coordinating with the web vault patching. 
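For version handling specifically, the net effect of the hunks below is that build.rs resolves the displayed version from $VW_VERSION, then the legacy $BWRS_VERSION, then git metadata, and bakes the result into the binary; e.g. `VW_VERSION=1.24.0 cargo build --features sqlite` overrides the git-derived value. A condensed restatement of the consumer side (no behaviour beyond what the main.rs hunk adds):

    // VW_VERSION is emitted by build.rs via cargo:rustc-env, so option_env! sees
    // it at compile time; it is None only when no env var or git info was found.
    pub const VERSION: Option<&str> = option_env!("VW_VERSION");

    fn displayed_version() -> &'static str {
        VERSION.unwrap_or("(Version info from Git not present)")
    }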
--- build.rs | 41 +++++++++----------- src/api/admin.rs | 7 ++-- src/api/core/mod.rs | 2 +- src/api/web.rs | 2 +- src/main.rs | 10 +++-- src/static/templates/admin/base.hbs | 10 ++--- src/static/templates/admin/organizations.hbs | 6 +-- src/static/templates/admin/users.hbs | 6 +-- src/static/templates/email/email_footer.hbs | 6 +-- src/static/templates/email/email_header.hbs | 4 +- 10 files changed, 46 insertions(+), 48 deletions(-) diff --git a/build.rs b/build.rs index 39c16095..7d0a7bce 100644 --- a/build.rs +++ b/build.rs @@ -15,11 +15,14 @@ fn main() { "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite" ); - if let Ok(version) = env::var("BWRS_VERSION") { - println!("cargo:rustc-env=BWRS_VERSION={}", version); + // Support $BWRS_VERSION for legacy compatibility, but default to $VW_VERSION. + // If neither exist, read from git. + let maybe_vaultwarden_version = + env::var("VW_VERSION").or_else(|_| env::var("BWRS_VERSION")).or_else(|_| version_from_git_info()); + + if let Ok(version) = maybe_vaultwarden_version { + println!("cargo:rustc-env=VW_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); - } else { - read_git_info().ok(); } } @@ -33,7 +36,13 @@ fn run(args: &[&str]) -> Result { } /// This method reads info from Git, namely tags, branch, and revision -fn read_git_info() -> Result<(), std::io::Error> { +/// To access these values, use: +/// - env!("GIT_EXACT_TAG") +/// - env!("GIT_LAST_TAG") +/// - env!("GIT_BRANCH") +/// - env!("GIT_REV") +/// - env!("VW_VERSION") +fn version_from_git_info() -> Result { // The exact tag for the current commit, can be empty when // the current commit doesn't have an associated tag let exact_tag = run(&["git", "describe", "--abbrev=0", "--tags", "--exact-match"]).ok(); @@ -56,23 +65,11 @@ fn read_git_info() -> Result<(), std::io::Error> { println!("cargo:rustc-env=GIT_REV={}", rev_short); // Combined version - let version = if let Some(exact) = exact_tag { - exact + if let Some(exact) = exact_tag { + Ok(exact) } else if &branch != "main" && &branch != "master" { - format!("{}-{} ({})", last_tag, rev_short, branch) + Ok(format!("{}-{} ({})", last_tag, rev_short, branch)) } else { - format!("{}-{}", last_tag, rev_short) - }; - - println!("cargo:rustc-env=BWRS_VERSION={}", version); - println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); - - // To access these values, use: - // env!("GIT_EXACT_TAG") - // env!("GIT_LAST_TAG") - // env!("GIT_BRANCH") - // env!("GIT_REV") - // env!("BWRS_VERSION") - - Ok(()) + Ok(format!("{}-{}", last_tag, rev_short)) + } } diff --git a/src/api/admin.rs b/src/api/admin.rs index 60f6aad4..38d30c99 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -21,7 +21,7 @@ use crate::{ util::{ docker_base_image, format_naive_datetime_local, get_display_size, get_reqwest_client, is_running_in_docker, }, - CONFIG, + CONFIG, VERSION, }; pub fn routes() -> Vec { @@ -74,11 +74,10 @@ fn admin_disabled() -> &'static str { "The admin panel is disabled, please configure the 'ADMIN_TOKEN' variable to enable it" } -const COOKIE_NAME: &str = "BWRS_ADMIN"; +const COOKIE_NAME: &str = "VW_ADMIN"; const ADMIN_PATH: &str = "/admin"; const BASE_TEMPLATE: &str = "admin/base"; -const VERSION: Option<&str> = option_env!("BWRS_VERSION"); fn admin_path() -> String { format!("{}{}", CONFIG.domain_path(), ADMIN_PATH) @@ -486,7 +485,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // Get current running versions let 
web_vault_version: WebVaultVersion = - match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "bwrs-version.json")) { + match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "vw-version.json")) { Ok(s) => serde_json::from_str(&s)?, _ => match read_file_string(&format!("{}/{}", CONFIG.web_vault_folder(), "version.json")) { Ok(s) => serde_json::from_str(&s)?, diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index f828dc44..77e8780d 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -170,7 +170,7 @@ fn hibp_breach(username: String) -> JsonResult { "BreachDate": "2019-08-18T00:00:00Z", "AddedDate": "2019-08-18T00:00:00Z", "Description": format!("Go to: https://haveibeenpwned.com/account/{account} for a manual check.
HaveIBeenPwned API key not set!
Go to https://haveibeenpwned.com/API/Key to purchase an API key from HaveIBeenPwned.
", account=username), - "LogoPath": "bwrs_static/hibp.png", + "LogoPath": "vw_static/hibp.png", "PwnCount": 0, "DataClasses": [ "Error - No API key set!" diff --git a/src/api/web.rs b/src/api/web.rs index 154dc2cf..9a5f74cc 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -77,7 +77,7 @@ fn alive(_conn: DbConn) -> Json { Json(format_date(&Utc::now().naive_utc())) } -#[get("/bwrs_static/")] +#[get("/vw_static/")] fn static_files(filename: String) -> Result, Error> { match filename.as_ref() { "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), diff --git a/src/main.rs b/src/main.rs index dd9fa51e..d7bef292 100644 --- a/src/main.rs +++ b/src/main.rs @@ -76,16 +76,18 @@ const HELP: &str = "\ -v, --version Prints the app version "; +pub const VERSION: Option<&str> = option_env!("VW_VERSION"); + fn parse_args() { - const NO_VERSION: &str = "(Version info from Git not present)"; let mut pargs = pico_args::Arguments::from_env(); + let version = VERSION.unwrap_or("(Version info from Git not present)"); if pargs.contains(["-h", "--help"]) { - println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION)); + println!("vaultwarden {}", version); print!("{}", HELP); exit(0); } else if pargs.contains(["-v", "--version"]) { - println!("vaultwarden {}", option_env!("BWRS_VERSION").unwrap_or(NO_VERSION)); + println!("vaultwarden {}", version); exit(0); } } @@ -94,7 +96,7 @@ fn launch_info() { println!("/--------------------------------------------------------------------\\"); println!("| Starting Vaultwarden |"); - if let Some(version) = option_env!("BWRS_VERSION") { + if let Some(version) = VERSION { println!("|{:^68}|", format!("Version {}", version)); } diff --git a/src/static/templates/admin/base.hbs b/src/static/templates/admin/base.hbs index 9c876723..d385cdcd 100644 --- a/src/static/templates/admin/base.hbs +++ b/src/static/templates/admin/base.hbs @@ -4,9 +4,9 @@ - + Vaultwarden Admin Panel - + - + - + diff --git a/src/static/templates/admin/organizations.hbs b/src/static/templates/admin/organizations.hbs index 05509659..ac3c0f30 100644 --- a/src/static/templates/admin/organizations.hbs +++ b/src/static/templates/admin/organizations.hbs @@ -48,9 +48,9 @@ - - - + + + - + + +