diff --git a/build.rs b/build.rs
index 0277d21e..bce425e8 100644
--- a/build.rs
+++ b/build.rs
@@ -1,7 +1,7 @@
-use std::process::Command;
 use std::env;
+use std::process::Command;
 
-fn main() {
+fn main() {
     // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros
     #[cfg(feature = "sqlite")]
     println!("cargo:rustc-cfg=sqlite");
@@ -11,8 +11,10 @@ fn main() {
     println!("cargo:rustc-cfg=postgresql");
 
     #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))]
-    compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite");
-
+    compile_error!(
+        "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"
+    );
+
     if let Ok(version) = env::var("BWRS_VERSION") {
         println!("cargo:rustc-env=BWRS_VERSION={}", version);
         println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
@@ -61,7 +63,7 @@ fn read_git_info() -> Result<(), std::io::Error> {
     } else {
         format!("{}-{}", last_tag, rev_short)
     };
-
+
     println!("cargo:rustc-env=BWRS_VERSION={}", version);
     println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version);
 
diff --git a/src/api/admin.rs b/src/api/admin.rs
index 415311e7..0b519af3 100644
--- a/src/api/admin.rs
+++ b/src/api/admin.rs
@@ -105,7 +105,8 @@ fn admin_url(referer: Referer) -> String {
 fn admin_login(flash: Option<FlashMessage>) -> ApiResult<Html<String>> {
     // If there is an error, show it
     let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg()));
-    let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});
+    let json =
+        json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()});
 
     // Return the page
     let text = CONFIG.render_template(BASE_TEMPLATE, &json)?;
@@ -291,14 +292,16 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> JsonResult {
 #[get("/users/overview")]
 fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
     let users = User::get_all(&conn);
-    let users_json: Vec<Value> = users.iter()
+    let users_json: Vec<Value> = users
+        .iter()
         .map(|u| {
             let mut usr = u.to_json(&conn);
             usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn));
             usr["attachment_count"] = json!(Attachment::count_by_user(&u.uuid, &conn));
             usr["attachment_size"] = json!(get_display_size(Attachment::size_by_user(&u.uuid, &conn) as i32));
             usr
-        }).collect();
+        })
+        .collect();
 
     let text = AdminTemplateData::users(users_json).render()?;
     Ok(Html(text))
@@ -335,14 +338,17 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult {
 #[get("/organizations/overview")]
 fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult<Html<String>> {
     let organizations = Organization::get_all(&conn);
-    let organizations_json: Vec<Value> = organizations.iter().map(|o| {
+    let organizations_json: Vec<Value> = organizations
+        .iter()
+        .map(|o| {
             let mut org = o.to_json();
             org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn));
             org["cipher_count"] = json!(Cipher::count_by_org(&o.uuid, &conn));
             org["attachment_count"] = json!(Attachment::count_by_org(&o.uuid, &conn));
             org["attachment_size"] = json!(get_display_size(Attachment::size_by_org(&o.uuid, &conn) as i32));
             org
-    }).collect();
+        })
+        .collect();
 
     let text = AdminTemplateData::organizations(organizations_json).render()?;
     Ok(Html(text))
@@ -368,21 +374,20 @@ fn get_github_api<T: DeserializeOwned>(url: &str) -> Result<T, Error> {
     use std::time::Duration;
     let github_api = Client::builder().build()?;
 
-    Ok(
-        github_api.get(url)
+    Ok(github_api
+        .get(url)
         .timeout(Duration::from_secs(10))
         .header(USER_AGENT, "Bitwarden_RS")
         .send()?
         .error_for_status()?
-        .json::<T>()?
-    )
+        .json::<T>()?)
 }
 
 #[get("/diagnostics")]
 fn diagnostics(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
-    use std::net::ToSocketAddrs;
-    use chrono::prelude::*;
     use crate::util::read_file_string;
+    use chrono::prelude::*;
+    use std::net::ToSocketAddrs;
 
     let vault_version_path = format!("{}/{}", CONFIG.web_vault_folder(), "version.json");
     let vault_version_str = read_file_string(&vault_version_path)?;
@@ -397,20 +402,22 @@ fn diagnostics(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
     // If the DNS Check failed, do not even attempt to check for new versions since we were not able to resolve github.com
     let (latest_release, latest_commit, latest_web_build) = if dns_ok {
         (
-            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") {
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest")
+            {
                 Ok(r) => r.tag_name,
-                _ => "-".to_string()
+                _ => "-".to_string(),
             },
             match get_github_api::<GitCommit>("https://api.github.com/repos/dani-garcia/bitwarden_rs/commits/master") {
                 Ok(mut c) => {
                     c.sha.truncate(8);
                     c.sha
+                }
+                _ => "-".to_string(),
             },
-                _ => "-".to_string()
-            },
-            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") {
+            match get_github_api::<GitRelease>("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest")
+            {
                 Ok(r) => r.tag_name.trim_start_matches('v').to_string(),
-                _ => "-".to_string()
+                _ => "-".to_string(),
             },
         )
     } else {
diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs
index 335a57e5..83a4bf07 100644
--- a/src/api/core/ciphers.rs
+++ b/src/api/core/ciphers.rs
@@ -732,7 +732,13 @@ fn post_attachment(
     let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10]));
     let path = base_path.join(&file_name);
 
-    let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) {
+    let size = match field
+        .data
+        .save()
+        .memory_threshold(0)
+        .size_limit(size_limit)
+        .with_path(path.clone())
+    {
         SaveResult::Full(SavedData::File(_, size)) => size as i32,
         SaveResult::Full(other) => {
             std::fs::remove_file(path).ok();
@@ -781,7 +787,11 @@ fn post_attachment_admin(
     post_attachment(uuid, data, content_type, headers, conn, nt)
 }
 
-#[post("/ciphers/<uuid>/attachment/<attachment_id>/share", format = "multipart/form-data", data = "<data>")]
+#[post(
+    "/ciphers/<uuid>/attachment/<attachment_id>/share",
+    format = "multipart/form-data",
+    data = "<data>"
+)]
 fn post_attachment_share(
     uuid: String,
     attachment_id: String,
@@ -884,12 +894,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase, headers: Headers, conn:
 }
 
 #[post("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_post_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_post_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     delete_cipher_selected_post(data, headers, conn, nt)
 }
 
 #[put("/ciphers/delete-admin", data = "<data>")]
-fn delete_cipher_selected_put_admin(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult {
+fn delete_cipher_selected_put_admin(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    nt: Notify,
+) -> EmptyResult {
     delete_cipher_selected_put(data, headers, conn, nt)
 }
 
@@ -1041,7 +1061,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del
     Ok(())
 }
 
-fn _delete_multiple_ciphers(data: JsonUpcase<Value>, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult {
+fn _delete_multiple_ciphers(
+    data: JsonUpcase<Value>,
+    headers: Headers,
+    conn: DbConn,
+    soft_delete: bool,
+    nt: Notify,
+) -> EmptyResult {
     let data: Value = data.into_inner().data;
 
     let uuids = match data.get("Ids") {
diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs
index 1d7b99b6..feeec194 100644
--- a/src/api/core/organizations.rs
+++ b/src/api/core/organizations.rs
@@ -410,9 +410,7 @@ fn put_collection_users(
             continue;
         }
 
-        CollectionUser::save(&user.user_uuid, &coll_id,
-            d.ReadOnly, d.HidePasswords,
-            &conn)?;
+        CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?;
     }
 
     Ok(())
@@ -528,9 +526,7 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade
             match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                 None => err!("Collection not found in Organization"),
                 Some(collection) => {
-                    CollectionUser::save(&user.uuid, &collection.uuid,
-                        col.ReadOnly, col.HidePasswords,
-                        &conn)?;
+                    CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?;
                 }
             }
         }
@@ -785,9 +781,13 @@ fn edit_user(
             match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) {
                 None => err!("Collection not found in Organization"),
                 Some(collection) => {
-                    CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid,
-                        col.ReadOnly, col.HidePasswords,
-                        &conn)?;
+                    CollectionUser::save(
+                        &user_to_edit.user_uuid,
+                        &collection.uuid,
+                        col.ReadOnly,
+                        col.HidePasswords,
+                        &conn,
+                    )?;
                 }
             }
         }
@@ -973,7 +973,13 @@ struct PolicyData {
 }
 
 #[put("/organizations/<org_id>/policies/<pol_type>", data = "<data>")]
-fn put_policy(org_id: String, pol_type: i32, data: Json<PolicyData>, _headers: AdminHeaders, conn: DbConn) -> JsonResult {
+fn put_policy(
+    org_id: String,
+    pol_type: i32,
+    data: Json<PolicyData>,
+    _headers: AdminHeaders,
+    conn: DbConn,
+) -> JsonResult {
     let data: PolicyData = data.into_inner();
 
     let pol_type_enum = match OrgPolicyType::from_i32(pol_type) {
@@ -1043,4 +1049,4 @@ fn get_plans(_headers: Headers, _conn: DbConn) -> JsonResult {
         ],
         "ContinuationToken": null
     })))
-}
\ No newline at end of file
+}
diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs
index f4bd5df5..5f6cb452 100644
--- a/src/api/core/two_factor/authenticator.rs
+++ b/src/api/core/two_factor/authenticator.rs
@@ -141,7 +141,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl
     // The amount of steps back and forward in time
     // Also check if we need to disable time drifted TOTP codes.
     // If that is the case, we set the steps to 0 so only the current TOTP is valid.
- let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 }; + let steps: i64 = if CONFIG.authenticator_disable_time_drift() { + 0 + } else { + 1 + }; for step in -steps..=steps { let time_step = current_timestamp / 30i64 + step; diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 6aa6e013..a12d47ec 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -65,7 +65,10 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { twofactor.data = twofactor_data.to_json(); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -132,7 +135,10 @@ fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) - ); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -186,7 +192,8 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes /// Validate the email code when used as TwoFactor token mechanism pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { let mut email_data = EmailTokenData::from_json(&data)?; - let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?; + let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn) + .map_res("Two factor not found")?; let issued_token = match &email_data.last_token { Some(t) => t, _ => err!("No token available"), diff --git a/src/api/icons.rs b/src/api/icons.rs index f4e4edf7..212047f2 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -56,9 +56,7 @@ fn icon(domain: String) -> Option>>> { return None; } - get_icon(&domain).map(|icon| { - Cached::long(Content(ContentType::new("image", "x-icon"), icon)) - }) + get_icon(&domain).map(|icon| Cached::long(Content(ContentType::new("image", "x-icon"), icon))) } /// TODO: This is extracted from IpAddr::is_global, which is unstable: diff --git a/src/api/identity.rs b/src/api/identity.rs index 46a70405..8c07e186 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -106,7 +106,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() { let now = now.naive_utc(); - if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 { + if user.last_verifying_at.is_none() + || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() + > CONFIG.signups_verify_resend_time() as i64 + { let resend_limit = CONFIG.signups_verify_resend_limit() as i32; if resend_limit == 0 || user.login_verify_count < resend_limit { // We want to send another email verification if we require signups to verify @@ -160,7 +163,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Key": user.akey, "PrivateKey": user.private_key, //"TwoFactorToken": "11122233333444555666777888999" - + "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above @@ -223,12 +226,13 @@ fn 
twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), + None => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA token not provided" + ), }; - let selected_twofactor = twofactors - .into_iter() - .find(|tf| tf.atype == selected_id && tf.enabled); + let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); use crate::api::core::two_factor as _tf; use crate::crypto::ct_eq; @@ -237,18 +241,27 @@ fn twofactor_auth( let mut remember = data.two_factor_remember.unwrap_or(0); match TwoFactorType::from_i32(selected_id) { - Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?, + Some(TwoFactorType::Authenticator) => { + _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)? + } Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?, Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?, - Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?, - Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?, + Some(TwoFactorType::Duo) => { + _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)? + } + Some(TwoFactorType::Email) => { + _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)? + } Some(TwoFactorType::Remember) => { match device.twofactor_remember { Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => { remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } - _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"), + _ => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA Remember token not provided" + ), } } _ => err!("Invalid two factor provider"), diff --git a/src/api/notifications.rs b/src/api/notifications.rs index 0a01eaa8..d590cdc2 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -21,7 +21,7 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); fn websockets_err() -> EmptyResult { if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_and_swap(true, false, Ordering::Relaxed) { err!( - "########################################################### + "########################################################### '/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false. 
 ###########################################################################################"
@@ -161,7 +161,7 @@ impl WSHandler {
                 }
             }
         };
-
+
         // Otherwise verify the query parameter value
         let path = hs.request.resource();
         if let Some(params) = path.split('?').nth(1) {
diff --git a/src/api/web.rs b/src/api/web.rs
index 4b0e292b..a457ea49 100644
--- a/src/api/web.rs
+++ b/src/api/web.rs
@@ -71,19 +71,52 @@ fn alive() -> Json<String> {
 #[get("/bwrs_static/<filename>")]
 fn static_files(filename: String) -> Result<Content<&'static [u8]>, Error> {
     match filename.as_ref() {
-        "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))),
-        "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))),
-        "shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))),
-        "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))),
+        "mail-github.png" => Ok(Content(
+            ContentType::PNG,
+            include_bytes!("../static/images/mail-github.png"),
+        )),
+        "logo-gray.png" => Ok(Content(
+            ContentType::PNG,
+            include_bytes!("../static/images/logo-gray.png"),
+        )),
+        "shield-white.png" => Ok(Content(
+            ContentType::PNG,
+            include_bytes!("../static/images/shield-white.png"),
+        )),
+        "error-x.svg" => Ok(Content(
+            ContentType::SVG,
+            include_bytes!("../static/images/error-x.svg"),
+        )),
         "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))),
-        "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))),
-        "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))),
-        "md5.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/md5.js"))),
-        "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))),
-        "datatables.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/datatables.js"))),
-        "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))),
-        "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))),
+        "bootstrap.css" => Ok(Content(
+            ContentType::CSS,
+            include_bytes!("../static/scripts/bootstrap.css"),
+        )),
+        "bootstrap-native.js" => Ok(Content(
+            ContentType::JavaScript,
+            include_bytes!("../static/scripts/bootstrap-native.js"),
+        )),
+        "md5.js" => Ok(Content(
+            ContentType::JavaScript,
+            include_bytes!("../static/scripts/md5.js"),
+        )),
+        "identicon.js" => Ok(Content(
+            ContentType::JavaScript,
+            include_bytes!("../static/scripts/identicon.js"),
+        )),
+        "datatables.js" => Ok(Content(
+            ContentType::JavaScript,
+            include_bytes!("../static/scripts/datatables.js"),
+        )),
+        "datatables.css" => Ok(Content(
+            ContentType::CSS,
+            include_bytes!("../static/scripts/datatables.css"),
+        )),
+        "jquery-3.5.1.slim.js" => Ok(Content(
+            ContentType::JavaScript,
+            include_bytes!("../static/scripts/jquery-3.5.1.slim.js"),
+        )),
         _ => err!(format!("Static file not found: {}", filename)),
     }
 }
diff --git a/src/auth.rs b/src/auth.rs
index da6f8fa4..61e6f5dc 100644
--- a/src/auth.rs
+++ b/src/auth.rs
@@ -215,9 +215,7 @@ pub fn generate_admin_claims() -> AdminJWTClaims {
 //
 // Bearer token authentication
 //
-use rocket::{
-    request::{FromRequest, Request, Outcome},
-};
+use rocket::request::{FromRequest, Outcome, Request};
 
 use crate::db::{
     models::{Device, User, UserOrgStatus, UserOrgType,
UserOrganization}, diff --git a/src/config.rs b/src/config.rs index 60dc317c..6e032ed3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -458,7 +458,6 @@ make_config! { } fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { - // Validate connection URL is valid and DB feature is enabled DbConnType::from_url(&cfg.database_url)?; @@ -472,7 +471,9 @@ fn validate_config(cfg: &ConfigItems) -> Result<(), Error> { let dom = cfg.domain.to_lowercase(); if !dom.starts_with("http://") && !dom.starts_with("https://") { - err!("DOMAIN variable needs to contain the protocol (http, https). Use 'http[s]://bw.example.com' instead of 'bw.example.com'"); + err!( + "DOMAIN variable needs to contain the protocol (http, https). Use 'http[s]://bw.example.com' instead of 'bw.example.com'" + ); } let whitelist = &cfg.signups_domains_whitelist; @@ -567,7 +568,12 @@ impl Config { validate_config(&config)?; Ok(Config { - inner: RwLock::new(Inner { templates: load_templates(&config.templates_folder), config, _env, _usr }), + inner: RwLock::new(Inner { + templates: load_templates(&config.templates_folder), + config, + _env, + _usr, + }), }) } diff --git a/src/db/mod.rs b/src/db/mod.rs index f9dc2885..5afe10ed 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -25,7 +25,6 @@ pub mod __mysql_schema; #[path = "schemas/postgresql/schema.rs"] pub mod __postgresql_schema; - // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported macro_rules! generate_connections { ( $( $name:ident: $ty:ty ),+ ) => { @@ -109,7 +108,6 @@ impl DbConnType { } } - #[macro_export] macro_rules! db_run { // Same for all dbs @@ -124,10 +122,10 @@ macro_rules! db_run { $($( #[cfg($db)] crate::db::DbConn::$db(ref $conn) => { - paste::paste! { + paste::paste! { #[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *}; #[allow(unused)] use [<__ $db _model>]::*; - #[allow(unused)] use crate::db::FromDb; + #[allow(unused)] use crate::db::FromDb; } $body }, @@ -136,14 +134,13 @@ macro_rules! db_run { }; } - pub trait FromDb { type Output; #[allow(clippy::wrong_self_convention)] fn from_db(self) -> Self::Output; } -// For each struct eg. Cipher, we create a CipherDb inside a module named __$db_model (where $db is sqlite, mysql or postgresql), +// For each struct eg. Cipher, we create a CipherDb inside a module named __$db_model (where $db is sqlite, mysql or postgresql), // to implement the Diesel traits. We also provide methods to convert between them and the basic structs. Later, that module will be auto imported when using db_run! #[macro_export] macro_rules! db_object { @@ -153,10 +150,10 @@ macro_rules! db_object { $( $( #[$field_attr:meta] )* $vis:vis $field:ident : $typ:ty ),+ $(,)? } - )+ ) => { + )+ ) => { // Create the normal struct, without attributes $( pub struct $name { $( /*$( #[$field_attr] )**/ $vis $field : $typ, )+ } )+ - + #[cfg(sqlite)] pub mod __sqlite_model { $( db_object! { @db sqlite | $( #[$attr] )* | $name | $( $( #[$field_attr] )* $field : $typ ),+ } )+ } #[cfg(mysql)] @@ -177,7 +174,7 @@ macro_rules! 
db_object { )+ } impl [<$name Db>] { - #[allow(clippy::wrong_self_convention)] + #[allow(clippy::wrong_self_convention)] #[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } } } @@ -259,10 +256,9 @@ mod sqlite_migrations { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration - + // Scoped to a connection. diesel::sql_query("PRAGMA foreign_keys = OFF") .execute(&connection) @@ -288,8 +284,7 @@ mod mysql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection/session. @@ -310,10 +305,9 @@ mod postgresql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration - + // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, // "SET CONSTRAINTS sets the behavior of constraint checking within the // current transaction", so this setting probably won't take effect for diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index f89a6dbc..5f35706b 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -59,7 +59,6 @@ use crate::error::MapResult; /// Database methods impl Attachment { - pub fn save(&self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: sqlite, mysql { diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 0536d46b..e22e3a8f 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -2,14 +2,7 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; use super::{ - Attachment, - CollectionCipher, - Favorite, - FolderCipher, - Organization, - User, - UserOrgStatus, - UserOrgType, + Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization, }; @@ -85,31 +78,40 @@ impl Cipher { let attachments = Attachment::find_by_cipher(&self.uuid, conn); let attachments_json: Vec = attachments.iter().map(|c| c.to_json(host)).collect(); - let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - - let (read_only, hide_passwords) = - match self.get_access_restrictions(&user_uuid, conn) { - Some((ro, hp)) => (ro, hp), - None => { - error!("Cipher ownership assertion failure"); - (true, true) - }, - }; + let fields_json = self + .fields + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); + let password_history_json = self + .password_history + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); + + let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) { + Some((ro, hp)) => (ro, hp), + None => { + error!("Cipher ownership assertion failure"); + (true, true) + } + }; // Get the data or a default empty value to avoid issues with the mobile apps - let mut data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| json!({ - "Fields":null, - "Name": self.name, - "Notes":null, - "Password":null, - "PasswordHistory":null, - "PasswordRevisionDate":null, - "Response":null, - "Totp":null, - "Uris":null, - "Username":null - })); + let mut data_json: Value = serde_json::from_str(&self.data).unwrap_or_else(|_| { + json!({ + "Fields":null, + "Name": self.name, + "Notes":null, + "Password":null, + "PasswordHistory":null, + "PasswordRevisionDate":null, + "Response":null, + "Totp":null, + "Uris":null, + "Username":null + }) + }); // TODO: ******* Backwards compat start ********** // To remove backwards compatibility, just remove this entire section diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 4b506a97..e073decc 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher}; +use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Debug, Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -253,7 +253,13 @@ impl CollectionUser { }} } - pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult { + pub fn save( + user_uuid: &str, + collection_uuid: &str, + read_only: bool, + hide_passwords: bool, + conn: &DbConn, + ) -> EmptyResult { User::update_uuid_revision(&user_uuid, conn); db_run! 
{ conn: @@ -364,7 +370,7 @@ impl CollectionUser { diesel::delete(users_collections::table.filter( users_collections::user_uuid.eq(user_uuid) .and(users_collections::collection_uuid.eq(user.collection_uuid)) - + )) .execute(conn) .map_res("Error removing user from collections")?; diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 6d6743ff..ceae97a1 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -74,10 +74,26 @@ impl Device { let time_now = Utc::now().naive_utc(); self.updated_at = time_now; - let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + let orgowner: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 0) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgadmin: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 1) + .map(|o| o.org_uuid.clone()) + .collect(); + let orguser: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 2) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgmanager: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 3) + .map(|o| o.org_uuid.clone()) + .collect(); // Create the JWT claims struct, to send to the client use crate::auth::{encode_jwt, LoginJWTClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; @@ -117,7 +133,7 @@ impl Device { pub fn save(&mut self, conn: &DbConn) -> EmptyResult { self.updated_at = Utc::now().naive_utc(); - db_run! { conn: + db_run! { conn: sqlite, mysql { crate::util::retry( || diesel::replace_into(devices::table).values(DeviceDb::to_db(self)).execute(conn), diff --git a/src/db/models/favorite.rs b/src/db/models/favorite.rs index f419e07d..9e36cc75 100644 --- a/src/db/models/favorite.rs +++ b/src/db/models/favorite.rs @@ -20,7 +20,7 @@ use crate::error::MapResult; impl Favorite { // Returns whether the specified cipher is a favorite of the specified user. pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { - db_run!{ conn: { + db_run! { conn: { let query = favorites::table .filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::user_uuid.eq(user_uuid)) @@ -36,19 +36,19 @@ impl Favorite { match (old, new) { (false, true) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { - diesel::insert_into(favorites::table) - .values(( - favorites::user_uuid.eq(user_uuid), - favorites::cipher_uuid.eq(cipher_uuid), - )) - .execute(conn) - .map_res("Error adding favorite") - }} + db_run! { conn: { + diesel::insert_into(favorites::table) + .values(( + favorites::user_uuid.eq(user_uuid), + favorites::cipher_uuid.eq(cipher_uuid), + )) + .execute(conn) + .map_res("Error adding favorite") + }} } (true, false) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { + db_run! { conn: { diesel::delete( favorites::table .filter(favorites::user_uuid.eq(user_uuid)) @@ -59,7 +59,7 @@ impl Favorite { }} } // Otherwise, the favorite status is already what it should be. - _ => Ok(()) + _ => Ok(()), } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index 3656afb3..be791caf 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -109,7 +109,6 @@ impl Folder { User::update_uuid_revision(&self.user_uuid, conn); FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) .execute(conn) diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 5fb4f36e..20074e66 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -1,8 +1,8 @@ +use num_traits::FromPrimitive; use serde_json::Value; use std::cmp::Ordering; -use num_traits::FromPrimitive; -use super::{CollectionUser, User, OrgPolicy}; +use super::{CollectionUser, OrgPolicy, User}; db_object! { #[derive(Debug, Identifiable, Queryable, Insertable, AsChangeset)] @@ -35,8 +35,7 @@ pub enum UserOrgStatus { Confirmed = 2, } -#[derive(Copy, Clone, PartialEq, Eq)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)] pub enum UserOrgType { Owner = 0, Admin = 1, @@ -244,7 +243,6 @@ impl Organization { UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?; - db_run! { conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) .execute(conn) @@ -332,11 +330,13 @@ impl UserOrganization { let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); collections .iter() - .map(|c| json!({ - "Id": c.collection_uuid, - "ReadOnly": c.read_only, - "HidePasswords": c.hide_passwords, - })) + .map(|c| { + json!({ + "Id": c.collection_uuid, + "ReadOnly": c.read_only, + "HidePasswords": c.hide_passwords, + }) + }) .collect() }; @@ -417,8 +417,7 @@ impl UserOrganization { } pub fn has_full_access(self) -> bool { - (self.access_all || self.atype >= UserOrgType::Admin) && - self.has_status(UserOrgStatus::Confirmed) + (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed) } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { diff --git a/src/error.rs b/src/error.rs index 80f9dafc..316703ac 100644 --- a/src/error.rs +++ b/src/error.rs @@ -33,10 +33,10 @@ macro_rules! 
make_error { }; } +use diesel::r2d2::PoolError as R2d2Err; use diesel::result::Error as DieselErr; use diesel::ConnectionError as DieselConErr; use diesel_migrations::RunMigrationsError as DieselMigErr; -use diesel::r2d2::PoolError as R2d2Err; use handlebars::RenderError as HbErr; use jsonwebtoken::errors::Error as JWTErr; use regex::Error as RegexErr; @@ -190,7 +190,7 @@ use rocket::response::{self, Responder, Response}; impl<'r> Responder<'r> for Error { fn respond_to(self, _: &Request) -> response::Result<'r> { match self.error { - ErrorKind::EmptyError(_) => {} // Don't print the error in this situation + ErrorKind::EmptyError(_) => {} // Don't print the error in this situation ErrorKind::SimpleError(_) => {} // Don't print the error in this situation _ => error!(target: "error", "{:#?}", self), }; diff --git a/src/mail.rs b/src/mail.rs index 7419169a..afb102bc 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -59,21 +59,32 @@ fn mailer() -> SmtpTransport { let smtp_client = match CONFIG.smtp_auth_mechanism() { Some(mechanism) => { - let allowed_mechanisms = vec![SmtpAuthMechanism::Plain, SmtpAuthMechanism::Login, SmtpAuthMechanism::Xoauth2]; + let allowed_mechanisms = vec![ + SmtpAuthMechanism::Plain, + SmtpAuthMechanism::Login, + SmtpAuthMechanism::Xoauth2, + ]; let mut selected_mechanisms = vec![]; for wanted_mechanism in mechanism.split(',') { for m in &allowed_mechanisms { - if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { + if m.to_string().to_lowercase() + == wanted_mechanism + .trim_matches(|c| c == '"' || c == '\'' || c == ' ') + .to_lowercase() + { selected_mechanisms.push(*m); } } - }; + } if !selected_mechanisms.is_empty() { smtp_client.authentication(selected_mechanisms) } else { // Only show a warning, and return without setting an actual authentication mechanism - warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism); + warn!( + "No valid SMTP Auth mechanism found for '{}', using default values", + mechanism + ); smtp_client } } @@ -330,7 +341,10 @@ fn send_email(address: &str, subject: &str, body_html: &str, body_text: &str) -> use uuid::Uuid; let unique_id = Uuid::new_v4().to_simple(); let boundary = format!("_Part_{}_", unique_id); - let alternative = MultiPart::alternative().boundary(boundary).singlepart(text).singlepart(html); + let alternative = MultiPart::alternative() + .boundary(boundary) + .singlepart(text) + .singlepart(html); let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() @@ -349,18 +363,18 @@ fn send_email(address: &str, subject: &str, body_html: &str, body_text: &str) -> Err(e) => match e { lettre::transport::smtp::Error::Client(x) => { err!(format!("SMTP Client error: {}", x)); - }, + } lettre::transport::smtp::Error::Transient(x) => { err!(format!("SMTP 4xx error: {:?}", x.message)); - }, + } lettre::transport::smtp::Error::Permanent(x) => { err!(format!("SMTP 5xx error: {:?}", x.message)); - }, + } lettre::transport::smtp::Error::Io(x) => { err!(format!("SMTP IO error: {}", x)); - }, + } // Fallback for all other errors - _ => Err(e.into()) - } + _ => Err(e.into()), + }, } } diff --git a/src/main.rs b/src/main.rs index 1da0d88e..45b8e4e4 100644 --- a/src/main.rs +++ b/src/main.rs @@ -118,7 +118,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { // Enable smtp debug logging only specifically for smtp when need. // This can contain sensitive information we do not want in the default debug/trace logging. 
     if CONFIG.smtp_debug() {
-        println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!");
+        println!(
+            "[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"
+        );
         println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n");
         logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug)
     } else {
@@ -270,7 +272,10 @@ fn check_web_vault() {
     let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html");
 
     if !index_path.exists() {
-        error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder());
+        error!(
+            "Web vault is not found at '{}'. To install it, please follow the steps in: ",
+            CONFIG.web_vault_folder()
+        );
         error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault");
         error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it");
         exit(1);
diff --git a/src/util.rs b/src/util.rs
index f33f5148..c7b1d65a 100644
--- a/src/util.rs
+++ b/src/util.rs
@@ -28,7 +28,10 @@ impl Fairing for AppHeaders {
         res.set_raw_header("X-Frame-Options", "SAMEORIGIN");
         res.set_raw_header("X-Content-Type-Options", "nosniff");
         res.set_raw_header("X-XSS-Protection", "1; mode=block");
-        let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors());
+        let csp = format!(
+            "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};",
+            CONFIG.allowed_iframe_ancestors()
+        );
         res.set_raw_header("Content-Security-Policy", csp);
 
         // Disable cache unless otherwise specified
@@ -283,8 +286,7 @@ where
 
 use std::env;
 
-pub fn get_env_str_value(key: &str) -> Option<String>
-{
+pub fn get_env_str_value(key: &str) -> Option<String> {
     let key_file = format!("{}_FILE", key);
     let value_from_env = env::var(key);
     let value_file = env::var(&key_file);
@@ -294,9 +296,9 @@ pub fn get_env_str_value(key: &str) -> Option<String>
         (Ok(v_env), Err(_)) => Some(v_env),
         (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) {
             Ok(content) => Some(content.trim().to_string()),
-            Err(e) => panic!("Failed to load {}: {:?}", key, e)
+            Err(e) => panic!("Failed to load {}: {:?}", key, e),
         },
-        _ => None
+        _ => None,
     }
 }