Browse Source

Adjust query_logger and some other small items

Signed-off-by: BlackDex <black.dex@gmail.com>
pull/6279/head
BlackDex 3 weeks ago
parent
commit
e58d64fc00
No known key found for this signature in database GPG Key ID: 58C80A2AA6C765E1
  1. 24
      src/db/mod.rs
  2. 73
      src/db/query_logger.rs
  3. 5
      src/main.rs

24
src/db/mod.rs

@@ -127,8 +127,8 @@ impl Drop for DbPool {
impl DbPool { impl DbPool {
// For the given database URL, guess its type, run migrations, create pool, and return it // For the given database URL, guess its type, run migrations, create pool, and return it
pub fn from_config() -> Result<Self, Error> { pub fn from_config() -> Result<Self, Error> {
let url = CONFIG.database_url(); let db_url = CONFIG.database_url();
let conn_type = DbConnType::from_url(&url)?; let conn_type = DbConnType::from_url(&db_url)?;
// Only set the default instrumentation if the log level is specifically set to either warn, info or debug // Only set the default instrumentation if the log level is specifically set to either warn, info or debug
if log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Warn) if log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Warn)
@@ -141,20 +141,20 @@ impl DbPool {
match conn_type { match conn_type {
#[cfg(mysql)] #[cfg(mysql)]
DbConnType::Mysql => { DbConnType::Mysql => {
mysql_migrations::run_migrations(&url)?; mysql_migrations::run_migrations(&db_url)?;
} }
#[cfg(postgresql)] #[cfg(postgresql)]
DbConnType::Postgresql => { DbConnType::Postgresql => {
postgresql_migrations::run_migrations(&url)?; postgresql_migrations::run_migrations(&db_url)?;
} }
#[cfg(sqlite)] #[cfg(sqlite)]
DbConnType::Sqlite => { DbConnType::Sqlite => {
sqlite_migrations::run_migrations(&url)?; sqlite_migrations::run_migrations(&db_url)?;
} }
} }
let max_conns = CONFIG.database_max_conns(); let max_conns = CONFIG.database_max_conns();
let manager = ConnectionManager::<DbConnInner>::new(&url); let manager = ConnectionManager::<DbConnInner>::new(&db_url);
let pool = Pool::builder() let pool = Pool::builder()
.max_size(max_conns) .max_size(max_conns)
.min_idle(Some(CONFIG.database_min_conns())) .min_idle(Some(CONFIG.database_min_conns()))
@@ -364,10 +364,10 @@ mod sqlite_migrations {
use diesel_migrations::{EmbeddedMigrations, MigrationHarness}; use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite"); pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");
pub fn run_migrations(url: &str) -> Result<(), super::Error> { pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
// Establish a connection to the sqlite database (this will create a new one, if it does // Establish a connection to the sqlite database (this will create a new one, if it does
// not exist, and exit if there is an error). // not exist, and exit if there is an error).
let mut connection = diesel::sqlite::SqliteConnection::establish(url)?; let mut connection = diesel::sqlite::SqliteConnection::establish(db_url)?;
// Run the migrations after successfully establishing a connection // Run the migrations after successfully establishing a connection
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
@@ -392,9 +392,9 @@ mod mysql_migrations {
use diesel_migrations::{EmbeddedMigrations, MigrationHarness}; use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/mysql"); pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/mysql");
pub fn run_migrations(url: &str) -> Result<(), super::Error> { pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let mut connection = diesel::mysql::MysqlConnection::establish(url)?; let mut connection = diesel::mysql::MysqlConnection::establish(db_url)?;
// Disable Foreign Key Checks during migration // Disable Foreign Key Checks during migration
// Scoped to a connection/session. // Scoped to a connection/session.
@@ -413,9 +413,9 @@ mod postgresql_migrations {
use diesel_migrations::{EmbeddedMigrations, MigrationHarness}; use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/postgresql"); pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/postgresql");
pub fn run_migrations(url: &str) -> Result<(), super::Error> { pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
// Make sure the database is up to date (create if it doesn't exist, or run the migrations) // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
let mut connection = diesel::pg::PgConnection::establish(url)?; let mut connection = diesel::pg::PgConnection::establish(db_url)?;
connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations"); connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
Ok(()) Ok(())

73
src/db/query_logger.rs

@@ -1,56 +1,59 @@
use dashmap::DashMap;
use diesel::connection::{Instrumentation, InstrumentationEvent}; use diesel::connection::{Instrumentation, InstrumentationEvent};
use std::{ use std::{cell::RefCell, collections::HashMap, time::Instant};
sync::{Arc, LazyLock},
thread,
time::Instant,
};
pub static QUERY_PERF_TRACKER: LazyLock<Arc<DashMap<(thread::ThreadId, String), Instant>>> = thread_local! {
LazyLock::new(|| Arc::new(DashMap::new())); static QUERY_PERF_TRACKER: RefCell<HashMap<String, Instant>> = RefCell::new(HashMap::new());
}
pub fn simple_logger() -> Option<Box<dyn Instrumentation>> { pub fn simple_logger() -> Option<Box<dyn Instrumentation>> {
Some(Box::new(|event: InstrumentationEvent<'_>| match event { Some(Box::new(|event: InstrumentationEvent<'_>| match event {
InstrumentationEvent::StartEstablishConnection { // TODO: Figure out where the invalid connection errors are coming from
url, // There seem to be some invalid errors when connecting to a SQLite database
.. // Until the cause of this is found and resolved, disable the Connection logging
} => { // InstrumentationEvent::StartEstablishConnection {
debug!("Establishing connection: {url}") // url,
} // ..
InstrumentationEvent::FinishEstablishConnection { // } => {
url, // debug!("Establishing connection: {url}")
error, // }
.. // InstrumentationEvent::FinishEstablishConnection {
} => { // url,
if let Some(e) = error { // error,
error!("Error during establishing a connection with {url}: {e:?}") // ..
} else { // } => {
debug!("Connection established: {url}") // if let Some(e) = error {
} // error!("Error during establishing a connection with {url}: {e:?}")
} // } else {
// debug!("Connection established: {url}")
// }
// }
InstrumentationEvent::StartQuery { InstrumentationEvent::StartQuery {
query, query,
.. ..
} => { } => {
let query_string = format!("{query:?}"); let query_string = format!("{query:?}");
let start = Instant::now(); let start = Instant::now();
QUERY_PERF_TRACKER.insert((thread::current().id(), query_string), start); QUERY_PERF_TRACKER.with_borrow_mut(|map| {
map.insert(query_string, start);
});
} }
InstrumentationEvent::FinishQuery { InstrumentationEvent::FinishQuery {
query, query,
.. ..
} => { } => {
let query_string = format!("{query:?}"); let query_string = format!("{query:?}");
if let Some((_, start)) = QUERY_PERF_TRACKER.remove(&(thread::current().id(), query_string.clone())) { QUERY_PERF_TRACKER.with_borrow_mut(|map| {
let duration = start.elapsed(); if let Some(start) = map.remove(&query_string) {
if duration.as_secs() >= 5 { let duration = start.elapsed();
warn!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string); if duration.as_secs() >= 5 {
} else if duration.as_secs() >= 1 { warn!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
info!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string); } else if duration.as_secs() >= 1 {
} else { info!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
debug!("QUERY [{:?}]: {}", duration, query_string); } else {
debug!("QUERY [{:?}]: {}", duration, query_string);
}
} }
} });
} }
_ => {} _ => {}
})) }))

5
src/main.rs

@@ -1,9 +1,8 @@
#![cfg_attr(feature = "unstable", feature(ip))] #![cfg_attr(feature = "unstable", feature(ip))]
// The recursion_limit is mainly triggered by the json!() macro. // The recursion_limit is mainly triggered by the json!() macro.
// The more key/value pairs there are the more recursion occurs. // The more key/value pairs there are the more recursion occurs.
// We want to keep this as low as possible, but not higher then 128. // We want to keep this as low as possible!
// If you go above 128 it will cause rust-analyzer to fail, #![recursion_limit = "165"]
#![recursion_limit = "200"]
// When enabled use MiMalloc as malloc instead of the default malloc // When enabled use MiMalloc as malloc instead of the default malloc
#[cfg(feature = "enable_mimalloc")] #[cfg(feature = "enable_mimalloc")]

Loading…
Cancel
Save