
Adjust query_logger and some other small items

Signed-off-by: BlackDex <black.dex@gmail.com>
Branch: pull/6279/head
Author: BlackDex (3 weeks ago)
Commit: e58d64fc00
Changed files:
  1. src/db/mod.rs (24 changed lines)
  2. src/db/query_logger.rs (73 changed lines)
  3. src/main.rs (5 changed lines)

src/db/mod.rs (24 changed lines)

@@ -127,8 +127,8 @@ impl Drop for DbPool {
 impl DbPool {
     // For the given database URL, guess its type, run migrations, create pool, and return it
     pub fn from_config() -> Result<Self, Error> {
-        let url = CONFIG.database_url();
-        let conn_type = DbConnType::from_url(&url)?;
+        let db_url = CONFIG.database_url();
+        let conn_type = DbConnType::from_url(&db_url)?;

         // Only set the default instrumentation if the log level is specifically set to either warn, info or debug
         if log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Warn)

@@ -141,20 +141,20 @@ impl DbPool {
         match conn_type {
             #[cfg(mysql)]
             DbConnType::Mysql => {
-                mysql_migrations::run_migrations(&url)?;
+                mysql_migrations::run_migrations(&db_url)?;
             }
             #[cfg(postgresql)]
             DbConnType::Postgresql => {
-                postgresql_migrations::run_migrations(&url)?;
+                postgresql_migrations::run_migrations(&db_url)?;
             }
             #[cfg(sqlite)]
             DbConnType::Sqlite => {
-                sqlite_migrations::run_migrations(&url)?;
+                sqlite_migrations::run_migrations(&db_url)?;
             }
         }

         let max_conns = CONFIG.database_max_conns();
-        let manager = ConnectionManager::<DbConnInner>::new(&url);
+        let manager = ConnectionManager::<DbConnInner>::new(&db_url);
         let pool = Pool::builder()
             .max_size(max_conns)
             .min_idle(Some(CONFIG.database_min_conns()))

@@ -364,10 +364,10 @@ mod sqlite_migrations {
     use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
     pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/sqlite");

-    pub fn run_migrations(url: &str) -> Result<(), super::Error> {
+    pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
         // Establish a connection to the sqlite database (this will create a new one, if it does
         // not exist, and exit if there is an error).
-        let mut connection = diesel::sqlite::SqliteConnection::establish(url)?;
+        let mut connection = diesel::sqlite::SqliteConnection::establish(db_url)?;

         // Run the migrations after successfully establishing a connection
         // Disable Foreign Key Checks during migration

@@ -392,9 +392,9 @@ mod mysql_migrations {
     use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
     pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/mysql");

-    pub fn run_migrations(url: &str) -> Result<(), super::Error> {
+    pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let mut connection = diesel::mysql::MysqlConnection::establish(url)?;
+        let mut connection = diesel::mysql::MysqlConnection::establish(db_url)?;

         // Disable Foreign Key Checks during migration
         // Scoped to a connection/session.

@@ -413,9 +413,9 @@ mod postgresql_migrations {
     use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
     pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/postgresql");

-    pub fn run_migrations(url: &str) -> Result<(), super::Error> {
+    pub fn run_migrations(db_url: &str) -> Result<(), super::Error> {
         // Make sure the database is up to date (create if it doesn't exist, or run the migrations)
-        let mut connection = diesel::pg::PgConnection::establish(url)?;
+        let mut connection = diesel::pg::PgConnection::establish(db_url)?;

         connection.run_pending_migrations(MIGRATIONS).expect("Error running migrations");
         Ok(())
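
Side note on the `log_enabled!` gate in the first hunk: it decides whether the query logger is installed as diesel's default instrumentation at all. The sketch below illustrates that wiring under stated assumptions (diesel 2.2+'s `diesel::connection::set_default_instrumentation`, the `simple_logger()` factory from `src/db/query_logger.rs`, and a hypothetical `install_query_logger()` wrapper); it is not the literal code from this commit:

    // Sketch only: install the query logger as diesel's process-wide default
    // instrumentation, but only when its log target is enabled at warn/info/debug.
    fn install_query_logger() {
        if log::log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Warn)
            || log::log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Info)
            || log::log_enabled!(target: "vaultwarden::db::query_logger", log::Level::Debug)
        {
            // The factory fn is used to create instrumentation for new connections;
            // any error from setting the default is ignored here.
            let _ = diesel::connection::set_default_instrumentation(crate::db::query_logger::simple_logger);
        }
    }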

src/db/query_logger.rs (73 changed lines)

@@ -1,56 +1,59 @@
-use dashmap::DashMap;
 use diesel::connection::{Instrumentation, InstrumentationEvent};
-use std::{
-    sync::{Arc, LazyLock},
-    thread,
-    time::Instant,
-};
+use std::{cell::RefCell, collections::HashMap, time::Instant};

-pub static QUERY_PERF_TRACKER: LazyLock<Arc<DashMap<(thread::ThreadId, String), Instant>>> =
-    LazyLock::new(|| Arc::new(DashMap::new()));
+thread_local! {
+    static QUERY_PERF_TRACKER: RefCell<HashMap<String, Instant>> = RefCell::new(HashMap::new());
+}

 pub fn simple_logger() -> Option<Box<dyn Instrumentation>> {
     Some(Box::new(|event: InstrumentationEvent<'_>| match event {
-        InstrumentationEvent::StartEstablishConnection {
-            url,
-            ..
-        } => {
-            debug!("Establishing connection: {url}")
-        }
-        InstrumentationEvent::FinishEstablishConnection {
-            url,
-            error,
-            ..
-        } => {
-            if let Some(e) = error {
-                error!("Error during establishing a connection with {url}: {e:?}")
-            } else {
-                debug!("Connection established: {url}")
-            }
-        }
+        // TODO: Figure out where the invalid connection errors are coming from
+        // There seem to be some invalid errors when connecting to a SQLite database
+        // Until the cause of this is found and resolved, disable the Connection logging
+        // InstrumentationEvent::StartEstablishConnection {
+        //     url,
+        //     ..
+        // } => {
+        //     debug!("Establishing connection: {url}")
+        // }
+        // InstrumentationEvent::FinishEstablishConnection {
+        //     url,
+        //     error,
+        //     ..
+        // } => {
+        //     if let Some(e) = error {
+        //         error!("Error during establishing a connection with {url}: {e:?}")
+        //     } else {
+        //         debug!("Connection established: {url}")
+        //     }
+        // }
         InstrumentationEvent::StartQuery {
             query,
             ..
         } => {
             let query_string = format!("{query:?}");
             let start = Instant::now();
-            QUERY_PERF_TRACKER.insert((thread::current().id(), query_string), start);
+            QUERY_PERF_TRACKER.with_borrow_mut(|map| {
+                map.insert(query_string, start);
+            });
         }
         InstrumentationEvent::FinishQuery {
             query,
             ..
         } => {
             let query_string = format!("{query:?}");
-            if let Some((_, start)) = QUERY_PERF_TRACKER.remove(&(thread::current().id(), query_string.clone())) {
-                let duration = start.elapsed();
-                if duration.as_secs() >= 5 {
-                    warn!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
-                } else if duration.as_secs() >= 1 {
-                    info!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
-                } else {
-                    debug!("QUERY [{:?}]: {}", duration, query_string);
+            QUERY_PERF_TRACKER.with_borrow_mut(|map| {
+                if let Some(start) = map.remove(&query_string) {
+                    let duration = start.elapsed();
+                    if duration.as_secs() >= 5 {
+                        warn!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
+                    } else if duration.as_secs() >= 1 {
+                        info!("SLOW QUERY [{:.2}s]: {}", duration.as_secs_f32(), query_string);
+                    } else {
+                        debug!("QUERY [{:?}]: {}", duration, query_string);
+                    }
                 }
-            }
+            });
         }
         _ => {}
     }))
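
For context on the change above: the global `DashMap` keyed by `(ThreadId, String)` is replaced with a per-thread `RefCell<HashMap<String, Instant>>`, so each thread tracks its own in-flight queries without cross-thread synchronization (diesel's instrumentation callbacks run synchronously on the thread executing the query). Below is a standalone sketch of that pattern with illustrative names (`IN_FLIGHT`, `on_query_start`, `on_query_finish`) that are not taken from the commit:

    use std::{cell::RefCell, collections::HashMap, time::Instant};

    thread_local! {
        // Each thread owns its own map of query text -> start time.
        static IN_FLIGHT: RefCell<HashMap<String, Instant>> = RefCell::new(HashMap::new());
    }

    fn on_query_start(query: &str) {
        IN_FLIGHT.with_borrow_mut(|map| {
            map.insert(query.to_string(), Instant::now());
        });
    }

    fn on_query_finish(query: &str) {
        IN_FLIGHT.with_borrow_mut(|map| {
            if let Some(start) = map.remove(query) {
                println!("query took {:?}: {query}", start.elapsed());
            }
        });
    }

    fn main() {
        on_query_start("SELECT 1");
        on_query_finish("SELECT 1");
    }

`LocalKey::with_borrow_mut` is shorthand for `.with(|cell| f(&mut cell.borrow_mut()))` and is available since roughly Rust 1.73.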

src/main.rs (5 changed lines)

@@ -1,9 +1,8 @@
 #![cfg_attr(feature = "unstable", feature(ip))]
 // The recursion_limit is mainly triggered by the json!() macro.
 // The more key/value pairs there are the more recursion occurs.
-// We want to keep this as low as possible, but not higher then 128.
-// If you go above 128 it will cause rust-analyzer to fail,
-#![recursion_limit = "200"]
+// We want to keep this as low as possible!
+#![recursion_limit = "165"]

 // When enabled use MiMalloc as malloc instead of the default malloc
 #[cfg(feature = "enable_mimalloc")]
