Directories for the database file, backups and logs are now defined in the configuration file

Greg Burri 2025-04-12 15:58:19 +02:00
parent 18e4f846fb
commit b4411ae892
6 changed files with 118 additions and 44 deletions

View file

@@ -26,11 +26,19 @@ pub struct Config {
     #[serde(default = "smtp_password_default")]
     pub smtp_password: String,
-    #[serde(default)]
+    #[serde(default = "backup_time_default")]
     pub backup_time: Option<NaiveTime>, // If not set, no backup will be done.
-    #[serde(default = "backup_directory_default")]
-    pub backup_directory: String,
+    /// Directory where the database is stored.
+    /// It's located in the current directory.
+    #[serde(default = "database_directory_default")]
+    pub database_directory: String,
+    #[serde(default = "backups_directory_default")]
+    pub backups_directory: String,
+    #[serde(default = "logs_directory_default")]
+    pub logs_directory: String,
 }

 fn port_default() -> u16 {
@@ -49,10 +57,22 @@ fn smtp_password_default() -> String {
     "password".to_string()
 }

-fn backup_directory_default() -> String {
+fn backup_time_default() -> Option<NaiveTime> {
+    NaiveTime::from_hms_opt(4, 0, 0) // 4 am.
+}
+
+fn database_directory_default() -> String {
     "data".to_string()
 }
+
+fn backups_directory_default() -> String {
+    "data/backups".to_string()
+}
+
+fn logs_directory_default() -> String {
+    "data/logs".to_string()
+}

 impl Config {
     pub fn default() -> Self {
         from_str("()").unwrap()
@@ -68,7 +88,9 @@ impl fmt::Debug for Config {
             .field("smtp_login", &self.smtp_login)
             .field("smtp_password", &"*****")
             .field("backup_time", &self.backup_time)
-            .field("backup_directory", &self.backup_directory)
+            .field("database_directory", &self.database_directory)
+            .field("backups_directory", &self.backups_directory)
+            .field("logs_directory", &self.logs_directory)
             .finish()
     }
 }
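
For reference, the new settings can be overridden in conf.ron. The snippet below is a hypothetical example built only from the field names and *_default() values shown above; the Some("04:00:00") form for backup_time is an assumption about how chrono's NaiveTime is deserialized, not something taken from the repository:

    // Hypothetical conf.ron; any field left out falls back to its default above.
    (
        database_directory: "data",
        backups_directory: "data/backups",
        logs_directory: "data/logs",
        backup_time: Some("04:00:00"), // 4 am; assumed "%H:%M:%S" string format
    )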

View file

@@ -11,10 +11,6 @@ pub const FILE_CONF: &str = "conf.ron";
 /// it's located in the current directory.
 pub const TRANSLATION_FILE: &str = "translation.ron";

-/// Directory where the database is stored.
-/// It's located in the current directory.
-pub const DB_DIRECTORY: &str = "data";
-
 /// Filename of the database.
 /// It's located in the `DB_DIRECTORY` directory.
 pub const DB_FILENAME: &str = "recipes.sqlite";

View file

@@ -16,6 +16,10 @@ pub fn start<P>(
 where
     P: AsRef<std::path::Path> + Send + Sync + 'static,
 {
+    if !directory.as_ref().exists() {
+        std::fs::DirBuilder::new().create(&directory).unwrap();
+    }
+
     if !directory.as_ref().is_dir() {
         panic!(
             "Path must be a directory: {}",

View file

@@ -51,8 +51,11 @@ pub struct Connection {
 }

 impl Connection {
-    pub async fn new() -> Result<Connection> {
-        let path = Path::new(consts::DB_DIRECTORY).join(consts::DB_FILENAME);
+    pub async fn new<P>(directory: P) -> Result<Connection>
+    where
+        P: AsRef<Path>,
+    {
+        let path = directory.as_ref().join(consts::DB_FILENAME);
         Self::new_from_file(path).await
     }
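
From a call site, the database directory now comes from the configuration instead of the removed DB_DIRECTORY constant. A minimal sketch, assuming an already-loaded Config value named config (this mirrors the main.rs change further down):

    // Hypothetical caller: open the database inside the configured directory.
    let Ok(db_connection) = db::Connection::new(&config.database_directory).await else {
        event!(Level::ERROR, "Unable to connect to the database");
        return;
    };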

backend/src/log.rs (new file, 56 lines)
View file

@@ -0,0 +1,56 @@
+use std::{fs, path::Path};
+
+use tracing_subscriber::{
+    fmt::writer::MakeWriterExt, layer::SubscriberExt, util::SubscriberInitExt,
+};
+
+#[cfg(debug_assertions)]
+const TRACING_LEVEL: tracing::Level = tracing::Level::DEBUG;
+
+#[cfg(debug_assertions)]
+const TRACING_DISPLAY_THREAD: bool = true;
+
+#[cfg(not(debug_assertions))]
+const TRACING_LEVEL: tracing::Level = tracing::Level::INFO;
+
+#[cfg(not(debug_assertions))]
+const TRACING_DISPLAY_THREAD: bool = false;
+
+pub fn init<P>(directory: P)
+where
+    P: AsRef<Path>,
+{
+    if !directory.as_ref().exists() {
+        fs::DirBuilder::new().create(&directory).unwrap();
+    }
+
+    let log_filepath = directory.as_ref().join(format!(
+        "recipes_{}.log",
+        chrono::Local::now().format("%Y-%m-%d_%H%M%S")
+    ));
+
+    println!("log file: {}", log_filepath.to_str().unwrap_or_default());
+
+    match std::fs::File::create(log_filepath) {
+        Ok(log_file) => {
+            let layer_file = tracing_subscriber::fmt::layer()
+                .with_writer(log_file.with_max_level(TRACING_LEVEL))
+                .with_ansi(false)
+                .with_thread_ids(TRACING_DISPLAY_THREAD)
+                .with_thread_names(TRACING_DISPLAY_THREAD);
+
+            let layer_stdout = tracing_subscriber::fmt::layer()
+                .with_writer(std::io::stdout.with_max_level(TRACING_LEVEL))
+                .with_thread_ids(TRACING_DISPLAY_THREAD)
+                .with_thread_names(TRACING_DISPLAY_THREAD);
+
+            tracing_subscriber::Registry::default()
+                .with(layer_file)
+                .with(layer_stdout)
+                .init();
+        }
+        Err(error) => {
+            println!("Unable to open log file: {}", error);
+        }
+    }
+}
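
For illustration, with the default logs_directory of "data/logs" and the "%Y-%m-%d_%H%M%S" format above, a server started at this commit's timestamp would write to a file named roughly:

    data/logs/recipes_2025-04-12_155819.log

(the exact name depends on the local time at startup).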

View file

@ -34,6 +34,7 @@ mod data;
mod email; mod email;
mod hash; mod hash;
mod html_templates; mod html_templates;
mod log;
mod ron_extractor; mod ron_extractor;
mod ron_utils; mod ron_utils;
mod services; mod services;
@@ -81,18 +82,6 @@ impl axum::response::IntoResponse for AppError {
     }
 }

-#[cfg(debug_assertions)]
-const TRACING_LEVEL: tracing::Level = tracing::Level::DEBUG;
-
-#[cfg(debug_assertions)]
-const TRACING_DISPLAY_THREAD: bool = true;
-
-#[cfg(not(debug_assertions))]
-const TRACING_LEVEL: tracing::Level = tracing::Level::INFO;
-
-#[cfg(not(debug_assertions))]
-const TRACING_DISPLAY_THREAD: bool = false;
-
 #[derive(Debug, Clone)]
 struct Context {
     user: Option<model::User>,
@@ -100,37 +89,38 @@ struct Context {
     dark_theme: bool,
 }

-use tracing_subscriber::{Registry, fmt::layer, layer::SubscriberExt, util::SubscriberInitExt};
-
 // TODO: Should main returns 'Result'?
 #[tokio::main]
 async fn main() {
-    tracing_subscriber::fmt()
-        .with_max_level(TRACING_LEVEL)
-        .with_thread_ids(TRACING_DISPLAY_THREAD)
-        .with_thread_names(TRACING_DISPLAY_THREAD)
-        .init();
+    let config = config::load();
+    log::init(&config.logs_directory);
+
+    event!(Level::INFO, "Configuration: {:?}", config);

-    if !process_args().await {
+    if !process_args(&config.database_directory).await {
         return;
     }

     event!(Level::INFO, "Starting Recipes as web server...");

-    let config = config::load();
-    let port = config.port;
-
-    event!(Level::INFO, "Configuration: {:?}", config);
-
-    let Ok(db_connection) = db::Connection::new().await else {
+    let Ok(db_connection) = db::Connection::new(&config.database_directory).await else {
         event!(Level::ERROR, "Unable to connect to the database");
         return;
     };

-    backup::start(
-        "data",
-        db_connection.clone(),
-        // TODO: take from config.
-        NaiveTime::from_hms_opt(4, 0, 0).expect("Invalid time of day"),
-    );
+    if let Some(backup_time) = config.backup_time {
+        backup::start(
+            config.backups_directory.clone(),
+            db_connection.clone(),
+            backup_time,
+        );
+    } else {
+        event!(Level::INFO, "Backups disabled by config");
+    }
+
+    let port = config.port;

     let state = AppState {
         config,
@@ -474,12 +464,15 @@ struct Args {
 }

 /// Returns `true` if the server can be started.
-async fn process_args() -> bool {
+async fn process_args<P>(database_directory: P) -> bool
+where
+    P: AsRef<Path>,
+{
     let args = Args::parse();

     if args.dbtest {
         // Make a backup of the database.
-        let db_path = Path::new(consts::DB_DIRECTORY).join(consts::DB_FILENAME);
+        let db_path = database_directory.as_ref().join(consts::DB_FILENAME);
         if db_path.exists() {
             let db_path_bckup = (1..)
                 .find_map(|n| {
@@ -498,7 +491,7 @@ async fn process_args() -> bool {
             });
         }

-        match db::Connection::new().await {
+        match db::Connection::new(database_directory).await {
             Ok(con) => {
                 if let Err(error) = con.execute_file("sql/data_test.sql").await {
                     event!(Level::ERROR, "{}", error);