database: Add a lot of logging

Elias Projahn 2021-04-25 23:12:19 +02:00
parent 20683ca824
commit f967f6ade9
9 changed files with 30 additions and 4 deletions

View file

@@ -6,6 +6,7 @@ edition = "2018"
 [dependencies]
 diesel = { version = "1.4.5", features = ["sqlite"] }
 diesel_migrations = "1.4.0"
+log = "0.4.14"
 rand = "0.7.3"
 serde = { version = "1.0.117", features = ["derive"] }
 serde_json = "1.0.59"

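Note that log is only a logging facade; this commit does not add a logger implementation, so the new messages stay silent until a consuming binary installs one. A minimal sketch, assuming env_logger (not a dependency added here) as that implementation:

// Sketch only: env_logger is an assumption, not part of this commit. Any
// implementation of the `log` facade works the same way.
use log::info;

fn main() {
    // Honor RUST_LOG if set, otherwise default to the `info` level so the
    // statements added in this commit become visible.
    env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();

    info!("Logging initialized");
}

With a setup like this, running with RUST_LOG=debug would additionally surface the debug! messages added to the database thread further below.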
View file

@@ -1,6 +1,7 @@
 use super::schema::ensembles;
 use super::{Database, Result};
 use diesel::prelude::*;
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// An ensemble that takes part in recordings.
@@ -14,6 +15,7 @@ pub struct Ensemble {
 impl Database {
     /// Update an existing ensemble or insert a new one.
     pub fn update_ensemble(&self, ensemble: Ensemble) -> Result<()> {
+        info!("Updating ensemble {:?}", ensemble);
         self.defer_foreign_keys()?;
 
         self.connection.transaction(|| {
@@ -38,15 +40,14 @@ impl Database {
     /// Delete an existing ensemble.
     pub fn delete_ensemble(&self, id: &str) -> Result<()> {
+        info!("Deleting ensemble {}", id);
         diesel::delete(ensembles::table.filter(ensembles::id.eq(id))).execute(&self.connection)?;
         Ok(())
     }
 
     /// Get all existing ensembles.
     pub fn get_ensembles(&self) -> Result<Vec<Ensemble>> {
         let ensembles = ensembles::table.load::<Ensemble>(&self.connection)?;
         Ok(ensembles)
     }
 }

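The same pattern repeats in the files below: the update_* methods log the whole value with {:?} (which requires Debug) before any database work starts, while the delete_* methods only have an id to log. A hedged sketch of that pattern, with a hypothetical item type standing in for Ensemble, Instrument and the other structs:

// Sketch only: HypotheticalItem stands in for the real database structs.
use log::info;

#[derive(Debug)]
struct HypotheticalItem {
    id: String,
    name: String,
}

fn update_item(item: &HypotheticalItem) {
    // Log before touching the database, so a failed write still leaves a
    // trace of what was attempted.
    info!("Updating item {:?}", item);
    // ... the database write would go here ...
}

fn delete_item(id: &str) {
    // Deletes only log the id, since that is all the method receives.
    info!("Deleting item {}", id);
    // ... the database delete would go here ...
}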
View file

@@ -1,6 +1,7 @@
 use super::schema::instruments;
 use super::{Database, Result};
 use diesel::prelude::*;
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// An instrument or any other possible role within a recording.
@@ -14,6 +15,7 @@ pub struct Instrument {
 impl Database {
     /// Update an existing instrument or insert a new one.
     pub fn update_instrument(&self, instrument: Instrument) -> Result<()> {
+        info!("Updating instrument {:?}", instrument);
         self.defer_foreign_keys()?;
 
         self.connection.transaction(|| {
@@ -38,6 +40,7 @@ impl Database {
     /// Delete an existing instrument.
     pub fn delete_instrument(&self, id: &str) -> Result<()> {
+        info!("Deleting instrument {}", id);
         diesel::delete(instruments::table.filter(instruments::id.eq(id)))
             .execute(&self.connection)?;

View file

@@ -7,6 +7,7 @@ extern crate diesel;
 extern crate diesel_migrations;
 
 use diesel::prelude::*;
+use log::info;
 
 pub mod ensembles;
 pub use ensembles::*;
@@ -53,9 +54,11 @@ pub struct Database {
 impl Database {
     /// Create a new database interface and run migrations if necessary.
     pub fn new(file_name: &str) -> Result<Database> {
+        info!("Opening database file '{}'", file_name);
         let connection = SqliteConnection::establish(file_name)?;
         diesel::sql_query("PRAGMA foreign_keys = ON").execute(&connection)?;
 
+        info!("Running migrations if necessary");
         embedded_migrations::run(&connection)?;
 
         Ok(Database { connection })

View file

@@ -2,6 +2,7 @@ use super::generate_id;
 use super::schema::{ensembles, mediums, performances, persons, recordings, tracks};
 use super::{Database, Error, Recording, Result};
 use diesel::prelude::*;
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// Representation of something like a physical audio disc or a folder with
@@ -68,6 +69,7 @@ struct TrackRow {
 impl Database {
     /// Update an existing medium or insert a new one.
     pub fn update_medium(&self, medium: Medium) -> Result<()> {
+        info!("Updating medium {:?}", medium);
         self.defer_foreign_keys()?;
 
         self.connection.transaction::<(), Error, _>(|| {
@@ -204,6 +206,7 @@ impl Database {
     /// Delete a medium and all of its tracks. This will fail if the music
     /// library contains audio files referencing any of those tracks.
     pub fn delete_medium(&self, id: &str) -> Result<()> {
+        info!("Deleting medium {}", id);
         diesel::delete(mediums::table.filter(mediums::id.eq(id))).execute(&self.connection)?;
         Ok(())
     }

View file

@@ -1,6 +1,7 @@
 use super::schema::persons;
 use super::{Database, Result};
 use diesel::prelude::*;
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// A person that is a composer, an interpreter or both.
@@ -27,6 +28,7 @@ impl Person {
 impl Database {
     /// Update an existing person or insert a new one.
     pub fn update_person(&self, person: Person) -> Result<()> {
+        info!("Updating person {:?}", person);
         self.defer_foreign_keys()?;
 
         self.connection.transaction(|| {
@@ -51,8 +53,8 @@ impl Database {
     /// Delete an existing person.
     pub fn delete_person(&self, id: &str) -> Result<()> {
+        info!("Deleting person {}", id);
         diesel::delete(persons::table.filter(persons::id.eq(id))).execute(&self.connection)?;
         Ok(())
     }

View file

@@ -2,6 +2,7 @@ use super::generate_id;
 use super::schema::{ensembles, performances, persons, recordings};
 use super::{Database, Ensemble, Error, Instrument, Person, Result, Work};
 use diesel::prelude::*;
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// Database table data for a recording.
@@ -120,6 +121,7 @@ impl Database {
     /// Update an existing recording or insert a new one.
     // TODO: Think about whether to also insert the other items.
     pub fn update_recording(&self, recording: Recording) -> Result<()> {
+        info!("Updating recording {:?}", recording);
         self.defer_foreign_keys()?;
         self.connection.transaction::<(), Error, _>(|| {
             let recording_id = &recording.id;
@@ -308,6 +310,7 @@ impl Database {
     /// Delete an existing recording. This will fail if there are still references to this
     /// recording from other tables that are not directly part of the recording data.
     pub fn delete_recording(&self, id: &str) -> Result<()> {
+        info!("Deleting recording {}", id);
         diesel::delete(recordings::table.filter(recordings::id.eq(id)))
             .execute(&self.connection)?;
         Ok(())

View file

@@ -1,9 +1,11 @@
 use super::*;
+use log::debug;
 use tokio::sync::oneshot::{self, Sender};
 use std::sync::mpsc;
 use std::thread;
 
 /// An action the database thread can perform.
+#[derive(Debug)]
 pub enum Action {
     UpdatePerson(Person, Sender<Result<()>>),
     GetPerson(String, Sender<Result<Option<Person>>>),
@@ -50,6 +52,8 @@ impl DbThread {
         let (ready_sender, ready_receiver) = oneshot::channel();
 
         thread::spawn(move || {
+            debug!("Database thread for '{}' started", path);
+
             let db = match Database::new(&path) {
                 Ok(db) => {
                     ready_sender.send(Ok(())).unwrap();
@@ -62,6 +66,7 @@
             };
 
             for action in action_receiver {
+                debug!("Database thread for '{}' got action {:?}", path, action);
                 match action {
                     UpdatePerson(person, sender) => {
                         sender.send(db.update_person(person)).unwrap();
@@ -153,6 +158,8 @@
                     }
                 }
             }
+
+            debug!("Database thread for '{}' stopped", path);
         });
 
         ready_receiver.await??;

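The #[derive(Debug)] on Action exists so the new debug! call can format the whole action with {:?}; the derive compiles because the payload types and the oneshot senders are themselves Debug. A small sketch of the same idea with a simplified, hypothetical action enum:

// Sketch only: ExampleAction is a stand-in for the much larger Action enum.
use log::debug;
use tokio::sync::oneshot::Sender;

#[derive(Debug)]
enum ExampleAction {
    Update(String, Sender<bool>),
    Delete(String, Sender<bool>),
}

fn handle(action: ExampleAction) {
    // Mirrors the database thread's loop: log the whole action, then reply
    // over the oneshot channel.
    debug!("Got action {:?}", action);
    match action {
        ExampleAction::Update(_name, sender) | ExampleAction::Delete(_name, sender) => {
            let _ = sender.send(true);
        }
    }
}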
View file

@@ -3,6 +3,7 @@ use super::schema::{instrumentations, work_parts, work_sections, works};
 use super::{Database, Error, Instrument, Person, Result};
 use diesel::prelude::*;
 use diesel::{Insertable, Queryable};
+use log::info;
 use serde::{Deserialize, Serialize};
 
 /// Table row data for a work.
@@ -103,6 +104,7 @@ impl Database {
     /// Update an existing work or insert a new one.
     // TODO: Think about also inserting related items.
     pub fn update_work(&self, work: Work) -> Result<()> {
+        info!("Updating work {:?}", work);
         self.defer_foreign_keys()?;
 
         self.connection.transaction::<(), Error, _>(|| {
@@ -256,6 +258,7 @@ impl Database {
     /// Delete an existing work. This will fail if there are still other tables that relate to
     /// this work except for the things that are part of the information on the work itself.
     pub fn delete_work(&self, id: &str) -> Result<()> {
+        info!("Deleting work {}", id);
         diesel::delete(works::table.filter(works::id.eq(id))).execute(&self.connection)?;
         Ok(())
     }