diff --git a/Cargo.lock b/Cargo.lock index 644e344..ec68ea1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -71,12 +71,6 @@ version = "3.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - [[package]] name = "cairo-rs" version = "0.16.7" @@ -229,21 +223,21 @@ dependencies = [ [[package]] name = "diesel" -version = "1.4.8" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b28135ecf6b7d446b43e27e225622a038cc4e2930a1022f51cdb97ada19b8e4d" +checksum = "68c186a7418a2aac330bb76cde82f16c36b03a66fb91db32d20214311f9f6545" dependencies = [ - "byteorder", "diesel_derives", "libsqlite3-sys", ] [[package]] name = "diesel_derives" -version = "1.4.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3" +checksum = "143b758c91dbc3fe1fdcb0dba5bd13276c6a66422f2ef5795b58488248a310aa" dependencies = [ + "proc-macro-error", "proc-macro2", "quote", "syn", @@ -251,10 +245,11 @@ dependencies = [ [[package]] name = "diesel_migrations" -version = "1.4.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf3cde8413353dc7f5d72fa8ce0b99a560a359d2c5ef1e5817ca731cd9008f4c" +checksum = "e9ae22beef5e9d6fab9225ddb073c1c6c1a7a6ded5019d5da11d1e5c5adc34e2" dependencies = [ + "diesel", "migrations_internals", "migrations_macros", ] @@ -1059,23 +1054,23 @@ dependencies = [ [[package]] name = "migrations_internals" -version = "1.4.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b4fc84e4af020b837029e017966f86a1c2d5e83e64b589963d5047525995860" +checksum = 
"c493c09323068c01e54c685f7da41a9ccf9219735c3766fbfd6099806ea08fbc" dependencies = [ - "diesel", + "serde", + "toml", ] [[package]] name = "migrations_macros" -version = "1.4.2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c" +checksum = "8a8ff27a350511de30cdabb77147501c36ef02e0451d957abea2f30caffb2b58" dependencies = [ "migrations_internals", "proc-macro2", "quote", - "syn", ] [[package]] @@ -1454,6 +1449,20 @@ name = "serde" version = "1.0.151" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.151" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] name = "sha2" diff --git a/crates/database/Cargo.toml b/crates/database/Cargo.toml index e2538ea..11a5d5c 100644 --- a/crates/database/Cargo.toml +++ b/crates/database/Cargo.toml @@ -4,8 +4,8 @@ version = "0.1.0" edition = "2021" [dependencies] -diesel = { version = "1", features = ["sqlite"] } -diesel_migrations = "1" +diesel = { version = "2", features = ["sqlite"] } +diesel_migrations = "2" chrono = "0.4" log = "0.4" rand = "0.8" diff --git a/crates/database/src/ensembles.rs b/crates/database/src/ensembles.rs index 350f3eb..1d21ca0 100644 --- a/crates/database/src/ensembles.rs +++ b/crates/database/src/ensembles.rs @@ -32,10 +32,10 @@ impl Database { ensemble.last_used = Some(Utc::now().timestamp()); - self.connection.transaction(|| { + self.connection.lock().unwrap().transaction(|connection| { diesel::replace_into(ensembles::table) .values(ensemble) - .execute(&self.connection) + .execute(connection) })?; Ok(()) @@ -45,7 +45,7 @@ impl Database { pub fn 
get_ensemble(&self, id: &str) -> Result> { let ensemble = ensembles::table .filter(ensembles::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next(); @@ -55,13 +55,14 @@ impl Database { /// Delete an existing ensemble. pub fn delete_ensemble(&self, id: &str) -> Result<()> { info!("Deleting ensemble {}", id); - diesel::delete(ensembles::table.filter(ensembles::id.eq(id))).execute(&self.connection)?; + diesel::delete(ensembles::table.filter(ensembles::id.eq(id))) + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } /// Get all existing ensembles. pub fn get_ensembles(&self) -> Result> { - let ensembles = ensembles::table.load::(&self.connection)?; + let ensembles = ensembles::table.load::(&mut *self.connection.lock().unwrap())?; Ok(ensembles) } @@ -69,7 +70,7 @@ impl Database { pub fn get_recent_ensembles(&self) -> Result> { let ensembles = ensembles::table .order(ensembles::last_used.desc()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; Ok(ensembles) } diff --git a/crates/database/src/error.rs b/crates/database/src/error.rs index 4a78023..c3b8524 100644 --- a/crates/database/src/error.rs +++ b/crates/database/src/error.rs @@ -5,7 +5,7 @@ pub enum Error { ConnectionError(#[from] diesel::result::ConnectionError), #[error(transparent)] - MigrationsError(#[from] diesel_migrations::RunMigrationsError), + Migrations(#[from] Box), #[error(transparent)] QueryError(#[from] diesel::result::Error), diff --git a/crates/database/src/instruments.rs b/crates/database/src/instruments.rs index 76cf31c..1b3f436 100644 --- a/crates/database/src/instruments.rs +++ b/crates/database/src/instruments.rs @@ -32,10 +32,10 @@ impl Database { instrument.last_used = Some(Utc::now().timestamp()); - self.connection.transaction(|| { + self.connection.lock().unwrap().transaction(|connection| { diesel::replace_into(instruments::table) .values(instrument) - .execute(&self.connection) + 
.execute(connection) })?; Ok(()) @@ -45,7 +45,7 @@ impl Database { pub fn get_instrument(&self, id: &str) -> Result> { let instrument = instruments::table .filter(instruments::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next(); @@ -56,14 +56,15 @@ impl Database { pub fn delete_instrument(&self, id: &str) -> Result<()> { info!("Deleting instrument {}", id); diesel::delete(instruments::table.filter(instruments::id.eq(id))) - .execute(&self.connection)?; + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } /// Get all existing instruments. pub fn get_instruments(&self) -> Result> { - let instruments = instruments::table.load::(&self.connection)?; + let instruments = + instruments::table.load::(&mut *self.connection.lock().unwrap())?; Ok(instruments) } @@ -72,7 +73,7 @@ impl Database { pub fn get_recent_instruments(&self) -> Result> { let instruments = instruments::table .order(instruments::last_used.desc()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; Ok(instruments) } diff --git a/crates/database/src/lib.rs b/crates/database/src/lib.rs index 6254ea0..3dc7780 100644 --- a/crates/database/src/lib.rs +++ b/crates/database/src/lib.rs @@ -1,10 +1,5 @@ -// Required for schema.rs -#[macro_use] -extern crate diesel; - -// Required for embed_migrations macro in database.rs -#[macro_use] -extern crate diesel_migrations; +use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness}; +use std::sync::{Arc, Mutex}; use diesel::prelude::*; use log::info; @@ -33,7 +28,7 @@ pub use works::*; mod schema; // This makes the SQL migration scripts accessible from the code. -embed_migrations!(); +const MIGRATIONS: EmbeddedMigrations = embed_migrations!(); /// Generate a random string suitable as an item ID. pub fn generate_id() -> String { @@ -42,25 +37,28 @@ pub fn generate_id() -> String { /// Interface to a Musicus database. 
pub struct Database { - connection: SqliteConnection, + connection: Arc>, } impl Database { /// Create a new database interface and run migrations if necessary. pub fn new(file_name: &str) -> Result { info!("Opening database file '{}'", file_name); - let connection = SqliteConnection::establish(file_name)?; - diesel::sql_query("PRAGMA foreign_keys = ON").execute(&connection)?; + let mut connection = SqliteConnection::establish(file_name)?; + diesel::sql_query("PRAGMA foreign_keys = ON").execute(&mut connection)?; info!("Running migrations if necessary"); - embedded_migrations::run(&connection)?; + connection.run_pending_migrations(MIGRATIONS)?; - Ok(Database { connection }) + Ok(Database { + connection: Arc::new(Mutex::new(connection)), + }) } /// Defer all foreign keys for the next transaction. fn defer_foreign_keys(&self) -> Result<()> { - diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&self.connection)?; + diesel::sql_query("PRAGMA defer_foreign_keys = ON") + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } } diff --git a/crates/database/src/medium.rs b/crates/database/src/medium.rs index 4f1a799..68dd104 100644 --- a/crates/database/src/medium.rs +++ b/crates/database/src/medium.rs @@ -109,61 +109,64 @@ impl Database { info!("Updating medium {:?}", medium); self.defer_foreign_keys()?; - self.connection.transaction::<(), Error, _>(|| { - let medium_id = &medium.id; + self.connection + .lock() + .unwrap() + .transaction::<(), Error, _>(|connection| { + let medium_id = &medium.id; - // This will also delete the tracks. - self.delete_medium(medium_id)?; + // This will also delete the tracks. + self.delete_medium(medium_id)?; - // Add the new medium. + // Add the new medium. 
- let medium_row = MediumRow { - id: medium_id.to_owned(), - name: medium.name.clone(), - discid: medium.discid.clone(), - last_used: Some(Utc::now().timestamp()), - last_played: medium.last_played.map(|t| t.timestamp()), - }; - - diesel::insert_into(mediums::table) - .values(medium_row) - .execute(&self.connection)?; - - for (index, track) in medium.tracks.iter().enumerate() { - // Add associated items from the server, if they don't already exist. - - if self.get_recording(&track.recording.id)?.is_none() { - self.update_recording(track.recording.clone())?; - } - - // Add the actual track data. - - let work_parts = track - .work_parts - .iter() - .map(|part_index| part_index.to_string()) - .collect::>() - .join(","); - - let track_row = TrackRow { - id: generate_id(), - medium: medium_id.to_owned(), - index: index as i32, - recording: track.recording.id.clone(), - work_parts, - source_index: track.source_index as i32, - path: track.path.clone(), + let medium_row = MediumRow { + id: medium_id.to_owned(), + name: medium.name.clone(), + discid: medium.discid.clone(), last_used: Some(Utc::now().timestamp()), - last_played: track.last_played.map(|t| t.timestamp()), + last_played: medium.last_played.map(|t| t.timestamp()), }; - diesel::insert_into(tracks::table) - .values(track_row) - .execute(&self.connection)?; - } + diesel::insert_into(mediums::table) + .values(medium_row) + .execute(connection)?; - Ok(()) - })?; + for (index, track) in medium.tracks.iter().enumerate() { + // Add associated items from the server, if they don't already exist. + + if self.get_recording(&track.recording.id)?.is_none() { + self.update_recording(track.recording.clone())?; + } + + // Add the actual track data. 
+ + let work_parts = track + .work_parts + .iter() + .map(|part_index| part_index.to_string()) + .collect::>() + .join(","); + + let track_row = TrackRow { + id: generate_id(), + medium: medium_id.to_owned(), + index: index as i32, + recording: track.recording.id.clone(), + work_parts, + source_index: track.source_index as i32, + path: track.path.clone(), + last_used: Some(Utc::now().timestamp()), + last_played: track.last_played.map(|t| t.timestamp()), + }; + + diesel::insert_into(tracks::table) + .values(track_row) + .execute(connection)?; + } + + Ok(()) + })?; Ok(()) } @@ -172,7 +175,7 @@ impl Database { pub fn get_medium(&self, id: &str) -> Result> { let row = mediums::table .filter(mediums::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next(); @@ -190,7 +193,7 @@ impl Database { let rows = mediums::table .filter(mediums::discid.nullable().eq(source_id)) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { let medium = self.get_medium_data(row)?; @@ -212,7 +215,7 @@ impl Database { .filter(persons::id.eq(person_id)) .select(mediums::table::all_columns()) .distinct() - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { let medium = self.get_medium_data(row)?; @@ -234,7 +237,7 @@ impl Database { .filter(ensembles::id.eq(ensemble_id)) .select(mediums::table::all_columns()) .distinct() - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { let medium = self.get_medium_data(row)?; @@ -248,7 +251,8 @@ impl Database { /// library contains audio files referencing any of those tracks. 
pub fn delete_medium(&self, id: &str) -> Result<()> { info!("Deleting medium {}", id); - diesel::delete(mediums::table.filter(mediums::id.eq(id))).execute(&self.connection)?; + diesel::delete(mediums::table.filter(mediums::id.eq(id))) + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } @@ -260,7 +264,7 @@ impl Database { .inner_join(recordings::table.on(recordings::id.eq(tracks::recording))) .filter(recordings::id.eq(recording_id)) .select(tracks::table::all_columns()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { let track = self.get_track_from_row(row)?; @@ -273,7 +277,7 @@ impl Database { /// Get a random track from the database. pub fn random_track(&self) -> Result { let row = diesel::sql_query("SELECT * FROM tracks ORDER BY RANDOM() LIMIT 1") - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next() .ok_or(Error::Other("Failed to generate random track"))?; @@ -286,7 +290,7 @@ impl Database { let track_rows = tracks::table .filter(tracks::medium.eq(&row.id)) .order_by(tracks::index) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; let mut tracks = Vec::new(); diff --git a/crates/database/src/persons.rs b/crates/database/src/persons.rs index 3fffb24..6c401c3 100644 --- a/crates/database/src/persons.rs +++ b/crates/database/src/persons.rs @@ -44,10 +44,10 @@ impl Database { person.last_used = Some(Utc::now().timestamp()); - self.connection.transaction(|| { + self.connection.lock().unwrap().transaction(|connection| { diesel::replace_into(persons::table) .values(person) - .execute(&self.connection) + .execute(connection) })?; Ok(()) @@ -57,7 +57,7 @@ impl Database { pub fn get_person(&self, id: &str) -> Result> { let person = persons::table .filter(persons::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? 
.into_iter() .next(); @@ -67,13 +67,14 @@ impl Database { /// Delete an existing person. pub fn delete_person(&self, id: &str) -> Result<()> { info!("Deleting person {}", id); - diesel::delete(persons::table.filter(persons::id.eq(id))).execute(&self.connection)?; + diesel::delete(persons::table.filter(persons::id.eq(id))) + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } /// Get all existing persons. pub fn get_persons(&self) -> Result> { - let persons = persons::table.load::(&self.connection)?; + let persons = persons::table.load::(&mut *self.connection.lock().unwrap())?; Ok(persons) } @@ -82,7 +83,7 @@ impl Database { pub fn get_recent_persons(&self) -> Result> { let persons = persons::table .order(persons::last_used.desc()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; Ok(persons) } diff --git a/crates/database/src/recordings.rs b/crates/database/src/recordings.rs index 84a918e..16a2928 100644 --- a/crates/database/src/recordings.rs +++ b/crates/database/src/recordings.rs @@ -131,65 +131,68 @@ impl Database { pub fn update_recording(&self, recording: Recording) -> Result<()> { info!("Updating recording {:?}", recording); self.defer_foreign_keys()?; - self.connection.transaction::<(), Error, _>(|| { - let recording_id = &recording.id; - self.delete_recording(recording_id)?; + self.connection + .lock() + .unwrap() + .transaction::<(), Error, _>(|connection| { + let recording_id = &recording.id; + self.delete_recording(recording_id)?; - // Add associated items from the server, if they don't already exist. + // Add associated items from the server, if they don't already exist. 
- if self.get_work(&recording.work.id)?.is_none() { - self.update_work(recording.work.clone())?; - } + if self.get_work(&recording.work.id)?.is_none() { + self.update_work(recording.work.clone())?; + } - for performance in &recording.performances { - match &performance.performer { - PersonOrEnsemble::Person(person) => { - if self.get_person(&person.id)?.is_none() { - self.update_person(person.clone())?; + for performance in &recording.performances { + match &performance.performer { + PersonOrEnsemble::Person(person) => { + if self.get_person(&person.id)?.is_none() { + self.update_person(person.clone())?; + } + } + PersonOrEnsemble::Ensemble(ensemble) => { + if self.get_ensemble(&ensemble.id)?.is_none() { + self.update_ensemble(ensemble.clone())?; + } } } - PersonOrEnsemble::Ensemble(ensemble) => { - if self.get_ensemble(&ensemble.id)?.is_none() { - self.update_ensemble(ensemble.clone())?; + + if let Some(role) = &performance.role { + if self.get_instrument(&role.id)?.is_none() { + self.update_instrument(role.clone())?; } } } - if let Some(role) = &performance.role { - if self.get_instrument(&role.id)?.is_none() { - self.update_instrument(role.clone())?; - } - } - } + // Add the actual recording. - // Add the actual recording. 
- - let row: RecordingRow = recording.clone().into(); - diesel::insert_into(recordings::table) - .values(row) - .execute(&self.connection)?; - - for performance in recording.performances { - let (person, ensemble) = match performance.performer { - PersonOrEnsemble::Person(person) => (Some(person.id), None), - PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)), - }; - - let row = PerformanceRow { - id: rand::random(), - recording: recording_id.to_string(), - person, - ensemble, - role: performance.role.map(|role| role.id), - }; - - diesel::insert_into(performances::table) + let row: RecordingRow = recording.clone().into(); + diesel::insert_into(recordings::table) .values(row) - .execute(&self.connection)?; - } + .execute(connection)?; - Ok(()) - })?; + for performance in recording.performances { + let (person, ensemble) = match performance.performer { + PersonOrEnsemble::Person(person) => (Some(person.id), None), + PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)), + }; + + let row = PerformanceRow { + id: rand::random(), + recording: recording_id.to_string(), + person, + ensemble, + role: performance.role.map(|role| role.id), + }; + + diesel::insert_into(performances::table) + .values(row) + .execute(connection)?; + } + + Ok(()) + })?; Ok(()) } @@ -198,7 +201,7 @@ impl Database { pub fn recording_exists(&self, id: &str) -> Result { let exists = recordings::table .filter(recordings::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .first() .is_some(); @@ -209,7 +212,7 @@ impl Database { pub fn get_recording(&self, id: &str) -> Result> { let row = recordings::table .filter(recordings::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next(); @@ -224,7 +227,7 @@ impl Database { /// Get a random recording from the database. 
pub fn random_recording(&self) -> Result { let row = diesel::sql_query("SELECT * FROM recordings ORDER BY RANDOM() LIMIT 1") - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .into_iter() .next() .ok_or(Error::Other("Failed to find random recording."))?; @@ -238,7 +241,7 @@ impl Database { let performance_rows = performances::table .filter(performances::recording.eq(&row.id)) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in performance_rows { performance_descriptions.push(Performance { @@ -291,7 +294,7 @@ impl Database { .inner_join(persons::table.on(persons::id.nullable().eq(performances::person))) .filter(persons::id.eq(person_id)) .select(recordings::table::all_columns()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { recordings.push(self.get_recording_data(row)?); @@ -309,7 +312,7 @@ impl Database { .inner_join(ensembles::table.on(ensembles::id.nullable().eq(performances::ensemble))) .filter(ensembles::id.eq(ensemble_id)) .select(recordings::table::all_columns()) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { recordings.push(self.get_recording_data(row)?); @@ -324,7 +327,7 @@ impl Database { let rows = recordings::table .filter(recordings::work.eq(work_id)) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for row in rows { recordings.push(self.get_recording_data(row)?); @@ -338,7 +341,7 @@ impl Database { pub fn delete_recording(&self, id: &str) -> Result<()> { info!("Deleting recording {}", id); diesel::delete(recordings::table.filter(recordings::id.eq(id))) - .execute(&self.connection)?; + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } } diff --git a/crates/database/src/schema.rs b/crates/database/src/schema.rs index 1d86f97..6dbcc03 100644 --- a/crates/database/src/schema.rs +++ b/crates/database/src/schema.rs @@ -1,4 +1,6 @@ -table! 
{ +// @generated automatically by Diesel CLI. + +diesel::table! { ensembles (id) { id -> Text, name -> Text, @@ -7,7 +9,7 @@ table! { } } -table! { +diesel::table! { instrumentations (id) { id -> BigInt, work -> Text, @@ -15,7 +17,7 @@ table! { } } -table! { +diesel::table! { instruments (id) { id -> Text, name -> Text, @@ -24,7 +26,7 @@ table! { } } -table! { +diesel::table! { mediums (id) { id -> Text, name -> Text, @@ -34,7 +36,7 @@ table! { } } -table! { +diesel::table! { performances (id) { id -> BigInt, recording -> Text, @@ -44,7 +46,7 @@ table! { } } -table! { +diesel::table! { persons (id) { id -> Text, first_name -> Text, @@ -54,7 +56,7 @@ table! { } } -table! { +diesel::table! { recordings (id) { id -> Text, work -> Text, @@ -64,7 +66,7 @@ table! { } } -table! { +diesel::table! { tracks (id) { id -> Text, medium -> Text, @@ -78,7 +80,7 @@ table! { } } -table! { +diesel::table! { work_parts (id) { id -> BigInt, work -> Text, @@ -87,7 +89,7 @@ table! { } } -table! { +diesel::table! { works (id) { id -> Text, composer -> Text, @@ -97,19 +99,19 @@ table! 
{ } } -joinable!(instrumentations -> instruments (instrument)); -joinable!(instrumentations -> works (work)); -joinable!(performances -> ensembles (ensemble)); -joinable!(performances -> instruments (role)); -joinable!(performances -> persons (person)); -joinable!(performances -> recordings (recording)); -joinable!(recordings -> works (work)); -joinable!(tracks -> mediums (medium)); -joinable!(tracks -> recordings (recording)); -joinable!(work_parts -> works (work)); -joinable!(works -> persons (composer)); +diesel::joinable!(instrumentations -> instruments (instrument)); +diesel::joinable!(instrumentations -> works (work)); +diesel::joinable!(performances -> ensembles (ensemble)); +diesel::joinable!(performances -> instruments (role)); +diesel::joinable!(performances -> persons (person)); +diesel::joinable!(performances -> recordings (recording)); +diesel::joinable!(recordings -> works (work)); +diesel::joinable!(tracks -> mediums (medium)); +diesel::joinable!(tracks -> recordings (recording)); +diesel::joinable!(work_parts -> works (work)); +diesel::joinable!(works -> persons (composer)); -allow_tables_to_appear_in_same_query!( +diesel::allow_tables_to_appear_in_same_query!( ensembles, instrumentations, instruments, diff --git a/crates/database/src/works.rs b/crates/database/src/works.rs index 140b266..776bab2 100644 --- a/crates/database/src/works.rs +++ b/crates/database/src/works.rs @@ -112,60 +112,63 @@ impl Database { info!("Updating work {:?}", work); self.defer_foreign_keys()?; - self.connection.transaction::<(), Error, _>(|| { - let work_id = &work.id; - self.delete_work(work_id)?; + self.connection + .lock() + .unwrap() + .transaction::<(), Error, _>(|connection| { + let work_id = &work.id; + self.delete_work(work_id)?; - // Add associated items from the server, if they don't already exist. + // Add associated items from the server, if they don't already exist. 
- if self.get_person(&work.composer.id)?.is_none() { - self.update_person(work.composer.clone())?; - } - - for instrument in &work.instruments { - if self.get_instrument(&instrument.id)?.is_none() { - self.update_instrument(instrument.clone())?; + if self.get_person(&work.composer.id)?.is_none() { + self.update_person(work.composer.clone())?; } - } - // Add the actual work. + for instrument in &work.instruments { + if self.get_instrument(&instrument.id)?.is_none() { + self.update_instrument(instrument.clone())?; + } + } - let row: WorkRow = work.clone().into(); - diesel::insert_into(works::table) - .values(row) - .execute(&self.connection)?; + // Add the actual work. - let Work { - instruments, parts, .. - } = work; - - for instrument in instruments { - let row = InstrumentationRow { - id: rand::random(), - work: work_id.to_string(), - instrument: instrument.id, - }; - - diesel::insert_into(instrumentations::table) + let row: WorkRow = work.clone().into(); + diesel::insert_into(works::table) .values(row) - .execute(&self.connection)?; - } + .execute(connection)?; - for (index, part) in parts.into_iter().enumerate() { - let row = WorkPartRow { - id: rand::random(), - work: work_id.to_string(), - part_index: index as i64, - title: part.title, - }; + let Work { + instruments, parts, ..
+ } = work; - diesel::insert_into(work_parts::table) - .values(row) - .execute(&self.connection)?; - } + for instrument in instruments { + let row = InstrumentationRow { + id: rand::random(), + work: work_id.to_string(), + instrument: instrument.id, + }; - Ok(()) - })?; + diesel::insert_into(instrumentations::table) + .values(row) + .execute(connection)?; + } + + for (index, part) in parts.into_iter().enumerate() { + let row = WorkPartRow { + id: rand::random(), + work: work_id.to_string(), + part_index: index as i64, + title: part.title, + }; + + diesel::insert_into(work_parts::table) + .values(row) + .execute(connection)?; + } + + Ok(()) + })?; Ok(()) } @@ -174,7 +177,7 @@ impl Database { pub fn get_work(&self, id: &str) -> Result> { let row = works::table .filter(works::id.eq(id)) - .load::(&self.connection)? + .load::(&mut *self.connection.lock().unwrap())? .first() .cloned(); @@ -192,7 +195,7 @@ impl Database { let instrumentations = instrumentations::table .filter(instrumentations::work.eq(&row.id)) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for instrumentation in instrumentations { let id = instrumentation.instrument; @@ -206,7 +209,7 @@ impl Database { let part_rows = work_parts::table .filter(work_parts::work.eq(&row.id)) - .load::(&self.connection)?; + .load::(&mut *self.connection.lock().unwrap())?; for part_row in part_rows { parts.push(WorkPart { @@ -234,7 +237,8 @@ impl Database { /// this work except for the things that are part of the information on the work it pub fn delete_work(&self, id: &str) -> Result<()> { info!("Deleting work {}", id); - diesel::delete(works::table.filter(works::id.eq(id))).execute(&self.connection)?; + diesel::delete(works::table.filter(works::id.eq(id))) + .execute(&mut *self.connection.lock().unwrap())?; Ok(()) } @@ -244,7 +248,7 @@ impl Database { let rows = works::table .filter(works::composer.eq(composer_id)) - .load::(&self.connection)?; + .load::(&mut 
*self.connection.lock().unwrap())?; for row in rows { works.push(self.get_work_data(row)?);