mirror of https://github.com/johrpan/musicus.git
synced 2025-10-26 11:47:25 +01:00

Update diesel to version 2

parent d6b79fae59
commit 8b45ec4940

11 changed files with 259 additions and 236 deletions

Cargo.lock (generated): 47 changes
@@ -71,12 +71,6 @@ version = "3.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba"
 
-[[package]]
-name = "byteorder"
-version = "1.4.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
-
 [[package]]
 name = "cairo-rs"
 version = "0.16.7"
@@ -229,21 +223,21 @@ dependencies = [
 
 [[package]]
 name = "diesel"
-version = "1.4.8"
+version = "2.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b28135ecf6b7d446b43e27e225622a038cc4e2930a1022f51cdb97ada19b8e4d"
+checksum = "68c186a7418a2aac330bb76cde82f16c36b03a66fb91db32d20214311f9f6545"
 dependencies = [
- "byteorder",
  "diesel_derives",
  "libsqlite3-sys",
 ]
 
 [[package]]
 name = "diesel_derives"
-version = "1.4.1"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45f5098f628d02a7a0f68ddba586fb61e80edec3bdc1be3b921f4ceec60858d3"
+checksum = "143b758c91dbc3fe1fdcb0dba5bd13276c6a66422f2ef5795b58488248a310aa"
 dependencies = [
+ "proc-macro-error",
  "proc-macro2",
  "quote",
  "syn",
@@ -251,10 +245,11 @@ dependencies = [
 
 [[package]]
 name = "diesel_migrations"
-version = "1.4.0"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf3cde8413353dc7f5d72fa8ce0b99a560a359d2c5ef1e5817ca731cd9008f4c"
+checksum = "e9ae22beef5e9d6fab9225ddb073c1c6c1a7a6ded5019d5da11d1e5c5adc34e2"
 dependencies = [
+ "diesel",
  "migrations_internals",
  "migrations_macros",
 ]
@@ -1059,23 +1054,23 @@ dependencies = [
 
 [[package]]
 name = "migrations_internals"
-version = "1.4.1"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b4fc84e4af020b837029e017966f86a1c2d5e83e64b589963d5047525995860"
+checksum = "c493c09323068c01e54c685f7da41a9ccf9219735c3766fbfd6099806ea08fbc"
 dependencies = [
- "diesel",
+ "serde",
+ "toml",
 ]
 
 [[package]]
 name = "migrations_macros"
-version = "1.4.2"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9753f12909fd8d923f75ae5c3258cae1ed3c8ec052e1b38c93c21a6d157f789c"
+checksum = "8a8ff27a350511de30cdabb77147501c36ef02e0451d957abea2f30caffb2b58"
 dependencies = [
  "migrations_internals",
  "proc-macro2",
  "quote",
- "syn",
 ]
 
 [[package]]
@@ -1454,6 +1449,20 @@ name = "serde"
 version = "1.0.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "97fed41fc1a24994d044e6db6935e69511a1153b52c15eb42493b26fa87feba0"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.151"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "255abe9a125a985c05190d687b320c12f9b1f0b99445e608c21ba0782c719ad8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
 
 [[package]]
 name = "sha2"
@@ -4,8 +4,8 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-diesel = { version = "1", features = ["sqlite"] }
-diesel_migrations = "1"
+diesel = { version = "2", features = ["sqlite"] }
+diesel_migrations = "2"
 chrono = "0.4"
 log = "0.4"
 rand = "0.8"
@@ -32,10 +32,10 @@ impl Database {
 
         ensemble.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(ensembles::table)
                 .values(ensemble)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -45,7 +45,7 @@ impl Database {
     pub fn get_ensemble(&self, id: &str) -> Result<Option<Ensemble>> {
         let ensemble = ensembles::table
             .filter(ensembles::id.eq(id))
-            .load::<Ensemble>(&self.connection)?
+            .load::<Ensemble>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -55,13 +55,14 @@ impl Database {
     /// Delete an existing ensemble.
     pub fn delete_ensemble(&self, id: &str) -> Result<()> {
         info!("Deleting ensemble {}", id);
-        diesel::delete(ensembles::table.filter(ensembles::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(ensembles::table.filter(ensembles::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
     /// Get all existing ensembles.
     pub fn get_ensembles(&self) -> Result<Vec<Ensemble>> {
-        let ensembles = ensembles::table.load::<Ensemble>(&self.connection)?;
+        let ensembles = ensembles::table.load::<Ensemble>(&mut *self.connection.lock().unwrap())?;
         Ok(ensembles)
     }
 
@@ -69,7 +70,7 @@ impl Database {
     pub fn get_recent_ensembles(&self) -> Result<Vec<Ensemble>> {
         let ensembles = ensembles::table
             .order(ensembles::last_used.desc())
-            .load::<Ensemble>(&self.connection)?;
+            .load::<Ensemble>(&mut *self.connection.lock().unwrap())?;
 
         Ok(ensembles)
     }
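The pattern repeated throughout these hunks follows from diesel 2 requiring mutable access to the connection: execute() and load() now take &mut a connection, and Connection::transaction() hands the connection into the closure instead of relying on the receiver. Wrapping the SqliteConnection in Arc<Mutex<_>> is what lets the existing &self methods keep their signatures. A minimal standalone sketch of the same idea; the Store type, the open() helper and the items table are illustrative and not taken from this repository:

use std::sync::{Arc, Mutex};

use diesel::prelude::*;

diesel::table! {
    items (id) {
        id -> Text,
        name -> Text,
    }
}

pub struct Store {
    // Shared, lockable connection: diesel 2 needs `&mut SqliteConnection` for queries.
    connection: Arc<Mutex<SqliteConnection>>,
}

impl Store {
    pub fn open(path: &str) -> diesel::ConnectionResult<Self> {
        let connection = SqliteConnection::establish(path)?;
        Ok(Self {
            connection: Arc::new(Mutex::new(connection)),
        })
    }

    pub fn insert_name(&self, id: &str, name: &str) -> diesel::QueryResult<()> {
        // The closure receives the `&mut` connection; use it for every statement inside.
        self.connection.lock().unwrap().transaction(|connection| {
            diesel::replace_into(items::table)
                .values((items::id.eq(id), items::name.eq(name)))
                .execute(connection)?;
            Ok(())
        })
    }

    pub fn names(&self) -> diesel::QueryResult<Vec<String>> {
        // Outside a transaction, re-borrow the mutex guard as `&mut SqliteConnection`.
        items::table
            .select(items::name)
            .load::<String>(&mut *self.connection.lock().unwrap())
    }
}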
@@ -5,7 +5,7 @@ pub enum Error {
     ConnectionError(#[from] diesel::result::ConnectionError),
 
     #[error(transparent)]
-    MigrationsError(#[from] diesel_migrations::RunMigrationsError),
+    Migrations(#[from] Box<dyn std::error::Error + Send + Sync>),
 
     #[error(transparent)]
     QueryError(#[from] diesel::result::Error),
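The variant changes because diesel_migrations 2 reports migration failures as a boxed std::error::Error instead of the old RunMigrationsError type, so the enum wraps that box directly. A sketch of how such an error type fits together with thiserror; the variant names mirror the diff, while the Result alias and the display string on Other are assumptions:

use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    ConnectionError(#[from] diesel::result::ConnectionError),

    // run_pending_migrations() yields Box<dyn std::error::Error + Send + Sync>,
    // so the migration failure is stored as the boxed trait object itself.
    #[error(transparent)]
    Migrations(#[from] Box<dyn std::error::Error + Send + Sync>),

    #[error(transparent)]
    QueryError(#[from] diesel::result::Error),

    // Used elsewhere in the diff as Error::Other("..."); the attribute here is a guess.
    #[error("{0}")]
    Other(&'static str),
}

pub type Result<T> = std::result::Result<T, Error>;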
@@ -32,10 +32,10 @@ impl Database {
 
         instrument.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(instruments::table)
                 .values(instrument)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -45,7 +45,7 @@ impl Database {
     pub fn get_instrument(&self, id: &str) -> Result<Option<Instrument>> {
         let instrument = instruments::table
             .filter(instruments::id.eq(id))
-            .load::<Instrument>(&self.connection)?
+            .load::<Instrument>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -56,14 +56,15 @@ impl Database {
     pub fn delete_instrument(&self, id: &str) -> Result<()> {
         info!("Deleting instrument {}", id);
         diesel::delete(instruments::table.filter(instruments::id.eq(id)))
-            .execute(&self.connection)?;
+            .execute(&mut *self.connection.lock().unwrap())?;
 
         Ok(())
     }
 
     /// Get all existing instruments.
     pub fn get_instruments(&self) -> Result<Vec<Instrument>> {
-        let instruments = instruments::table.load::<Instrument>(&self.connection)?;
+        let instruments =
+            instruments::table.load::<Instrument>(&mut *self.connection.lock().unwrap())?;
 
         Ok(instruments)
     }
@@ -72,7 +73,7 @@ impl Database {
     pub fn get_recent_instruments(&self) -> Result<Vec<Instrument>> {
         let instruments = instruments::table
             .order(instruments::last_used.desc())
-            .load::<Instrument>(&self.connection)?;
+            .load::<Instrument>(&mut *self.connection.lock().unwrap())?;
 
         Ok(instruments)
     }
@@ -1,10 +1,5 @@
-// Required for schema.rs
-#[macro_use]
-extern crate diesel;
-
-// Required for embed_migrations macro in database.rs
-#[macro_use]
-extern crate diesel_migrations;
+use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
+use std::sync::{Arc, Mutex};
 
 use diesel::prelude::*;
 use log::info;
@@ -33,7 +28,7 @@ pub use works::*;
 mod schema;
 
 // This makes the SQL migration scripts accessible from the code.
-embed_migrations!();
+const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
 
 /// Generate a random string suitable as an item ID.
 pub fn generate_id() -> String {
@@ -42,25 +37,28 @@ pub fn generate_id() -> String {
 
 /// Interface to a Musicus database.
 pub struct Database {
-    connection: SqliteConnection,
+    connection: Arc<Mutex<SqliteConnection>>,
 }
 
 impl Database {
     /// Create a new database interface and run migrations if necessary.
     pub fn new(file_name: &str) -> Result<Database> {
         info!("Opening database file '{}'", file_name);
-        let connection = SqliteConnection::establish(file_name)?;
-        diesel::sql_query("PRAGMA foreign_keys = ON").execute(&connection)?;
+        let mut connection = SqliteConnection::establish(file_name)?;
+        diesel::sql_query("PRAGMA foreign_keys = ON").execute(&mut connection)?;
 
         info!("Running migrations if necessary");
-        embedded_migrations::run(&connection)?;
+        connection.run_pending_migrations(MIGRATIONS)?;
 
-        Ok(Database { connection })
+        Ok(Database {
+            connection: Arc::new(Mutex::new(connection)),
+        })
     }
 
     /// Defer all foreign keys for the next transaction.
     fn defer_foreign_keys(&self) -> Result<()> {
-        diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&self.connection)?;
+        diesel::sql_query("PRAGMA defer_foreign_keys = ON")
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 }
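In diesel_migrations 2 the embed_migrations! macro expands to a value rather than generating an embedded_migrations module, and the embedded migrations are applied through the MigrationHarness trait. A condensed sketch of the constructor above, reduced to the migration-related lines; the free function open() is illustrative, and error handling is simplified to a boxed error instead of the crate's own Error type:

use std::sync::{Arc, Mutex};

use diesel::prelude::*;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Embeds the SQL files from the crate's migrations/ directory at compile time.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!();

fn open(
    file_name: &str,
) -> Result<Arc<Mutex<SqliteConnection>>, Box<dyn std::error::Error + Send + Sync>> {
    let mut connection = SqliteConnection::establish(file_name)?;

    // MigrationHarness is implemented for diesel connections; it applies the embedded
    // migrations that have not been run yet and returns the applied versions.
    let applied = connection.run_pending_migrations(MIGRATIONS)?;
    log::info!("Applied {} migrations", applied.len());

    Ok(Arc::new(Mutex::new(connection)))
}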
@@ -109,61 +109,64 @@ impl Database {
         info!("Updating medium {:?}", medium);
         self.defer_foreign_keys()?;
 
-        self.connection.transaction::<(), Error, _>(|| {
-            let medium_id = &medium.id;
-
-            // This will also delete the tracks.
-            self.delete_medium(medium_id)?;
-
-            // Add the new medium.
-
-            let medium_row = MediumRow {
-                id: medium_id.to_owned(),
-                name: medium.name.clone(),
-                discid: medium.discid.clone(),
-                last_used: Some(Utc::now().timestamp()),
-                last_played: medium.last_played.map(|t| t.timestamp()),
-            };
-
-            diesel::insert_into(mediums::table)
-                .values(medium_row)
-                .execute(&self.connection)?;
-
-            for (index, track) in medium.tracks.iter().enumerate() {
-                // Add associated items from the server, if they don't already exist.
-
-                if self.get_recording(&track.recording.id)?.is_none() {
-                    self.update_recording(track.recording.clone())?;
-                }
-
-                // Add the actual track data.
-
-                let work_parts = track
-                    .work_parts
-                    .iter()
-                    .map(|part_index| part_index.to_string())
-                    .collect::<Vec<String>>()
-                    .join(",");
-
-                let track_row = TrackRow {
-                    id: generate_id(),
-                    medium: medium_id.to_owned(),
-                    index: index as i32,
-                    recording: track.recording.id.clone(),
-                    work_parts,
-                    source_index: track.source_index as i32,
-                    path: track.path.clone(),
-                    last_used: Some(Utc::now().timestamp()),
-                    last_played: track.last_played.map(|t| t.timestamp()),
-                };
-
-                diesel::insert_into(tracks::table)
-                    .values(track_row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let medium_id = &medium.id;
+
+                // This will also delete the tracks.
+                self.delete_medium(medium_id)?;
+
+                // Add the new medium.
+
+                let medium_row = MediumRow {
+                    id: medium_id.to_owned(),
+                    name: medium.name.clone(),
+                    discid: medium.discid.clone(),
+                    last_used: Some(Utc::now().timestamp()),
+                    last_played: medium.last_played.map(|t| t.timestamp()),
+                };
+
+                diesel::insert_into(mediums::table)
+                    .values(medium_row)
+                    .execute(connection)?;
+
+                for (index, track) in medium.tracks.iter().enumerate() {
+                    // Add associated items from the server, if they don't already exist.
+
+                    if self.get_recording(&track.recording.id)?.is_none() {
+                        self.update_recording(track.recording.clone())?;
+                    }
+
+                    // Add the actual track data.
+
+                    let work_parts = track
+                        .work_parts
+                        .iter()
+                        .map(|part_index| part_index.to_string())
+                        .collect::<Vec<String>>()
+                        .join(",");
+
+                    let track_row = TrackRow {
+                        id: generate_id(),
+                        medium: medium_id.to_owned(),
+                        index: index as i32,
+                        recording: track.recording.id.clone(),
+                        work_parts,
+                        source_index: track.source_index as i32,
+                        path: track.path.clone(),
+                        last_used: Some(Utc::now().timestamp()),
+                        last_played: track.last_played.map(|t| t.timestamp()),
+                    };
+
+                    diesel::insert_into(tracks::table)
+                        .values(track_row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -172,7 +175,7 @@ impl Database {
     pub fn get_medium(&self, id: &str) -> Result<Option<Medium>> {
         let row = mediums::table
             .filter(mediums::id.eq(id))
-            .load::<MediumRow>(&self.connection)?
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -190,7 +193,7 @@ impl Database {
 
         let rows = mediums::table
             .filter(mediums::discid.nullable().eq(source_id))
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -212,7 +215,7 @@ impl Database {
             .filter(persons::id.eq(person_id))
             .select(mediums::table::all_columns())
             .distinct()
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -234,7 +237,7 @@ impl Database {
             .filter(ensembles::id.eq(ensemble_id))
             .select(mediums::table::all_columns())
             .distinct()
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -248,7 +251,8 @@ impl Database {
     /// library contains audio files referencing any of those tracks.
     pub fn delete_medium(&self, id: &str) -> Result<()> {
         info!("Deleting medium {}", id);
-        diesel::delete(mediums::table.filter(mediums::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(mediums::table.filter(mediums::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
@@ -260,7 +264,7 @@ impl Database {
             .inner_join(recordings::table.on(recordings::id.eq(tracks::recording)))
             .filter(recordings::id.eq(recording_id))
             .select(tracks::table::all_columns())
-            .load::<TrackRow>(&self.connection)?;
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let track = self.get_track_from_row(row)?;
@@ -273,7 +277,7 @@ impl Database {
     /// Get a random track from the database.
     pub fn random_track(&self) -> Result<Track> {
         let row = diesel::sql_query("SELECT * FROM tracks ORDER BY RANDOM() LIMIT 1")
-            .load::<TrackRow>(&self.connection)?
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
            .next()
             .ok_or(Error::Other("Failed to generate random track"))?;
@@ -286,7 +290,7 @@ impl Database {
         let track_rows = tracks::table
             .filter(tracks::medium.eq(&row.id))
             .order_by(tracks::index)
-            .load::<TrackRow>(&self.connection)?;
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?;
 
         let mut tracks = Vec::new();
 
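The larger update methods (update_medium above, update_recording and update_work below) use the turbofish form transaction::<(), Error, _>(...), i.e. a transaction whose error type is the crate's own Error rather than diesel::result::Error; in diesel 2 the closure additionally receives the &mut connection to run the statements on. A stripped-down sketch of that shape; the entries table, the minimal Error stand-in and replace_all() are placeholders, not code from this repository:

use diesel::prelude::*;

diesel::table! {
    entries (id) {
        id -> Text,
        name -> Text,
    }
}

// Minimal stand-in for the crate's Error type; transaction() only requires that the
// chosen error type implements From<diesel::result::Error>.
#[derive(Debug)]
enum Error {
    Query(diesel::result::Error),
}

impl From<diesel::result::Error> for Error {
    fn from(e: diesel::result::Error) -> Self {
        Error::Query(e)
    }
}

fn replace_all(connection: &mut SqliteConnection, rows: &[(String, String)]) -> Result<(), Error> {
    // Explicit turbofish as in the diff: Ok type (), error type Error, closure inferred.
    connection.transaction::<(), Error, _>(|connection| {
        diesel::delete(entries::table).execute(connection)?;

        for (id, name) in rows {
            diesel::insert_into(entries::table)
                .values((entries::id.eq(id.as_str()), entries::name.eq(name.as_str())))
                .execute(connection)?;
        }

        Ok(())
    })
}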
@@ -44,10 +44,10 @@ impl Database {
 
         person.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(persons::table)
                 .values(person)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -57,7 +57,7 @@ impl Database {
     pub fn get_person(&self, id: &str) -> Result<Option<Person>> {
         let person = persons::table
             .filter(persons::id.eq(id))
-            .load::<Person>(&self.connection)?
+            .load::<Person>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -67,13 +67,14 @@ impl Database {
     /// Delete an existing person.
     pub fn delete_person(&self, id: &str) -> Result<()> {
         info!("Deleting person {}", id);
-        diesel::delete(persons::table.filter(persons::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(persons::table.filter(persons::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
     /// Get all existing persons.
     pub fn get_persons(&self) -> Result<Vec<Person>> {
-        let persons = persons::table.load::<Person>(&self.connection)?;
+        let persons = persons::table.load::<Person>(&mut *self.connection.lock().unwrap())?;
 
         Ok(persons)
     }
@@ -82,7 +83,7 @@ impl Database {
     pub fn get_recent_persons(&self) -> Result<Vec<Person>> {
         let persons = persons::table
             .order(persons::last_used.desc())
-            .load::<Person>(&self.connection)?;
+            .load::<Person>(&mut *self.connection.lock().unwrap())?;
 
         Ok(persons)
     }
@@ -131,65 +131,68 @@ impl Database {
     pub fn update_recording(&self, recording: Recording) -> Result<()> {
         info!("Updating recording {:?}", recording);
         self.defer_foreign_keys()?;
-        self.connection.transaction::<(), Error, _>(|| {
-            let recording_id = &recording.id;
-            self.delete_recording(recording_id)?;
-
-            // Add associated items from the server, if they don't already exist.
-
-            if self.get_work(&recording.work.id)?.is_none() {
-                self.update_work(recording.work.clone())?;
-            }
-
-            for performance in &recording.performances {
-                match &performance.performer {
-                    PersonOrEnsemble::Person(person) => {
-                        if self.get_person(&person.id)?.is_none() {
-                            self.update_person(person.clone())?;
-                        }
-                    }
-                    PersonOrEnsemble::Ensemble(ensemble) => {
-                        if self.get_ensemble(&ensemble.id)?.is_none() {
-                            self.update_ensemble(ensemble.clone())?;
-                        }
-                    }
-                }
-
-                if let Some(role) = &performance.role {
-                    if self.get_instrument(&role.id)?.is_none() {
-                        self.update_instrument(role.clone())?;
-                    }
-                }
-            }
-
-            // Add the actual recording.
-
-            let row: RecordingRow = recording.clone().into();
-            diesel::insert_into(recordings::table)
-                .values(row)
-                .execute(&self.connection)?;
-
-            for performance in recording.performances {
-                let (person, ensemble) = match performance.performer {
-                    PersonOrEnsemble::Person(person) => (Some(person.id), None),
-                    PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)),
-                };
-
-                let row = PerformanceRow {
-                    id: rand::random(),
-                    recording: recording_id.to_string(),
-                    person,
-                    ensemble,
-                    role: performance.role.map(|role| role.id),
-                };
-
-                diesel::insert_into(performances::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let recording_id = &recording.id;
+                self.delete_recording(recording_id)?;
+
+                // Add associated items from the server, if they don't already exist.
+
+                if self.get_work(&recording.work.id)?.is_none() {
+                    self.update_work(recording.work.clone())?;
+                }
+
+                for performance in &recording.performances {
+                    match &performance.performer {
+                        PersonOrEnsemble::Person(person) => {
+                            if self.get_person(&person.id)?.is_none() {
+                                self.update_person(person.clone())?;
+                            }
+                        }
+                        PersonOrEnsemble::Ensemble(ensemble) => {
+                            if self.get_ensemble(&ensemble.id)?.is_none() {
+                                self.update_ensemble(ensemble.clone())?;
+                            }
+                        }
+                    }
+
+                    if let Some(role) = &performance.role {
+                        if self.get_instrument(&role.id)?.is_none() {
+                            self.update_instrument(role.clone())?;
+                        }
+                    }
+                }
+
+                // Add the actual recording.
+
+                let row: RecordingRow = recording.clone().into();
+                diesel::insert_into(recordings::table)
+                    .values(row)
+                    .execute(connection)?;
+
+                for performance in recording.performances {
+                    let (person, ensemble) = match performance.performer {
+                        PersonOrEnsemble::Person(person) => (Some(person.id), None),
+                        PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)),
+                    };
+
+                    let row = PerformanceRow {
+                        id: rand::random(),
+                        recording: recording_id.to_string(),
+                        person,
+                        ensemble,
+                        role: performance.role.map(|role| role.id),
+                    };
+
+                    diesel::insert_into(performances::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -198,7 +201,7 @@ impl Database {
     pub fn recording_exists(&self, id: &str) -> Result<bool> {
         let exists = recordings::table
             .filter(recordings::id.eq(id))
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
             .first()
             .is_some();
 
@@ -209,7 +212,7 @@ impl Database {
     pub fn get_recording(&self, id: &str) -> Result<Option<Recording>> {
         let row = recordings::table
             .filter(recordings::id.eq(id))
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -224,7 +227,7 @@ impl Database {
     /// Get a random recording from the database.
     pub fn random_recording(&self) -> Result<Recording> {
         let row = diesel::sql_query("SELECT * FROM recordings ORDER BY RANDOM() LIMIT 1")
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next()
             .ok_or(Error::Other("Failed to find random recording."))?;
@@ -238,7 +241,7 @@ impl Database {
 
         let performance_rows = performances::table
             .filter(performances::recording.eq(&row.id))
-            .load::<PerformanceRow>(&self.connection)?;
+            .load::<PerformanceRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in performance_rows {
             performance_descriptions.push(Performance {
@@ -291,7 +294,7 @@ impl Database {
             .inner_join(persons::table.on(persons::id.nullable().eq(performances::person)))
             .filter(persons::id.eq(person_id))
             .select(recordings::table::all_columns())
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -309,7 +312,7 @@ impl Database {
             .inner_join(ensembles::table.on(ensembles::id.nullable().eq(performances::ensemble)))
             .filter(ensembles::id.eq(ensemble_id))
             .select(recordings::table::all_columns())
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -324,7 +327,7 @@ impl Database {
 
         let rows = recordings::table
             .filter(recordings::work.eq(work_id))
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -338,7 +341,7 @@ impl Database {
     pub fn delete_recording(&self, id: &str) -> Result<()> {
         info!("Deleting recording {}", id);
         diesel::delete(recordings::table.filter(recordings::id.eq(id)))
-            .execute(&self.connection)?;
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 }
@@ -1,4 +1,6 @@
-table! {
+// @generated automatically by Diesel CLI.
+
+diesel::table! {
     ensembles (id) {
         id -> Text,
         name -> Text,
@@ -7,7 +9,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     instrumentations (id) {
         id -> BigInt,
         work -> Text,
@@ -15,7 +17,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     instruments (id) {
         id -> Text,
         name -> Text,
@@ -24,7 +26,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     mediums (id) {
         id -> Text,
         name -> Text,
@@ -34,7 +36,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     performances (id) {
         id -> BigInt,
         recording -> Text,
@@ -44,7 +46,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     persons (id) {
         id -> Text,
         first_name -> Text,
@@ -54,7 +56,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     recordings (id) {
         id -> Text,
         work -> Text,
@@ -64,7 +66,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     tracks (id) {
         id -> Text,
         medium -> Text,
@@ -78,7 +80,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     work_parts (id) {
         id -> BigInt,
         work -> Text,
@@ -87,7 +89,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     works (id) {
         id -> Text,
         composer -> Text,
@@ -97,19 +99,19 @@ table! {
     }
 }
 
-joinable!(instrumentations -> instruments (instrument));
-joinable!(instrumentations -> works (work));
-joinable!(performances -> ensembles (ensemble));
-joinable!(performances -> instruments (role));
-joinable!(performances -> persons (person));
-joinable!(performances -> recordings (recording));
-joinable!(recordings -> works (work));
-joinable!(tracks -> mediums (medium));
-joinable!(tracks -> recordings (recording));
-joinable!(work_parts -> works (work));
-joinable!(works -> persons (composer));
+diesel::joinable!(instrumentations -> instruments (instrument));
+diesel::joinable!(instrumentations -> works (work));
+diesel::joinable!(performances -> ensembles (ensemble));
+diesel::joinable!(performances -> instruments (role));
+diesel::joinable!(performances -> persons (person));
+diesel::joinable!(performances -> recordings (recording));
+diesel::joinable!(recordings -> works (work));
+diesel::joinable!(tracks -> mediums (medium));
+diesel::joinable!(tracks -> recordings (recording));
+diesel::joinable!(work_parts -> works (work));
+diesel::joinable!(works -> persons (composer));
 
-allow_tables_to_appear_in_same_query!(
+diesel::allow_tables_to_appear_in_same_query!(
     ensembles,
     instrumentations,
     instruments,
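The regenerated schema qualifies every macro (diesel::table!, diesel::joinable!, diesel::allow_tables_to_appear_in_same_query!), which is what makes the #[macro_use] extern crate diesel import removed earlier unnecessary. Downstream code consumes the declarations the same way as before; a generic sketch with a hypothetical labels table (deliberately not one of the tables above) and a matching Queryable/Insertable struct:

use diesel::prelude::*;

// Hypothetical table, declared exactly like the generated ones above.
diesel::table! {
    labels (id) {
        id -> Text,
        name -> Text,
    }
}

// Field order and types must match the table! declaration for Queryable to work.
#[derive(Debug, Clone, Queryable, Insertable)]
#[diesel(table_name = labels)]
struct Label {
    id: String,
    name: String,
}

fn insert_label(connection: &mut SqliteConnection, label: Label) -> diesel::QueryResult<usize> {
    diesel::insert_into(labels::table)
        .values(label)
        .execute(connection)
}

fn all_labels(connection: &mut SqliteConnection) -> diesel::QueryResult<Vec<Label>> {
    labels::table.load::<Label>(connection)
}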
@@ -112,60 +112,63 @@ impl Database {
         info!("Updating work {:?}", work);
         self.defer_foreign_keys()?;
 
-        self.connection.transaction::<(), Error, _>(|| {
-            let work_id = &work.id;
-            self.delete_work(work_id)?;
-
-            // Add associated items from the server, if they don't already exist.
-
-            if self.get_person(&work.composer.id)?.is_none() {
-                self.update_person(work.composer.clone())?;
-            }
-
-            for instrument in &work.instruments {
-                if self.get_instrument(&instrument.id)?.is_none() {
-                    self.update_instrument(instrument.clone())?;
-                }
-            }
-
-            // Add the actual work.
-
-            let row: WorkRow = work.clone().into();
-            diesel::insert_into(works::table)
-                .values(row)
-                .execute(&self.connection)?;
-
-            let Work {
-                instruments, parts, ..
-            } = work;
-
-            for instrument in instruments {
-                let row = InstrumentationRow {
-                    id: rand::random(),
-                    work: work_id.to_string(),
-                    instrument: instrument.id,
-                };
-
-                diesel::insert_into(instrumentations::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            for (index, part) in parts.into_iter().enumerate() {
-                let row = WorkPartRow {
-                    id: rand::random(),
-                    work: work_id.to_string(),
-                    part_index: index as i64,
-                    title: part.title,
-                };
-
-                diesel::insert_into(work_parts::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let work_id = &work.id;
+                self.delete_work(work_id)?;
+
+                // Add associated items from the server, if they don't already exist.
+
+                if self.get_person(&work.composer.id)?.is_none() {
+                    self.update_person(work.composer.clone())?;
+                }
+
+                for instrument in &work.instruments {
+                    if self.get_instrument(&instrument.id)?.is_none() {
+                        self.update_instrument(instrument.clone())?;
+                    }
+                }
+
+                // Add the actual work.
+
+                let row: WorkRow = work.clone().into();
+                diesel::insert_into(works::table)
+                    .values(row)
+                    .execute(&mut *self.connection.lock().unwrap())?;
+
+                let Work {
+                    instruments, parts, ..
+                } = work;
+
+                for instrument in instruments {
+                    let row = InstrumentationRow {
+                        id: rand::random(),
+                        work: work_id.to_string(),
+                        instrument: instrument.id,
+                    };
+
+                    diesel::insert_into(instrumentations::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                for (index, part) in parts.into_iter().enumerate() {
+                    let row = WorkPartRow {
+                        id: rand::random(),
+                        work: work_id.to_string(),
+                        part_index: index as i64,
+                        title: part.title,
+                    };
+
+                    diesel::insert_into(work_parts::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -174,7 +177,7 @@ impl Database {
     pub fn get_work(&self, id: &str) -> Result<Option<Work>> {
         let row = works::table
             .filter(works::id.eq(id))
-            .load::<WorkRow>(&self.connection)?
+            .load::<WorkRow>(&mut *self.connection.lock().unwrap())?
             .first()
             .cloned();
 
@@ -192,7 +195,7 @@ impl Database {
 
         let instrumentations = instrumentations::table
             .filter(instrumentations::work.eq(&row.id))
-            .load::<InstrumentationRow>(&self.connection)?;
+            .load::<InstrumentationRow>(&mut *self.connection.lock().unwrap())?;
 
         for instrumentation in instrumentations {
             let id = instrumentation.instrument;
@@ -206,7 +209,7 @@ impl Database {
 
         let part_rows = work_parts::table
             .filter(work_parts::work.eq(&row.id))
-            .load::<WorkPartRow>(&self.connection)?;
+            .load::<WorkPartRow>(&mut *self.connection.lock().unwrap())?;
 
         for part_row in part_rows {
             parts.push(WorkPart {
@@ -234,7 +237,8 @@ impl Database {
     /// this work except for the things that are part of the information on the work it
     pub fn delete_work(&self, id: &str) -> Result<()> {
         info!("Deleting work {}", id);
-        diesel::delete(works::table.filter(works::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(works::table.filter(works::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
@@ -244,7 +248,7 @@ impl Database {
 
         let rows = works::table
             .filter(works::composer.eq(composer_id))
-            .load::<WorkRow>(&self.connection)?;
+            .load::<WorkRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             works.push(self.get_work_data(row)?);
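Taken together, the migration leaves the public surface of Database unchanged: callers still construct it from a file name and call the same &self methods, while the locking and the &mut connection handling stay internal. A minimal usage sketch under that assumption; the method names are taken from the hunks above, the file name is arbitrary, and printing with {:?} assumes the item types derive Debug, as the logging in the hunks suggests:

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Opens the SQLite file, enables foreign keys and applies pending migrations.
    let db = Database::new("musicus.db")?;

    // Reads go through the shared connection behind the mutex.
    for person in db.get_persons()? {
        println!("{:?}", person);
    }

    // Other queries use the same connection handle internally.
    let recording = db.random_recording()?;
    println!("Random recording: {:?}", recording);

    Ok(())
}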