Mirror of https://github.com/johrpan/musicus.git (synced 2025-10-26 19:57:25 +01:00)

Update diesel to version 2

commit 8b45ec4940, parent d6b79fae59
11 changed files with 259 additions and 236 deletions
@@ -4,8 +4,8 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-diesel = { version = "1", features = ["sqlite"] }
-diesel_migrations = "1"
+diesel = { version = "2", features = ["sqlite"] }
+diesel_migrations = "2"
 chrono = "0.4"
 log = "0.4"
 rand = "0.8"
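The version bump above drives every other hunk in this commit: Diesel 2 passes database connections as `&mut` instead of `&`. A minimal standalone sketch of that API (illustrative only, not code from this repository):

```rust
// Illustrative sketch of the Diesel 2 connection API assumed by this commit.
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

fn open_in_memory() -> QueryResult<usize> {
    // `establish` is unchanged, but the connection now has to be mutable ...
    let mut connection = SqliteConnection::establish(":memory:")
        .expect("failed to open in-memory SQLite database");

    // ... because `execute`, `load` and friends all take `&mut` in Diesel 2.
    diesel::sql_query("PRAGMA foreign_keys = ON").execute(&mut connection)
}
```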
@@ -32,10 +32,10 @@ impl Database {
 
         ensemble.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(ensembles::table)
                 .values(ensemble)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -45,7 +45,7 @@ impl Database {
     pub fn get_ensemble(&self, id: &str) -> Result<Option<Ensemble>> {
         let ensemble = ensembles::table
             .filter(ensembles::id.eq(id))
-            .load::<Ensemble>(&self.connection)?
+            .load::<Ensemble>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -55,13 +55,14 @@ impl Database {
     /// Delete an existing ensemble.
     pub fn delete_ensemble(&self, id: &str) -> Result<()> {
         info!("Deleting ensemble {}", id);
-        diesel::delete(ensembles::table.filter(ensembles::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(ensembles::table.filter(ensembles::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
     /// Get all existing ensembles.
     pub fn get_ensembles(&self) -> Result<Vec<Ensemble>> {
-        let ensembles = ensembles::table.load::<Ensemble>(&self.connection)?;
+        let ensembles = ensembles::table.load::<Ensemble>(&mut *self.connection.lock().unwrap())?;
         Ok(ensembles)
     }
 
@@ -69,7 +70,7 @@ impl Database {
     pub fn get_recent_ensembles(&self) -> Result<Vec<Ensemble>> {
         let ensembles = ensembles::table
            .order(ensembles::last_used.desc())
-            .load::<Ensemble>(&self.connection)?;
+            .load::<Ensemble>(&mut *self.connection.lock().unwrap())?;
 
         Ok(ensembles)
     }
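In Diesel 2 the `transaction` closure receives the connection as an argument instead of capturing it, which is why the hunks above switch from `|| { ... .execute(&self.connection) }` to `|connection| { ... .execute(connection) }`. A reduced sketch of the new signature (not the project's code):

```rust
// Sketch: Diesel 2 transactions hand the connection to the closure as `&mut`.
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

fn in_transaction(connection: &mut SqliteConnection) -> QueryResult<()> {
    connection.transaction(|connection| {
        // Every statement inside the transaction borrows the closure argument.
        diesel::sql_query("PRAGMA user_version = 1").execute(connection)?;
        Ok(())
    })
}
```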
@@ -5,7 +5,7 @@ pub enum Error {
     ConnectionError(#[from] diesel::result::ConnectionError),
 
     #[error(transparent)]
-    MigrationsError(#[from] diesel_migrations::RunMigrationsError),
+    Migrations(#[from] Box<dyn std::error::Error + Send + Sync>),
 
     #[error(transparent)]
     QueryError(#[from] diesel::result::Error),
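`diesel_migrations` 2 no longer exposes `RunMigrationsError`; `run_pending_migrations` reports failures as `Box<dyn std::error::Error + Send + Sync>`, which is what the new variant wraps. A reduced sketch (assuming the enum is derived with `thiserror`, as the attributes in the hunk suggest):

```rust
// Reduced sketch of the error enum after the change.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]
    Migrations(#[from] Box<dyn std::error::Error + Send + Sync>),

    #[error(transparent)]
    QueryError(#[from] diesel::result::Error),
}

// The `#[from]` attribute is what lets `?` convert the boxed migration error:
fn from_boxed(err: Box<dyn std::error::Error + Send + Sync>) -> Error {
    Error::from(err)
}
```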
@@ -32,10 +32,10 @@ impl Database {
 
         instrument.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(instruments::table)
                 .values(instrument)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -45,7 +45,7 @@ impl Database {
     pub fn get_instrument(&self, id: &str) -> Result<Option<Instrument>> {
         let instrument = instruments::table
             .filter(instruments::id.eq(id))
-            .load::<Instrument>(&self.connection)?
+            .load::<Instrument>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -56,14 +56,15 @@ impl Database {
     pub fn delete_instrument(&self, id: &str) -> Result<()> {
         info!("Deleting instrument {}", id);
         diesel::delete(instruments::table.filter(instruments::id.eq(id)))
-            .execute(&self.connection)?;
+            .execute(&mut *self.connection.lock().unwrap())?;
 
         Ok(())
     }
 
     /// Get all existing instruments.
     pub fn get_instruments(&self) -> Result<Vec<Instrument>> {
-        let instruments = instruments::table.load::<Instrument>(&self.connection)?;
+        let instruments =
+            instruments::table.load::<Instrument>(&mut *self.connection.lock().unwrap())?;
 
         Ok(instruments)
     }
@@ -72,7 +73,7 @@ impl Database {
     pub fn get_recent_instruments(&self) -> Result<Vec<Instrument>> {
         let instruments = instruments::table
             .order(instruments::last_used.desc())
-            .load::<Instrument>(&self.connection)?;
+            .load::<Instrument>(&mut *self.connection.lock().unwrap())?;
 
         Ok(instruments)
     }
@@ -1,10 +1,5 @@
-// Required for schema.rs
-#[macro_use]
-extern crate diesel;
-
-// Required for embed_migrations macro in database.rs
-#[macro_use]
-extern crate diesel_migrations;
+use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
+use std::sync::{Arc, Mutex};
 
 use diesel::prelude::*;
 use log::info;
@@ -33,7 +28,7 @@ pub use works::*;
 mod schema;
 
 // This makes the SQL migration scripts accessible from the code.
-embed_migrations!();
+const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
 
 /// Generate a random string suitable as an item ID.
 pub fn generate_id() -> String {
@@ -42,25 +37,28 @@ pub fn generate_id() -> String {
 
 /// Interface to a Musicus database.
 pub struct Database {
-    connection: SqliteConnection,
+    connection: Arc<Mutex<SqliteConnection>>,
 }
 
 impl Database {
     /// Create a new database interface and run migrations if necessary.
     pub fn new(file_name: &str) -> Result<Database> {
         info!("Opening database file '{}'", file_name);
-        let connection = SqliteConnection::establish(file_name)?;
-        diesel::sql_query("PRAGMA foreign_keys = ON").execute(&connection)?;
+        let mut connection = SqliteConnection::establish(file_name)?;
+        diesel::sql_query("PRAGMA foreign_keys = ON").execute(&mut connection)?;
 
         info!("Running migrations if necessary");
-        embedded_migrations::run(&connection)?;
+        connection.run_pending_migrations(MIGRATIONS)?;
 
-        Ok(Database { connection })
+        Ok(Database {
+            connection: Arc::new(Mutex::new(connection)),
+        })
     }
 
     /// Defer all foreign keys for the next transaction.
     fn defer_foreign_keys(&self) -> Result<()> {
-        diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&self.connection)?;
+        diesel::sql_query("PRAGMA defer_foreign_keys = ON")
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 }
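Two Diesel 2 patterns meet in these hunks: embedded migrations become a `const` run through `MigrationHarness`, and because every query now needs `&mut` access, the connection is wrapped in `Arc<Mutex<_>>` so it can still be used behind `&self`. A condensed sketch under those assumptions (struct and method names are hypothetical; `embed_migrations!()` expects a `migrations/` directory at the crate root):

```rust
use std::sync::{Arc, Mutex};

use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Embeds the SQL migration scripts at compile time.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!();

struct Db {
    connection: Arc<Mutex<SqliteConnection>>,
}

impl Db {
    fn open(file_name: &str) -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
        let mut connection = SqliteConnection::establish(file_name)?;
        connection.run_pending_migrations(MIGRATIONS)?;

        Ok(Self {
            connection: Arc::new(Mutex::new(connection)),
        })
    }

    fn defer_foreign_keys(&self) -> QueryResult<usize> {
        // Locking yields a guard; `&mut *guard` reborrows it as `&mut SqliteConnection`.
        let mut guard = self.connection.lock().unwrap();
        diesel::sql_query("PRAGMA defer_foreign_keys = ON").execute(&mut *guard)
    }
}
```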
@@ -109,61 +109,64 @@ impl Database {
         info!("Updating medium {:?}", medium);
         self.defer_foreign_keys()?;
 
-        self.connection.transaction::<(), Error, _>(|| {
-            let medium_id = &medium.id;
-
-            // This will also delete the tracks.
-            self.delete_medium(medium_id)?;
-
-            // Add the new medium.
-
-            let medium_row = MediumRow {
-                id: medium_id.to_owned(),
-                name: medium.name.clone(),
-                discid: medium.discid.clone(),
-                last_used: Some(Utc::now().timestamp()),
-                last_played: medium.last_played.map(|t| t.timestamp()),
-            };
-
-            diesel::insert_into(mediums::table)
-                .values(medium_row)
-                .execute(&self.connection)?;
-
-            for (index, track) in medium.tracks.iter().enumerate() {
-                // Add associated items from the server, if they don't already exist.
-
-                if self.get_recording(&track.recording.id)?.is_none() {
-                    self.update_recording(track.recording.clone())?;
-                }
-
-                // Add the actual track data.
-
-                let work_parts = track
-                    .work_parts
-                    .iter()
-                    .map(|part_index| part_index.to_string())
-                    .collect::<Vec<String>>()
-                    .join(",");
-
-                let track_row = TrackRow {
-                    id: generate_id(),
-                    medium: medium_id.to_owned(),
-                    index: index as i32,
-                    recording: track.recording.id.clone(),
-                    work_parts,
-                    source_index: track.source_index as i32,
-                    path: track.path.clone(),
-                    last_used: Some(Utc::now().timestamp()),
-                    last_played: track.last_played.map(|t| t.timestamp()),
-                };
-
-                diesel::insert_into(tracks::table)
-                    .values(track_row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let medium_id = &medium.id;
+
+                // This will also delete the tracks.
+                self.delete_medium(medium_id)?;
+
+                // Add the new medium.
+
+                let medium_row = MediumRow {
+                    id: medium_id.to_owned(),
+                    name: medium.name.clone(),
+                    discid: medium.discid.clone(),
+                    last_used: Some(Utc::now().timestamp()),
+                    last_played: medium.last_played.map(|t| t.timestamp()),
+                };
+
+                diesel::insert_into(mediums::table)
+                    .values(medium_row)
+                    .execute(connection)?;
+
+                for (index, track) in medium.tracks.iter().enumerate() {
+                    // Add associated items from the server, if they don't already exist.
+
+                    if self.get_recording(&track.recording.id)?.is_none() {
+                        self.update_recording(track.recording.clone())?;
+                    }
+
+                    // Add the actual track data.
+
+                    let work_parts = track
+                        .work_parts
+                        .iter()
+                        .map(|part_index| part_index.to_string())
+                        .collect::<Vec<String>>()
+                        .join(",");
+
+                    let track_row = TrackRow {
+                        id: generate_id(),
+                        medium: medium_id.to_owned(),
+                        index: index as i32,
+                        recording: track.recording.id.clone(),
+                        work_parts,
+                        source_index: track.source_index as i32,
+                        path: track.path.clone(),
+                        last_used: Some(Utc::now().timestamp()),
+                        last_played: track.last_played.map(|t| t.timestamp()),
+                    };
+
+                    diesel::insert_into(tracks::table)
+                        .values(track_row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -172,7 +175,7 @@ impl Database {
     pub fn get_medium(&self, id: &str) -> Result<Option<Medium>> {
         let row = mediums::table
             .filter(mediums::id.eq(id))
-            .load::<MediumRow>(&self.connection)?
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -190,7 +193,7 @@ impl Database {
 
         let rows = mediums::table
             .filter(mediums::discid.nullable().eq(source_id))
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -212,7 +215,7 @@ impl Database {
             .filter(persons::id.eq(person_id))
             .select(mediums::table::all_columns())
             .distinct()
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -234,7 +237,7 @@ impl Database {
             .filter(ensembles::id.eq(ensemble_id))
             .select(mediums::table::all_columns())
             .distinct()
-            .load::<MediumRow>(&self.connection)?;
+            .load::<MediumRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let medium = self.get_medium_data(row)?;
@@ -248,7 +251,8 @@ impl Database {
     /// library contains audio files referencing any of those tracks.
     pub fn delete_medium(&self, id: &str) -> Result<()> {
         info!("Deleting medium {}", id);
-        diesel::delete(mediums::table.filter(mediums::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(mediums::table.filter(mediums::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
@@ -260,7 +264,7 @@ impl Database {
             .inner_join(recordings::table.on(recordings::id.eq(tracks::recording)))
             .filter(recordings::id.eq(recording_id))
             .select(tracks::table::all_columns())
-            .load::<TrackRow>(&self.connection)?;
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             let track = self.get_track_from_row(row)?;
@@ -273,7 +277,7 @@ impl Database {
     /// Get a random track from the database.
     pub fn random_track(&self) -> Result<Track> {
         let row = diesel::sql_query("SELECT * FROM tracks ORDER BY RANDOM() LIMIT 1")
-            .load::<TrackRow>(&self.connection)?
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next()
             .ok_or(Error::Other("Failed to generate random track"))?;
@@ -286,7 +290,7 @@ impl Database {
         let track_rows = tracks::table
             .filter(tracks::medium.eq(&row.id))
             .order_by(tracks::index)
-            .load::<TrackRow>(&self.connection)?;
+            .load::<TrackRow>(&mut *self.connection.lock().unwrap())?;
 
         let mut tracks = Vec::new();
 
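Raw SQL queries like `random_track` and `random_recording` keep working in Diesel 2, but the loaded row type derives `QueryableByName` and the load call needs a `&mut` connection. A self-contained sketch with a hypothetical row type (not the project's `TrackRow`):

```rust
use diesel::prelude::*;
use diesel::sql_types::Text;
use diesel::sqlite::SqliteConnection;

// Hypothetical row type; fields are matched to result columns by name.
#[derive(QueryableByName)]
struct RandomTrackRow {
    #[diesel(sql_type = Text)]
    id: String,
    #[diesel(sql_type = Text)]
    medium: String,
}

fn random_row(connection: &mut SqliteConnection) -> QueryResult<Option<RandomTrackRow>> {
    let row = diesel::sql_query("SELECT * FROM tracks ORDER BY RANDOM() LIMIT 1")
        .load::<RandomTrackRow>(connection)?
        .into_iter()
        .next();

    Ok(row)
}
```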
@@ -44,10 +44,10 @@ impl Database {
 
         person.last_used = Some(Utc::now().timestamp());
 
-        self.connection.transaction(|| {
+        self.connection.lock().unwrap().transaction(|connection| {
             diesel::replace_into(persons::table)
                 .values(person)
-                .execute(&self.connection)
+                .execute(connection)
         })?;
 
         Ok(())
@@ -57,7 +57,7 @@ impl Database {
     pub fn get_person(&self, id: &str) -> Result<Option<Person>> {
         let person = persons::table
             .filter(persons::id.eq(id))
-            .load::<Person>(&self.connection)?
+            .load::<Person>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -67,13 +67,14 @@ impl Database {
     /// Delete an existing person.
     pub fn delete_person(&self, id: &str) -> Result<()> {
         info!("Deleting person {}", id);
-        diesel::delete(persons::table.filter(persons::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(persons::table.filter(persons::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
     /// Get all existing persons.
     pub fn get_persons(&self) -> Result<Vec<Person>> {
-        let persons = persons::table.load::<Person>(&self.connection)?;
+        let persons = persons::table.load::<Person>(&mut *self.connection.lock().unwrap())?;
 
         Ok(persons)
     }
@@ -82,7 +83,7 @@ impl Database {
     pub fn get_recent_persons(&self) -> Result<Vec<Person>> {
         let persons = persons::table
             .order(persons::last_used.desc())
-            .load::<Person>(&self.connection)?;
+            .load::<Person>(&mut *self.connection.lock().unwrap())?;
 
         Ok(persons)
     }
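The `update_person`, `update_ensemble` and `update_instrument` methods above all rely on `diesel::replace_into`, which is unchanged in Diesel 2 apart from the `&mut` connection it executes against. A self-contained sketch with a hypothetical insertable struct:

```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

diesel::table! {
    persons (id) {
        id -> Text,
        first_name -> Text,
    }
}

#[derive(Insertable)]
#[diesel(table_name = persons)]
struct NewPerson {
    id: String,
    first_name: String,
}

fn upsert_person(connection: &mut SqliteConnection, person: NewPerson) -> QueryResult<usize> {
    // On SQLite this issues REPLACE INTO: insert, or overwrite the row with the same key.
    diesel::replace_into(persons::table)
        .values(person)
        .execute(connection)
}
```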
@@ -131,65 +131,68 @@ impl Database {
     pub fn update_recording(&self, recording: Recording) -> Result<()> {
         info!("Updating recording {:?}", recording);
         self.defer_foreign_keys()?;
-        self.connection.transaction::<(), Error, _>(|| {
-            let recording_id = &recording.id;
-            self.delete_recording(recording_id)?;
-
-            // Add associated items from the server, if they don't already exist.
-
-            if self.get_work(&recording.work.id)?.is_none() {
-                self.update_work(recording.work.clone())?;
-            }
-
-            for performance in &recording.performances {
-                match &performance.performer {
-                    PersonOrEnsemble::Person(person) => {
-                        if self.get_person(&person.id)?.is_none() {
-                            self.update_person(person.clone())?;
-                        }
-                    }
-                    PersonOrEnsemble::Ensemble(ensemble) => {
-                        if self.get_ensemble(&ensemble.id)?.is_none() {
-                            self.update_ensemble(ensemble.clone())?;
-                        }
-                    }
-                }
-
-                if let Some(role) = &performance.role {
-                    if self.get_instrument(&role.id)?.is_none() {
-                        self.update_instrument(role.clone())?;
-                    }
-                }
-            }
-
-            // Add the actual recording.
-
-            let row: RecordingRow = recording.clone().into();
-            diesel::insert_into(recordings::table)
-                .values(row)
-                .execute(&self.connection)?;
-
-            for performance in recording.performances {
-                let (person, ensemble) = match performance.performer {
-                    PersonOrEnsemble::Person(person) => (Some(person.id), None),
-                    PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)),
-                };
-
-                let row = PerformanceRow {
-                    id: rand::random(),
-                    recording: recording_id.to_string(),
-                    person,
-                    ensemble,
-                    role: performance.role.map(|role| role.id),
-                };
-
-                diesel::insert_into(performances::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let recording_id = &recording.id;
+                self.delete_recording(recording_id)?;
+
+                // Add associated items from the server, if they don't already exist.
+
+                if self.get_work(&recording.work.id)?.is_none() {
+                    self.update_work(recording.work.clone())?;
+                }
+
+                for performance in &recording.performances {
+                    match &performance.performer {
+                        PersonOrEnsemble::Person(person) => {
+                            if self.get_person(&person.id)?.is_none() {
+                                self.update_person(person.clone())?;
+                            }
+                        }
+                        PersonOrEnsemble::Ensemble(ensemble) => {
+                            if self.get_ensemble(&ensemble.id)?.is_none() {
+                                self.update_ensemble(ensemble.clone())?;
+                            }
+                        }
+                    }
+
+                    if let Some(role) = &performance.role {
+                        if self.get_instrument(&role.id)?.is_none() {
+                            self.update_instrument(role.clone())?;
+                        }
+                    }
+                }
+
+                // Add the actual recording.
+
+                let row: RecordingRow = recording.clone().into();
+                diesel::insert_into(recordings::table)
+                    .values(row)
+                    .execute(connection)?;
+
+                for performance in recording.performances {
+                    let (person, ensemble) = match performance.performer {
+                        PersonOrEnsemble::Person(person) => (Some(person.id), None),
+                        PersonOrEnsemble::Ensemble(ensemble) => (None, Some(ensemble.id)),
+                    };
+
+                    let row = PerformanceRow {
+                        id: rand::random(),
+                        recording: recording_id.to_string(),
+                        person,
+                        ensemble,
+                        role: performance.role.map(|role| role.id),
+                    };
+
+                    diesel::insert_into(performances::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -198,7 +201,7 @@ impl Database {
     pub fn recording_exists(&self, id: &str) -> Result<bool> {
         let exists = recordings::table
             .filter(recordings::id.eq(id))
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
             .first()
             .is_some();
 
@@ -209,7 +212,7 @@ impl Database {
     pub fn get_recording(&self, id: &str) -> Result<Option<Recording>> {
         let row = recordings::table
             .filter(recordings::id.eq(id))
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
             .into_iter()
             .next();
 
@@ -224,7 +227,7 @@ impl Database {
     /// Get a random recording from the database.
     pub fn random_recording(&self) -> Result<Recording> {
         let row = diesel::sql_query("SELECT * FROM recordings ORDER BY RANDOM() LIMIT 1")
-            .load::<RecordingRow>(&self.connection)?
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?
            .into_iter()
            .next()
            .ok_or(Error::Other("Failed to find random recording."))?;
@@ -238,7 +241,7 @@ impl Database {
 
         let performance_rows = performances::table
             .filter(performances::recording.eq(&row.id))
-            .load::<PerformanceRow>(&self.connection)?;
+            .load::<PerformanceRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in performance_rows {
             performance_descriptions.push(Performance {
@@ -291,7 +294,7 @@ impl Database {
             .inner_join(persons::table.on(persons::id.nullable().eq(performances::person)))
             .filter(persons::id.eq(person_id))
             .select(recordings::table::all_columns())
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -309,7 +312,7 @@ impl Database {
             .inner_join(ensembles::table.on(ensembles::id.nullable().eq(performances::ensemble)))
             .filter(ensembles::id.eq(ensemble_id))
             .select(recordings::table::all_columns())
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -324,7 +327,7 @@ impl Database {
 
         let rows = recordings::table
             .filter(recordings::work.eq(work_id))
-            .load::<RecordingRow>(&self.connection)?;
+            .load::<RecordingRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             recordings.push(self.get_recording_data(row)?);
@@ -338,7 +341,7 @@ impl Database {
     pub fn delete_recording(&self, id: &str) -> Result<()> {
         info!("Deleting recording {}", id);
         diesel::delete(recordings::table.filter(recordings::id.eq(id)))
-            .execute(&self.connection)?;
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 }
@@ -1,4 +1,6 @@
-table! {
+// @generated automatically by Diesel CLI.
+
+diesel::table! {
     ensembles (id) {
         id -> Text,
         name -> Text,
@@ -7,7 +9,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     instrumentations (id) {
         id -> BigInt,
         work -> Text,
@@ -15,7 +17,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     instruments (id) {
         id -> Text,
         name -> Text,
@@ -24,7 +26,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     mediums (id) {
         id -> Text,
         name -> Text,
@@ -34,7 +36,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     performances (id) {
         id -> BigInt,
         recording -> Text,
@@ -44,7 +46,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     persons (id) {
         id -> Text,
         first_name -> Text,
@@ -54,7 +56,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     recordings (id) {
         id -> Text,
         work -> Text,
@@ -64,7 +66,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     tracks (id) {
         id -> Text,
         medium -> Text,
@@ -78,7 +80,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     work_parts (id) {
         id -> BigInt,
         work -> Text,
@@ -87,7 +89,7 @@ table! {
     }
 }
 
-table! {
+diesel::table! {
     works (id) {
         id -> Text,
         composer -> Text,
@@ -97,19 +99,19 @@ table! {
     }
 }
 
-joinable!(instrumentations -> instruments (instrument));
-joinable!(instrumentations -> works (work));
-joinable!(performances -> ensembles (ensemble));
-joinable!(performances -> instruments (role));
-joinable!(performances -> persons (person));
-joinable!(performances -> recordings (recording));
-joinable!(recordings -> works (work));
-joinable!(tracks -> mediums (medium));
-joinable!(tracks -> recordings (recording));
-joinable!(work_parts -> works (work));
-joinable!(works -> persons (composer));
+diesel::joinable!(instrumentations -> instruments (instrument));
+diesel::joinable!(instrumentations -> works (work));
+diesel::joinable!(performances -> ensembles (ensemble));
+diesel::joinable!(performances -> instruments (role));
+diesel::joinable!(performances -> persons (person));
+diesel::joinable!(performances -> recordings (recording));
+diesel::joinable!(recordings -> works (work));
+diesel::joinable!(tracks -> mediums (medium));
+diesel::joinable!(tracks -> recordings (recording));
+diesel::joinable!(work_parts -> works (work));
+diesel::joinable!(works -> persons (composer));
 
-allow_tables_to_appear_in_same_query!(
+diesel::allow_tables_to_appear_in_same_query!(
     ensembles,
     instrumentations,
     instruments,
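The schema rewrite is mechanical: Diesel 2's CLI emits fully qualified `diesel::table!`, `diesel::joinable!` and `diesel::allow_tables_to_appear_in_same_query!` invocations, which is what makes the `#[macro_use] extern crate diesel;` removed earlier unnecessary. A minimal sketch of how such a table definition is consumed (illustrative only, trimmed columns):

```rust
// A pared-down table definition in the Diesel 2 style; no #[macro_use] needed.
diesel::table! {
    ensembles (id) {
        id -> Text,
        name -> Text,
    }
}

fn ensemble_names(
    connection: &mut diesel::sqlite::SqliteConnection,
) -> diesel::QueryResult<Vec<String>> {
    use diesel::prelude::*;

    ensembles::table
        .select(ensembles::name)
        .load::<String>(connection)
}
```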
@@ -112,60 +112,63 @@ impl Database {
         info!("Updating work {:?}", work);
         self.defer_foreign_keys()?;
 
-        self.connection.transaction::<(), Error, _>(|| {
-            let work_id = &work.id;
-            self.delete_work(work_id)?;
-
-            // Add associated items from the server, if they don't already exist.
-
-            if self.get_person(&work.composer.id)?.is_none() {
-                self.update_person(work.composer.clone())?;
-            }
-
-            for instrument in &work.instruments {
-                if self.get_instrument(&instrument.id)?.is_none() {
-                    self.update_instrument(instrument.clone())?;
-                }
-            }
-
-            // Add the actual work.
-
-            let row: WorkRow = work.clone().into();
-            diesel::insert_into(works::table)
-                .values(row)
-                .execute(&self.connection)?;
-
-            let Work {
-                instruments, parts, ..
-            } = work;
-
-            for instrument in instruments {
-                let row = InstrumentationRow {
-                    id: rand::random(),
-                    work: work_id.to_string(),
-                    instrument: instrument.id,
-                };
-
-                diesel::insert_into(instrumentations::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            for (index, part) in parts.into_iter().enumerate() {
-                let row = WorkPartRow {
-                    id: rand::random(),
-                    work: work_id.to_string(),
-                    part_index: index as i64,
-                    title: part.title,
-                };
-
-                diesel::insert_into(work_parts::table)
-                    .values(row)
-                    .execute(&self.connection)?;
-            }
-
-            Ok(())
-        })?;
+        self.connection
+            .lock()
+            .unwrap()
+            .transaction::<(), Error, _>(|connection| {
+                let work_id = &work.id;
+                self.delete_work(work_id)?;
+
+                // Add associated items from the server, if they don't already exist.
+
+                if self.get_person(&work.composer.id)?.is_none() {
+                    self.update_person(work.composer.clone())?;
+                }
+
+                for instrument in &work.instruments {
+                    if self.get_instrument(&instrument.id)?.is_none() {
+                        self.update_instrument(instrument.clone())?;
+                    }
+                }
+
+                // Add the actual work.
+
+                let row: WorkRow = work.clone().into();
+                diesel::insert_into(works::table)
+                    .values(row)
+                    .execute(&mut *self.connection.lock().unwrap())?;
+
+                let Work {
+                    instruments, parts, ..
+                } = work;
+
+                for instrument in instruments {
+                    let row = InstrumentationRow {
+                        id: rand::random(),
+                        work: work_id.to_string(),
+                        instrument: instrument.id,
+                    };
+
+                    diesel::insert_into(instrumentations::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                for (index, part) in parts.into_iter().enumerate() {
+                    let row = WorkPartRow {
+                        id: rand::random(),
+                        work: work_id.to_string(),
+                        part_index: index as i64,
+                        title: part.title,
+                    };
+
+                    diesel::insert_into(work_parts::table)
+                        .values(row)
+                        .execute(connection)?;
+                }
+
+                Ok(())
+            })?;
 
         Ok(())
     }
@@ -174,7 +177,7 @@ impl Database {
     pub fn get_work(&self, id: &str) -> Result<Option<Work>> {
         let row = works::table
             .filter(works::id.eq(id))
-            .load::<WorkRow>(&self.connection)?
+            .load::<WorkRow>(&mut *self.connection.lock().unwrap())?
             .first()
             .cloned();
 
@@ -192,7 +195,7 @@ impl Database {
 
         let instrumentations = instrumentations::table
             .filter(instrumentations::work.eq(&row.id))
-            .load::<InstrumentationRow>(&self.connection)?;
+            .load::<InstrumentationRow>(&mut *self.connection.lock().unwrap())?;
 
         for instrumentation in instrumentations {
             let id = instrumentation.instrument;
@@ -206,7 +209,7 @@ impl Database {
 
         let part_rows = work_parts::table
             .filter(work_parts::work.eq(&row.id))
-            .load::<WorkPartRow>(&self.connection)?;
+            .load::<WorkPartRow>(&mut *self.connection.lock().unwrap())?;
 
         for part_row in part_rows {
             parts.push(WorkPart {
@@ -234,7 +237,8 @@ impl Database {
     /// this work except for the things that are part of the information on the work it
     pub fn delete_work(&self, id: &str) -> Result<()> {
         info!("Deleting work {}", id);
-        diesel::delete(works::table.filter(works::id.eq(id))).execute(&self.connection)?;
+        diesel::delete(works::table.filter(works::id.eq(id)))
+            .execute(&mut *self.connection.lock().unwrap())?;
         Ok(())
     }
 
@@ -244,7 +248,7 @@ impl Database {
 
         let rows = works::table
             .filter(works::composer.eq(composer_id))
-            .load::<WorkRow>(&self.connection)?;
+            .load::<WorkRow>(&mut *self.connection.lock().unwrap())?;
 
         for row in rows {
             works.push(self.get_work_data(row)?);