//! Database access for mediums, track sets and tracks.
use super::generate_id;
use super::schema::{mediums, recordings, track_sets, tracks};
use super::{Database, DatabaseError, Recording, DatabaseResult};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
/// Representation of something like a physical audio disc or a folder with
/// audio files (i.e. a collection of tracks for one or more recordings).
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Medium {
    /// A unique ID for the medium.
    pub id: String,

    /// The human identifier for the medium.
    pub name: String,

    /// If applicable, the MusicBrainz DiscID.
    pub discid: Option<String>,

    /// The tracks of the medium, grouped by recording.
    pub tracks: Vec<TrackSet>,
}
/// A set of tracks of one recording within a medium.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct TrackSet {
    /// The recording to which the tracks belong.
    pub recording: Recording,

    /// The actual tracks.
    pub tracks: Vec<Track>,
}
/// A track within a recording on a medium.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Track {
    /// The work parts that are played on this track. They are indices to the
    /// work parts of the work that is associated with the recording.
    pub work_parts: Vec<usize>,

    /// The path to the audio file containing this track. This will not be
    /// included when communicating with the server.
    #[serde(skip)]
    pub path: String,
}
/// Table data for a [`Medium`].
///
/// Field order must match the column order of the `mediums` table, because
/// `Queryable` maps columns to fields positionally.
#[derive(Insertable, Queryable, Debug, Clone)]
#[table_name = "mediums"]
struct MediumRow {
    // The medium's ID (matches `Medium::id`).
    pub id: String,
    // Human identifier of the medium.
    pub name: String,
    // Optional MusicBrainz DiscID.
    pub discid: Option<String>,
}
/// Table data for a [`TrackSet`].
///
/// Field order must match the column order of the `track_sets` table, because
/// `Queryable` maps columns to fields positionally.
#[derive(Insertable, Queryable, Debug, Clone)]
#[table_name = "track_sets"]
struct TrackSetRow {
    // Generated ID of this track set row.
    pub id: String,
    // ID of the medium this track set belongs to.
    pub medium: String,
    // Position of the track set within the medium.
    pub index: i32,
    // ID of the recording the tracks belong to.
    pub recording: String,
}
/// Table data for a [`Track`].
///
/// Field order must match the column order of the `tracks` table, because
/// `Queryable` maps columns to fields positionally.
#[derive(Insertable, Queryable, Debug, Clone)]
#[table_name = "tracks"]
struct TrackRow {
    // Generated ID of this track row.
    pub id: String,
    // ID of the track set this track belongs to.
    pub track_set: String,
    // Position of the track within the track set.
    pub index: i32,
    // Work part indices serialized as a comma separated string.
    pub work_parts: String,
    // Path to the audio file containing this track.
    pub path: String,
}
impl Database {
/// Update an existing medium or insert a new one.
pub fn update_medium(&self, medium: Medium) -> DatabaseResult<()> {
2020-12-20 11:47:27 +01:00
self.defer_foreign_keys()?;
self.connection.transaction::<(), DatabaseError, _>(|| {
2020-12-20 11:47:27 +01:00
let medium_id = &medium.id;
// This will also delete the track sets and tracks.
self.delete_medium(medium_id)?;
2021-01-15 22:27:43 +01:00
// Add the new medium.
let medium_row = MediumRow {
id: medium_id.to_owned(),
name: medium.name.clone(),
discid: medium.discid.clone(),
};
diesel::insert_into(mediums::table)
.values(medium_row)
.execute(&self.connection)?;
2020-12-20 11:47:27 +01:00
for (index, track_set) in medium.tracks.iter().enumerate() {
2021-01-15 22:27:43 +01:00
// Add associated items from the server, if they don't already
// exist.
if self.get_recording(&track_set.recording.id)?.is_none() {
self.update_recording(track_set.recording.clone())?;
}
// Add the actual track set data.
2020-12-20 11:47:27 +01:00
let track_set_id = generate_id();
let track_set_row = TrackSetRow {
id: track_set_id.clone(),
medium: medium_id.to_owned(),
index: index as i32,
recording: track_set.recording.id.clone(),
};
diesel::insert_into(track_sets::table)
.values(track_set_row)
.execute(&self.connection)?;
for (index, track) in track_set.tracks.iter().enumerate() {
let work_parts = track
.work_parts
.iter()
.map(|part_index| part_index.to_string())
.collect::<Vec<String>>()
.join(",");
let track_row = TrackRow {
id: generate_id(),
track_set: track_set_id.clone(),
index: index as i32,
work_parts,
2021-01-15 22:27:43 +01:00
path: track.path.clone(),
2020-12-20 11:47:27 +01:00
};
diesel::insert_into(tracks::table)
.values(track_row)
.execute(&self.connection)?;
}
}
Ok(())
})?;
Ok(())
}
/// Get an existing medium.
pub fn get_medium(&self, id: &str) -> DatabaseResult<Option<Medium>> {
2020-12-20 11:47:27 +01:00
let row = mediums::table
.filter(mediums::id.eq(id))
.load::<MediumRow>(&self.connection)?
.into_iter()
.next();
let medium = match row {
Some(row) => Some(self.get_medium_data(row)?),
None => None,
};
Ok(medium)
}
/// Delete a medium and all of its tracks. This will fail, if the music
/// library contains audio files referencing any of those tracks.
pub fn delete_medium(&self, id: &str) -> DatabaseResult<()> {
2020-12-20 11:47:27 +01:00
diesel::delete(mediums::table.filter(mediums::id.eq(id))).execute(&self.connection)?;
Ok(())
}
2021-01-16 15:08:12 +01:00
/// Get all available track sets for a recording.
pub fn get_track_sets(&self, recording_id: &str) -> DatabaseResult<Vec<TrackSet>> {
2021-01-16 15:08:12 +01:00
let mut track_sets: Vec<TrackSet> = Vec::new();
2021-01-15 22:27:43 +01:00
2021-01-16 15:08:12 +01:00
let rows = track_sets::table
2021-01-15 22:27:43 +01:00
.inner_join(recordings::table.on(recordings::id.eq(track_sets::recording)))
.filter(recordings::id.eq(recording_id))
2021-01-16 15:08:12 +01:00
.select(track_sets::table::all_columns())
.load::<TrackSetRow>(&self.connection)?;
2021-01-15 22:27:43 +01:00
for row in rows {
2021-01-16 15:08:12 +01:00
let track_set = self.get_track_set_from_row(row)?;
track_sets.push(track_set);
2021-01-15 22:27:43 +01:00
}
2021-01-16 15:08:12 +01:00
Ok(track_sets)
2021-01-15 22:27:43 +01:00
}
2020-12-20 11:47:27 +01:00
/// Retrieve all available information on a medium from related tables.
fn get_medium_data(&self, row: MediumRow) -> DatabaseResult<Medium> {
2020-12-20 11:47:27 +01:00
let track_set_rows = track_sets::table
.filter(track_sets::medium.eq(&row.id))
.order_by(track_sets::index)
.load::<TrackSetRow>(&self.connection)?;
let mut track_sets = Vec::new();
for track_set_row in track_set_rows {
2021-01-16 15:08:12 +01:00
let track_set = self.get_track_set_from_row(track_set_row)?;
2020-12-20 11:47:27 +01:00
track_sets.push(track_set);
}
let medium = Medium {
id: row.id,
name: row.name,
discid: row.discid,
tracks: track_sets,
};
Ok(medium)
}
2021-01-16 15:08:12 +01:00
/// Convert a track set row from the database to an actual track set.
fn get_track_set_from_row(&self, row: TrackSetRow) -> DatabaseResult<TrackSet> {
2021-01-16 15:08:12 +01:00
let recording_id = row.recording;
let recording = self
.get_recording(&recording_id)?
.ok_or(DatabaseError::Other(format!(
"Failed to get recording ({}) for track set ({}).",
recording_id,
row.id,
)))?;
2021-01-16 15:08:12 +01:00
let track_rows = tracks::table
.filter(tracks::track_set.eq(row.id))
.order_by(tracks::index)
.load::<TrackRow>(&self.connection)?;
let mut tracks = Vec::new();
for track_row in track_rows {
let work_parts = track_row
.work_parts
.split(',')
.map(|part_index| {
str::parse(part_index)
.or(Err(DatabaseError::Other(format!(
"Failed to parse part index from '{}'.",
track_row.work_parts,
)))?)
})
.collect::<DatabaseResult<Vec<usize>>>()?;
2021-01-16 15:08:12 +01:00
let track = Track {
work_parts,
path: track_row.path,
};
tracks.push(track);
}
let track_set = TrackSet { recording, tracks };
Ok(track_set)
}
2020-12-20 11:47:27 +01:00
}