Refactor db schema and use Diesel

This commit is contained in:
Elias Projahn 2024-03-23 18:06:46 +01:00
parent 2f6676ba3b
commit 220821a0e0
14 changed files with 1310 additions and 599 deletions

244
Cargo.lock generated
View file

@ -2,18 +2,6 @@
# It is not intended for manual editing. # It is not intended for manual editing.
version = 3 version = 3
[[package]]
name = "ahash"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]] [[package]]
name = "aho-corasick" name = "aho-corasick"
version = "1.1.2" version = "1.1.2"
@ -23,12 +11,6 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "allocator-api2"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]] [[package]]
name = "android-tzdata" name = "android-tzdata"
version = "0.1.1" version = "0.1.1"
@ -166,6 +148,59 @@ dependencies = [
"libdbus-sys", "libdbus-sys",
] ]
[[package]]
name = "deranged"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
dependencies = [
"powerfmt",
]
[[package]]
name = "diesel"
version = "2.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03fc05c17098f21b89bc7d98fe1dd3cce2c11c2ad8e145f2a44fe08ed28eb559"
dependencies = [
"chrono",
"diesel_derives",
"libsqlite3-sys",
"time",
]
[[package]]
name = "diesel_derives"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d02eecb814ae714ffe61ddc2db2dd03e6c49a42e269b5001355500d431cce0c"
dependencies = [
"diesel_table_macro_syntax",
"proc-macro2",
"quote",
"syn 2.0.39",
]
[[package]]
name = "diesel_migrations"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6036b3f0120c5961381b570ee20a02432d7e2d27ea60de9578799cf9156914ac"
dependencies = [
"diesel",
"migrations_internals",
"migrations_macros",
]
[[package]]
name = "diesel_table_macro_syntax"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc5557efc453706fed5e4fa85006fe9817c224c3f480a34c7e5959fd700921c5"
dependencies = [
"syn 2.0.39",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.9.0" version = "1.9.0"
@ -178,18 +213,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "fallible-iterator"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]] [[package]]
name = "field-offset" name = "field-offset"
version = "0.3.6" version = "0.3.6"
@ -808,19 +831,6 @@ name = "hashbrown"
version = "0.14.2" version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156"
dependencies = [
"ahash",
"allocator-api2",
]
[[package]]
name = "hashlink"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
dependencies = [
"hashbrown",
]
[[package]] [[package]]
name = "heck" name = "heck"
@ -870,6 +880,12 @@ dependencies = [
"either", "either",
] ]
[[package]]
name = "itoa"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]] [[package]]
name = "js-sys" name = "js-sys"
version = "0.3.65" version = "0.3.65"
@ -934,11 +950,10 @@ dependencies = [
[[package]] [[package]]
name = "libsqlite3-sys" name = "libsqlite3-sys"
version = "0.26.0" version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f"
dependencies = [ dependencies = [
"cc",
"pkg-config", "pkg-config",
"vcpkg", "vcpkg",
] ]
@ -986,6 +1001,27 @@ dependencies = [
"autocfg", "autocfg",
] ]
[[package]]
name = "migrations_internals"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f23f71580015254b020e856feac3df5878c2c7a8812297edd6c0a485ac9dada"
dependencies = [
"serde",
"toml 0.7.8",
]
[[package]]
name = "migrations_macros"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cce3325ac70e67bbab5bd837a31cae01f1a6db64e0e744a33cb03a543469ef08"
dependencies = [
"migrations_internals",
"proc-macro2",
"quote",
]
[[package]] [[package]]
name = "mpris-player" name = "mpris-player"
version = "0.6.3" version = "0.6.3"
@ -1006,7 +1042,10 @@ checksum = "956787520e75e9bd233246045d19f42fb73242759cc57fba9611d940ae96d4b0"
name = "musicus" name = "musicus"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"chrono", "chrono",
"diesel",
"diesel_migrations",
"fragile", "fragile",
"gettext-rs", "gettext-rs",
"gstreamer-play", "gstreamer-play",
@ -1016,7 +1055,8 @@ dependencies = [
"mpris-player", "mpris-player",
"once_cell", "once_cell",
"rand", "rand",
"rusqlite", "serde",
"serde_json",
"thiserror", "thiserror",
"tracing-subscriber", "tracing-subscriber",
"uuid", "uuid",
@ -1032,6 +1072,12 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]] [[package]]
name = "num-integer" name = "num-integer"
version = "0.1.45" version = "0.1.45"
@ -1161,6 +1207,12 @@ version = "0.3.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
[[package]]
name = "powerfmt"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]] [[package]]
name = "ppv-lite86" name = "ppv-lite86"
version = "0.2.17" version = "0.2.17"
@ -1296,20 +1348,6 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "rusqlite"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2"
dependencies = [
"bitflags 2.4.1",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]] [[package]]
name = "rustc_version" name = "rustc_version"
version = "0.4.0" version = "0.4.0"
@ -1319,6 +1357,12 @@ dependencies = [
"semver", "semver",
] ]
[[package]]
name = "ryu"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]] [[package]]
name = "semver" name = "semver"
version = "1.0.20" version = "1.0.20"
@ -1345,6 +1389,17 @@ dependencies = [
"syn 2.0.39", "syn 2.0.39",
] ]
[[package]]
name = "serde_json"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0652c533506ad7a2e353cce269330d6afd8bdfb6d75e0ace5b35aacbd7b9e9"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]] [[package]]
name = "serde_spanned" name = "serde_spanned"
version = "0.6.4" version = "0.6.4"
@ -1409,7 +1464,7 @@ dependencies = [
"cfg-expr", "cfg-expr",
"heck", "heck",
"pkg-config", "pkg-config",
"toml", "toml 0.8.8",
"version-compare", "version-compare",
] ]
@ -1455,6 +1510,49 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "time"
version = "0.3.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
[[package]]
name = "time-macros"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774"
dependencies = [
"num-conv",
"time-core",
]
[[package]]
name = "toml"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.19.15",
]
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.8.8" version = "0.8.8"
@ -1483,6 +1581,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"serde",
"serde_spanned",
"toml_datetime", "toml_datetime",
"winnow", "winnow",
] ]
@ -1807,23 +1907,3 @@ checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "zerocopy"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e97e415490559a91254a2979b4829267a57d2fcd741a98eee8b722fb57289aa0"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd7e48ccf166952882ca8bd778a43502c64f33bf94c12ebe2a7f08e5a0f6689f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.39",
]

View file

@ -5,7 +5,10 @@ edition = "2021"
[dependencies] [dependencies]
adw = { package = "libadwaita", version = "0.5", features = ["v1_4"] } adw = { package = "libadwaita", version = "0.5", features = ["v1_4"] }
anyhow = "1"
chrono = "0.4" chrono = "0.4"
diesel = { version = "2", features = ["chrono", "sqlite"] }
diesel_migrations = "2"
fragile = "2" fragile = "2"
gettext-rs = { version = "0.7", features = ["gettext-system"] } gettext-rs = { version = "0.7", features = ["gettext-system"] }
gstreamer-play = "0.22" gstreamer-play = "0.22"
@ -14,7 +17,8 @@ log = "0.4"
mpris-player = "0.6" mpris-player = "0.6"
once_cell = "1" once_cell = "1"
rand = "0.8" rand = "0.8"
rusqlite = { version = "0.29", features = ["bundled"] } serde = { version = "1", features = ["derive"] }
serde_json = "1"
thiserror = "1" thiserror = "1"
tracing-subscriber = "0.3" tracing-subscriber = "0.3"
uuid = { version = "1", features = ["v4"] } uuid = { version = "1", features = ["v4"] }

View file

@ -0,0 +1 @@
-- This migration is intended to become the initial schema.

View file

@ -0,0 +1,194 @@
-- Rebuild the database schema for Diesel.
--
-- New tables are first created under a *_new suffix so they can coexist with
-- the old tables while data is copied over; the old tables are dropped and
-- the new ones renamed at the very end.
--
-- NOTE(review): the REFERENCES clauses name the final tables (works,
-- persons, ...) while those names still belong to the old tables.  This
-- presumably relies on foreign keys being disabled while migrations run
-- (db::connect only enables them afterwards) and on SQLite re-resolving the
-- targets after the renames below -- confirm.

-- People (composers, performers, ...).  `name` columns throughout this
-- schema hold a JSON object mapping language codes to translations.
CREATE TABLE persons_new (
    person_id TEXT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

-- Roles a person can have on a work or recording (e.g. composer, performer).
CREATE TABLE roles (
    role_id TEXT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE instruments_new (
    instrument_id TEXT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

-- Works form a tree: parts of a work reference their parent via
-- parent_work_id and are ordered among siblings by sequence_number.
CREATE TABLE works_new (
    work_id TEXT NOT NULL PRIMARY KEY,
    parent_work_id TEXT REFERENCES works(work_id),
    sequence_number INTEGER,
    name TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

-- n:m association tables; sequence_number preserves display order.
CREATE TABLE work_persons (
    work_id TEXT NOT NULL REFERENCES works(work_id) ON DELETE CASCADE,
    person_id TEXT NOT NULL REFERENCES persons(person_id),
    role_id TEXT NOT NULL REFERENCES roles(role_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (work_id, person_id, role_id)
);

CREATE TABLE work_instruments (
    work_id TEXT NOT NULL REFERENCES works(work_id) ON DELETE CASCADE,
    instrument_id TEXT NOT NULL REFERENCES instruments(instrument_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (work_id, instrument_id)
);

CREATE TABLE ensembles_new (
    ensemble_id TEXT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

CREATE TABLE ensemble_persons (
    ensemble_id TEXT NOT NULL REFERENCES ensembles(ensemble_id) ON DELETE CASCADE,
    person_id TEXT NOT NULL REFERENCES persons(person_id),
    instrument_id TEXT NOT NULL REFERENCES instruments(instrument_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (ensemble_id, person_id, instrument_id)
);

CREATE TABLE recordings_new (
    recording_id TEXT NOT NULL PRIMARY KEY,
    work_id TEXT NOT NULL REFERENCES works(work_id),
    year INTEGER,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

-- NOTE(review): instrument_id is nullable but part of the primary key.
-- SQLite permits NULLs in non-INTEGER primary key columns, and NULLs do not
-- compare equal, so rows with a NULL instrument bypass uniqueness -- confirm
-- this is intended.
CREATE TABLE recording_persons (
    recording_id TEXT NOT NULL REFERENCES recordings(recording_id) ON DELETE CASCADE,
    person_id TEXT NOT NULL REFERENCES persons(person_id),
    role_id TEXT NOT NULL REFERENCES roles(role_id),
    instrument_id TEXT REFERENCES instruments(instrument_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (recording_id, person_id, role_id, instrument_id)
);

CREATE TABLE recording_ensembles (
    recording_id TEXT NOT NULL REFERENCES recordings(recording_id) ON DELETE CASCADE,
    ensemble_id TEXT NOT NULL REFERENCES ensembles(ensemble_id),
    role_id TEXT NOT NULL REFERENCES roles(role_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (recording_id, ensemble_id, role_id)
);

CREATE TABLE tracks_new (
    track_id TEXT NOT NULL PRIMARY KEY,
    recording_id TEXT NOT NULL REFERENCES recordings(recording_id),
    sequence_number INTEGER NOT NULL,
    path TEXT NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    edited_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_used_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    last_played_at TIMESTAMP
);

CREATE TABLE track_works (
    track_id TEXT NOT NULL REFERENCES tracks(track_id) ON DELETE CASCADE,
    work_id TEXT NOT NULL REFERENCES works(work_id),
    sequence_number INTEGER NOT NULL,
    PRIMARY KEY (track_id, work_id)
);

-- Copy data from the old schema.  Plain-text names are wrapped into the new
-- JSON representation under the 'generic' (untranslated) key.
INSERT INTO persons_new (person_id, name)
SELECT id, json_set('{}', '$.generic', first_name || ' ' || last_name)
FROM persons;

-- Fixed ids for the two roles that existed implicitly in the old schema.
INSERT INTO roles (role_id, name)
VALUES ('380d7e09eb2f49c1a90db2ba4acb6ffd', json_set('{}', '$.generic', 'Composer'));

INSERT INTO roles (role_id, name)
VALUES ('28ff0aeb11c041a6916d93e9b4884eef', json_set('{}', '$.generic', 'Performer'));

INSERT INTO instruments_new (instrument_id, name)
SELECT id, json_set('{}', '$.generic', name)
FROM instruments;

-- Old top-level works and old work_parts both become rows in the new
-- unified works table; parts keep their order via part_index.
INSERT INTO works_new (work_id, name)
SELECT id, json_set('{}', '$.generic', title)
FROM works;

INSERT INTO works_new (work_id, parent_work_id, sequence_number, name)
SELECT id, work, part_index, json_set('{}', '$.generic', title)
FROM work_parts;

INSERT INTO work_persons (work_id, person_id, role_id, sequence_number)
SELECT id, composer, '380d7e09eb2f49c1a90db2ba4acb6ffd', 0
FROM works;

INSERT INTO work_instruments (work_id, instrument_id, sequence_number)
SELECT work, instrument, 0
FROM instrumentations;

INSERT INTO ensembles_new (ensemble_id, name)
SELECT id, json_set('{}', '$.generic', name)
FROM ensembles;

-- The old free-form comment column held the recording year (if anything).
INSERT INTO recordings_new (recording_id, work_id, year)
SELECT id, work, CAST(comment as INTEGER)
FROM recordings;

-- CAST of non-numeric comments yields 0; treat those as "no year".
UPDATE recordings_new
SET year = NULL
WHERE year <= 0;

-- Old performances split into person and ensemble performers.
INSERT INTO recording_persons (recording_id, person_id, role_id, instrument_id, sequence_number)
SELECT recording, person, '28ff0aeb11c041a6916d93e9b4884eef', role, 0
FROM performances
WHERE person IS NOT NULL;

INSERT INTO recording_ensembles (recording_id, ensemble_id, role_id, sequence_number)
SELECT recording, ensemble, '28ff0aeb11c041a6916d93e9b4884eef', 0
FROM performances
WHERE ensemble IS NOT NULL;

-- "index" is quoted because it is a keyword in SQLite.
INSERT INTO tracks_new (track_id, recording_id, sequence_number, path)
SELECT id, recording, "index", path
FROM tracks;

-- Link each track to the work part it contained in the old schema.
INSERT INTO track_works (track_id, work_id, sequence_number)
SELECT tracks.id, work_parts.id, 0
FROM tracks
JOIN recordings ON tracks.recording = recordings.id
JOIN work_parts ON recordings.work = work_parts.work
AND tracks.work_parts = work_parts.part_index;

-- Drop the old schema and move the new tables into place.
DROP TABLE persons;
DROP TABLE instruments;
DROP TABLE works;
DROP TABLE instrumentations;
DROP TABLE work_parts;
DROP TABLE ensembles;
DROP TABLE recordings;
DROP TABLE performances;
DROP TABLE mediums;
DROP TABLE tracks;

ALTER TABLE persons_new RENAME TO persons;
ALTER TABLE instruments_new RENAME TO instruments;
ALTER TABLE works_new RENAME TO works;
ALTER TABLE recordings_new RENAME TO recordings;
ALTER TABLE tracks_new RENAME TO tracks;
ALTER TABLE ensembles_new RENAME TO ensembles;

87
src/db/mod.rs Normal file
View file

@ -0,0 +1,87 @@
pub mod models;
pub mod schema;
pub mod tables;
use std::collections::HashMap;
use anyhow::{anyhow, Result};
use diesel::{
backend::Backend,
deserialize::{self, FromSql, FromSqlRow},
expression::AsExpression,
prelude::*,
serialize::{self, IsNull, Output, ToSql},
sql_types::Text,
sqlite::Sqlite,
};
use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
use serde::{Deserialize, Serialize};
// This makes the SQL migration scripts accessible from the code.
const MIGRATIONS: EmbeddedMigrations = diesel_migrations::embed_migrations!();

/// Connect to a Musicus database and apply any pending migrations.
pub fn connect(file_name: &str) -> Result<SqliteConnection> {
    log::info!("Opening database file '{}'", file_name);
    let mut connection = SqliteConnection::establish(file_name)?;

    log::info!("Running migrations if necessary");
    if let Err(err) = connection.run_pending_migrations(MIGRATIONS) {
        // The harness reports a boxed dyn error; wrap it for anyhow.
        return Err(anyhow!(err));
    }

    // Enable after running migrations to simplify changes in schema.
    diesel::sql_query("PRAGMA foreign_keys = ON").execute(&mut connection)?;

    Ok(connection)
}
/// A single translated string value.
///
/// Stored in the database as a JSON object mapping language codes to
/// translations (see the `FromSql`/`ToSql` impls below); the `"generic"`
/// key holds the untranslated fallback.
#[derive(Serialize, Deserialize, AsExpression, FromSqlRow, Clone, Debug)]
#[diesel(sql_type = Text)]
pub struct TranslatedString(HashMap<String, String>);
impl TranslatedString {
    /// Get the best translation for the user's current locale.
    ///
    /// Falls back to the generic variant if no translation exists.  If no
    /// generic translation exists either (which is a bug in the data), an
    /// empty string is returned and a warning is logged.
    pub fn get(&self) -> &str {
        // TODO: Get language from locale.
        let lang = "generic";

        if let Some(translation) = self.0.get(lang) {
            return translation;
        }

        if let Some(generic) = self.0.get("generic") {
            return generic;
        }

        log::warn!("No generic variant for TranslatedString: {:?}", self);
        ""
    }
}
// Deserialize a `TranslatedString` from the JSON text stored in a `Text`
// column.  Generic over the backend so it works wherever `String` does.
impl<DB: Backend> FromSql<Text, DB> for TranslatedString
where
    String: FromSql<Text, DB>,
{
    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
        // Read the raw column value as text, then parse the JSON object.
        let text = String::from_sql(bytes)?;
        let translated_string = serde_json::from_str(&text)?;
        Ok(translated_string)
    }
}
// Serialize a `TranslatedString` to its JSON text representation when
// binding it to a SQLite `Text` column.
impl ToSql<Text, Sqlite> for TranslatedString
where
    String: ToSql<Text, Sqlite>,
{
    fn to_sql(&self, out: &mut Output<Sqlite>) -> serialize::Result {
        // Stored as a JSON object, e.g. {"generic": "...", "de": "..."}.
        let text = serde_json::to_string(self)?;
        out.set_value(text);
        Ok(IsNull::No)
    }
}

297
src/db/models.rs Normal file
View file

@ -0,0 +1,297 @@
//! This module contains higher-level models combining information from
//! multiple database tables.
use std::{fmt::Display, path::Path};
use anyhow::Result;
use diesel::prelude::*;
use super::{schema::*, tables, TranslatedString};
// Re-exports for tables that don't need additional information.
pub use tables::{Instrument, Person, Role};
/// A musical work together with its parts, associated persons (typically
/// composers) and instruments.
#[derive(Clone, Debug)]
pub struct Work {
    pub work_id: String,
    pub name: TranslatedString,
    // Flattened part hierarchy in depth-first order (see Work::from_table).
    pub parts: Vec<WorkPart>,
    pub persons: Vec<Person>,
    pub instruments: Vec<Instrument>,
}

/// One part of a work within the flattened part hierarchy.
#[derive(Clone, Debug)]
pub struct WorkPart {
    pub work_id: String,
    // Nesting depth below the top-level work (0 = direct child).
    pub level: u8,
    pub name: TranslatedString,
}

/// An ensemble and its member persons with their instruments.
#[derive(Clone, Debug)]
pub struct Ensemble {
    pub ensemble_id: String,
    pub name: TranslatedString,
    pub persons: Vec<(Person, Instrument)>,
}

/// A recording of a work, with all performers and its audio tracks.
#[derive(Clone, Debug)]
pub struct Recording {
    pub recording_id: String,
    pub work: Work,
    // Recording year, if known.
    pub year: Option<i32>,
    pub persons: Vec<Performer>,
    pub ensembles: Vec<Ensemble>,
    pub tracks: Vec<Track>,
}

/// A person performing on a recording in a specific role, optionally
/// playing a specific instrument.
#[derive(Clone, Debug)]
pub struct Performer {
    pub person: Person,
    pub role: Role,
    pub instrument: Option<Instrument>,
}

/// An audio track belonging to a recording.
#[derive(Clone, Debug)]
pub struct Track {
    // Absolute path to the audio file (library path + stored relative path).
    pub track_id: String,
    pub path: String,
    pub works: Vec<Work>,
}
// Person equality is identity-based: two values represent the same person
// iff they share a person_id (name and timestamps are ignored).
impl Eq for Person {}

impl PartialEq for Person {
    fn eq(&self, other: &Self) -> bool {
        self.person_id == other.person_id
    }
}
impl Work {
pub fn from_table(data: tables::Work, connection: &mut SqliteConnection) -> Result<Self> {
fn visit_children(
work_id: &str,
level: u8,
connection: &mut SqliteConnection,
) -> Result<Vec<WorkPart>> {
let mut parts = Vec::new();
let children: Vec<tables::Work> = works::table
.filter(works::parent_work_id.eq(work_id))
.load(connection)?;
for child in children {
let mut grand_children = visit_children(&child.work_id, level + 1, connection)?;
parts.push(WorkPart {
work_id: child.work_id,
level,
name: child.name,
});
parts.append(&mut grand_children);
}
Ok(parts)
}
let parts = visit_children(&data.work_id, 0, connection)?;
let persons: Vec<Person> = persons::table
.inner_join(work_persons::table)
.order(work_persons::sequence_number)
.filter(work_persons::work_id.eq(&data.work_id))
.select(tables::Person::as_select())
.load(connection)?;
let instruments: Vec<Instrument> = instruments::table
.inner_join(work_instruments::table)
.order(work_instruments::sequence_number)
.filter(work_instruments::work_id.eq(&data.work_id))
.select(tables::Instrument::as_select())
.load(connection)?;
Ok(Self {
work_id: data.work_id,
name: data.name,
parts,
persons,
instruments,
})
}
pub fn composers_string(&self) -> String {
self.persons
.iter()
.map(|p| p.name.get().to_string())
.collect::<Vec<String>>()
.join(", ")
}
}
// Work equality is identity-based on work_id only.
impl Eq for Work {}

impl PartialEq for Work {
    fn eq(&self, other: &Self) -> bool {
        self.work_id == other.work_id
    }
}
impl Ensemble {
    /// Load a complete [`Ensemble`] from its table row, resolving its
    /// member persons together with the instrument each one plays.
    pub fn from_table(data: tables::Ensemble, connection: &mut SqliteConnection) -> Result<Self> {
        // Members are returned in their stored display order.
        let persons: Vec<(Person, Instrument)> = persons::table
            .inner_join(ensemble_persons::table.inner_join(instruments::table))
            .order(ensemble_persons::sequence_number)
            .filter(ensemble_persons::ensemble_id.eq(&data.ensemble_id))
            .select((tables::Person::as_select(), tables::Instrument::as_select()))
            .load(connection)?;

        Ok(Self {
            ensemble_id: data.ensemble_id,
            name: data.name,
            persons,
        })
    }
}
// Ensemble equality is identity-based on ensemble_id only.
impl Eq for Ensemble {}

impl PartialEq for Ensemble {
    fn eq(&self, other: &Self) -> bool {
        self.ensemble_id == other.ensemble_id
    }
}
impl Recording {
    /// Load a complete [`Recording`] from its table row, resolving the
    /// recorded work, all performers (persons and ensembles) and tracks.
    ///
    /// `library_path` is the music library directory; it is needed to turn
    /// the relative track paths into absolute ones.
    pub fn from_table(
        data: tables::Recording,
        library_path: &str,
        connection: &mut SqliteConnection,
    ) -> Result<Self> {
        let work = Work::from_table(
            works::table
                .filter(works::work_id.eq(&data.work_id))
                .first::<tables::Work>(connection)?,
            connection,
        )?;

        // Person performers, in stored display order.
        let persons = recording_persons::table
            .order(recording_persons::sequence_number)
            .filter(recording_persons::recording_id.eq(&data.recording_id))
            .load::<tables::RecordingPerson>(connection)?
            .into_iter()
            .map(|r| Performer::from_table(r, connection))
            .collect::<Result<Vec<Performer>>>()?;

        // Ensemble performers, in stored display order.
        let ensembles: Vec<Ensemble> = ensembles::table
            .inner_join(recording_ensembles::table)
            .order(recording_ensembles::sequence_number)
            .filter(recording_ensembles::recording_id.eq(&data.recording_id))
            .select(tables::Ensemble::as_select())
            .load::<tables::Ensemble>(connection)?
            .into_iter()
            .map(|e| Ensemble::from_table(e, connection))
            .collect::<Result<Vec<Ensemble>>>()?;

        // Tracks in playback order.
        let tracks: Vec<Track> = tracks::table
            .order(tracks::sequence_number)
            .filter(tracks::recording_id.eq(&data.recording_id))
            .select(tables::Track::as_select())
            .load::<tables::Track>(connection)?
            .into_iter()
            .map(|t| Track::from_table(t, library_path, connection))
            .collect::<Result<Vec<Track>>>()?;

        Ok(Self {
            recording_id: data.recording_id,
            work,
            year: data.year,
            persons,
            ensembles,
            tracks,
        })
    }

    /// Comma-separated list of all performers: persons (with instrument,
    /// via the [`Performer`] Display impl) followed by ensembles.
    pub fn performers_string(&self) -> String {
        let mut performers = self
            .persons
            .iter()
            .map(ToString::to_string)
            .collect::<Vec<String>>();

        performers.append(
            &mut self
                .ensembles
                .iter()
                .map(|e| e.name.get().to_string())
                .collect::<Vec<String>>(),
        );

        performers.join(", ")
    }
}
impl Performer {
    /// Resolve a `recording_persons` row into a [`Performer`], loading the
    /// referenced person, role and (optional) instrument.
    pub fn from_table(
        data: tables::RecordingPerson,
        connection: &mut SqliteConnection,
    ) -> Result<Self> {
        let person: Person = persons::table
            .filter(persons::person_id.eq(&data.person_id))
            .first(connection)?;

        let role: Role = roles::table
            .filter(roles::role_id.eq(&data.role_id))
            .first(connection)?;

        // instrument_id is nullable in the schema; only resolve it if set.
        let instrument = match &data.instrument_id {
            Some(instrument_id) => Some(
                instruments::table
                    .filter(instruments::instrument_id.eq(instrument_id))
                    .first::<Instrument>(connection)?,
            ),
            None => None,
        };

        Ok(Self {
            person,
            role,
            instrument,
        })
    }
}
impl Display for Performer {
    /// Formats as `"Name (Instrument)"` when an instrument is set,
    /// otherwise just the person's name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.instrument {
            // Write directly to the formatter instead of allocating an
            // intermediate String with format!.
            Some(instrument) => {
                write!(f, "{} ({})", self.person.name.get(), instrument.name.get())
            }
            None => write!(f, "{}", self.person.name.get()),
        }
    }
}
impl Track {
    /// Load a complete [`Track`] from its table row, resolving the works it
    /// contains and joining its relative path onto `library_path`.
    pub fn from_table(
        data: tables::Track,
        library_path: &str,
        connection: &mut SqliteConnection,
    ) -> Result<Self> {
        let works: Vec<Work> = works::table
            .inner_join(track_works::table)
            .order(track_works::sequence_number)
            .filter(track_works::track_id.eq(&data.track_id))
            .select(tables::Work::as_select())
            .load::<tables::Work>(connection)?
            .into_iter()
            .map(|w| Work::from_table(w, connection))
            .collect::<Result<Vec<Work>>>()?;

        Ok(Self {
            track_id: data.track_id,
            // Track paths are stored relative to the library directory.
            path: Path::new(library_path)
                .join(&data.path)
                .to_str()
                // Both components originate from UTF-8 strings, so the
                // joined path must be valid UTF-8 as well.
                .expect("track path built from UTF-8 strings should be valid UTF-8")
                .to_string(),
            works,
        })
    }
}

181
src/db/schema.rs Normal file
View file

@ -0,0 +1,181 @@
// @generated automatically by Diesel CLI.
// NOTE(review): this file is marked @generated (diesel print-schema);
// comments added here will be lost on regeneration.

// n:m association between ensembles and their member persons.
diesel::table! {
    ensemble_persons (ensemble_id, person_id, instrument_id) {
        ensemble_id -> Text,
        person_id -> Text,
        instrument_id -> Text,
        sequence_number -> Integer,
    }
}

diesel::table! {
    ensembles (ensemble_id) {
        ensemble_id -> Text,
        name -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    instruments (instrument_id) {
        instrument_id -> Text,
        name -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    persons (person_id) {
        person_id -> Text,
        name -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    recording_ensembles (recording_id, ensemble_id, role_id) {
        recording_id -> Text,
        ensemble_id -> Text,
        role_id -> Text,
        sequence_number -> Integer,
    }
}

diesel::table! {
    recording_persons (recording_id, person_id, role_id, instrument_id) {
        recording_id -> Text,
        person_id -> Text,
        role_id -> Text,
        instrument_id -> Nullable<Text>,
        sequence_number -> Integer,
    }
}

diesel::table! {
    recordings (recording_id) {
        recording_id -> Text,
        work_id -> Text,
        year -> Nullable<Integer>,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    roles (role_id) {
        role_id -> Text,
        name -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
    }
}

diesel::table! {
    track_works (track_id, work_id) {
        track_id -> Text,
        work_id -> Text,
        sequence_number -> Integer,
    }
}

diesel::table! {
    tracks (track_id) {
        track_id -> Text,
        recording_id -> Text,
        sequence_number -> Integer,
        path -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::table! {
    work_instruments (work_id, instrument_id) {
        work_id -> Text,
        instrument_id -> Text,
        sequence_number -> Integer,
    }
}

diesel::table! {
    work_persons (work_id, person_id, role_id) {
        work_id -> Text,
        person_id -> Text,
        role_id -> Text,
        sequence_number -> Integer,
    }
}

// NOTE(review): work_sections is not created (nor dropped) by the Diesel
// migration in this commit -- presumably a leftover table from the old
// schema that still exists in migrated databases; confirm.
diesel::table! {
    work_sections (id) {
        id -> BigInt,
        work -> Text,
        title -> Text,
        before_index -> BigInt,
    }
}

// Works are self-referential (parent_work_id), so no joinable! is emitted
// for the parent relation.
diesel::table! {
    works (work_id) {
        work_id -> Text,
        parent_work_id -> Nullable<Text>,
        sequence_number -> Nullable<Integer>,
        name -> Text,
        created_at -> Timestamp,
        edited_at -> Timestamp,
        last_used_at -> Timestamp,
        last_played_at -> Nullable<Timestamp>,
    }
}

diesel::joinable!(ensemble_persons -> ensembles (ensemble_id));
diesel::joinable!(ensemble_persons -> instruments (instrument_id));
diesel::joinable!(ensemble_persons -> persons (person_id));
diesel::joinable!(recording_ensembles -> ensembles (ensemble_id));
diesel::joinable!(recording_ensembles -> recordings (recording_id));
diesel::joinable!(recording_ensembles -> roles (role_id));
diesel::joinable!(recording_persons -> instruments (instrument_id));
diesel::joinable!(recording_persons -> persons (person_id));
diesel::joinable!(recording_persons -> recordings (recording_id));
diesel::joinable!(recording_persons -> roles (role_id));
diesel::joinable!(recordings -> works (work_id));
diesel::joinable!(track_works -> tracks (track_id));
diesel::joinable!(track_works -> works (work_id));
diesel::joinable!(tracks -> recordings (recording_id));
diesel::joinable!(work_instruments -> instruments (instrument_id));
diesel::joinable!(work_instruments -> works (work_id));
diesel::joinable!(work_persons -> persons (person_id));
diesel::joinable!(work_persons -> roles (role_id));
diesel::joinable!(work_persons -> works (work_id));

diesel::allow_tables_to_appear_in_same_query!(
    ensemble_persons,
    ensembles,
    instruments,
    persons,
    recording_ensembles,
    recording_persons,
    recordings,
    roles,
    track_works,
    tracks,
    work_instruments,
    work_persons,
    work_sections,
    works,
);

142
src/db/tables.rs Normal file
View file

@ -0,0 +1,142 @@
//! This module contains structs that are one-to-one representations of the
//! tables in the database schema.
use chrono::NaiveDateTime;
use diesel::prelude::*;
use diesel::sqlite::Sqlite;
use super::{schema::*, TranslatedString};
/// Row of the `persons` table.
///
/// Field order must match the column order in the schema: the derived
/// `Queryable` impl maps columns to fields by position.
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Person {
    /// Application-defined string primary key.
    pub person_id: String,
    /// Name, stored through the custom `TranslatedString` SQL mapping.
    pub name: TranslatedString,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    /// `None` presumably means never played — TODO confirm update site.
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `roles` table (e.g. the role a person has on a work or
/// recording; referenced by `work_persons`, `recording_persons` and
/// `recording_ensembles`). Field order must match the schema's column order
/// (positional `Queryable` mapping). Note: no `last_played_at` here, unlike
/// the other entity tables.
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Role {
    /// Application-defined string primary key.
    pub role_id: String,
    pub name: TranslatedString,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
}
/// Row of the `instruments` table. Field order must match the schema's
/// column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Instrument {
    /// Application-defined string primary key.
    pub instrument_id: String,
    pub name: TranslatedString,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `works` table. Works form a hierarchy: `parent_work_id`
/// references another `works` row and `sequence_number` presumably orders
/// siblings under that parent — TODO confirm against the migrations.
/// Field order must match the schema's column order (positional `Queryable`).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Work {
    /// Application-defined string primary key.
    pub work_id: String,
    /// Self-referencing foreign key; `None` for top-level works.
    pub parent_work_id: Option<String>,
    pub sequence_number: Option<i32>,
    pub name: TranslatedString,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `work_persons` join table: a person attached to a work in a
/// given role (the library queries use this to find composers). Field order
/// must match the schema's column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct WorkPerson {
    pub work_id: String,
    pub person_id: String,
    pub role_id: String,
    /// Display/ordering position within the work's person list.
    pub sequence_number: i32,
}
/// Row of the `work_instruments` join table: the instrumentation of a work.
/// Field order must match the schema's column order (positional `Queryable`).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct WorkInstrument {
    pub work_id: String,
    pub instrument_id: String,
    /// Display/ordering position within the work's instrument list.
    pub sequence_number: i32,
}
/// Row of the `ensembles` table. Field order must match the schema's
/// column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Ensemble {
    /// Application-defined string primary key.
    pub ensemble_id: String,
    pub name: TranslatedString,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `ensemble_persons` join table: a person's membership in an
/// ensemble, with the instrument they play there. Field order must match the
/// schema's column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct EnsemblePerson {
    pub ensemble_id: String,
    pub person_id: String,
    /// NOTE(review): non-nullable here, unlike `RecordingPerson::instrument_id`.
    pub instrument_id: String,
    /// Display/ordering position within the ensemble's member list.
    pub sequence_number: i32,
}
/// Row of the `recordings` table: one recording of a single work
/// (`recordings -> works` foreign key in the schema). Field order must match
/// the schema's column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Recording {
    /// Application-defined string primary key.
    pub recording_id: String,
    pub work_id: String,
    /// Recording year; optional when unknown.
    pub year: Option<i32>,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `recording_persons` join table: an individual performer on a
/// recording, in a role and optionally on a specific instrument. Field order
/// must match the schema's column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct RecordingPerson {
    pub recording_id: String,
    pub person_id: String,
    pub role_id: String,
    /// Optional: e.g. a conductor would have a role but no instrument —
    /// NOTE(review): inferred, confirm against the editor UI.
    pub instrument_id: Option<String>,
    /// Display/ordering position within the recording's performer list.
    pub sequence_number: i32,
}
/// Row of the `recording_ensembles` join table: an ensemble performing on a
/// recording, in a role. Field order must match the schema's column order
/// (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct RecordingEnsemble {
    pub recording_id: String,
    pub ensemble_id: String,
    pub role_id: String,
    /// Display/ordering position within the recording's ensemble list.
    pub sequence_number: i32,
}
/// Row of the `tracks` table: one audio file belonging to a recording.
/// Field order must match the schema's column order (positional `Queryable`).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct Track {
    /// Application-defined string primary key.
    pub track_id: String,
    pub recording_id: String,
    /// Playback order within the recording.
    pub sequence_number: i32,
    /// Audio file path — presumably relative to the library folder, since the
    /// model layer joins it onto the folder when loading. TODO confirm.
    pub path: String,
    pub created_at: NaiveDateTime,
    pub edited_at: NaiveDateTime,
    pub last_used_at: NaiveDateTime,
    pub last_played_at: Option<NaiveDateTime>,
}
/// Row of the `track_works` join table: which work (parts) a track contains
/// (replaces the old comma-separated `work_parts` column). Field order must
/// match the schema's column order (positional `Queryable` mapping).
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
#[diesel(check_for_backend(Sqlite))]
pub struct TrackWork {
    pub track_id: String,
    pub work_id: String,
    /// Ordering of the works within the track.
    pub sequence_number: i32,
}

View file

@ -1,5 +1,6 @@
use crate::{ use crate::{
library::{Ensemble, LibraryQuery, MusicusLibrary, Person, Recording, Track, Work}, db::models::*,
library::{LibraryQuery, MusicusLibrary},
player::MusicusPlayer, player::MusicusPlayer,
playlist_item::PlaylistItem, playlist_item::PlaylistItem,
recording_tile::MusicusRecordingTile, recording_tile::MusicusRecordingTile,
@ -159,7 +160,7 @@ impl MusicusHomePage {
} }
fn play_recording(&self, recording: &Recording) { fn play_recording(&self, recording: &Recording) {
let tracks = self.library().tracks(recording); let tracks = &recording.tracks;
if tracks.is_empty() { if tracks.is_empty() {
log::warn!("Ignoring recording without tracks being added to the playlist."); log::warn!("Ignoring recording without tracks being added to the playlist.");
@ -168,16 +169,11 @@ impl MusicusHomePage {
let title = format!( let title = format!(
"{}: {}", "{}: {}",
recording.work.composer.name_fl(), recording.work.composers_string(),
recording.work.title recording.work.name.get(),
); );
let performances = self.library().performances(recording); let performances = recording.performers_string();
let performances = if performances.is_empty() {
None
} else {
Some(performances.join(", "))
};
let mut items = Vec::new(); let mut items = Vec::new();
@ -185,20 +181,19 @@ impl MusicusHomePage {
items.push(PlaylistItem::new( items.push(PlaylistItem::new(
true, true,
&title, &title,
performances.as_deref(), Some(&performances),
None, None,
&tracks[0].path, &tracks[0].path,
)); ));
} else { } else {
let work_parts = self.library().work_parts(&recording.work);
let mut tracks = tracks.into_iter(); let mut tracks = tracks.into_iter();
let first_track = tracks.next().unwrap(); let first_track = tracks.next().unwrap();
let track_title = |track: &Track, number: usize| -> String { let track_title = |track: &Track, number: usize| -> String {
let title = track let title = track
.work_parts .works
.iter() .iter()
.map(|w| work_parts[*w].clone()) .map(|w| w.name.get().to_string())
.collect::<Vec<String>>() .collect::<Vec<String>>()
.join(", "); .join(", ");
@ -212,7 +207,7 @@ impl MusicusHomePage {
items.push(PlaylistItem::new( items.push(PlaylistItem::new(
true, true,
&title, &title,
performances.as_deref(), Some(&performances),
Some(&track_title(&first_track, 1)), Some(&track_title(&first_track, 1)),
&first_track.path, &first_track.path,
)); ));
@ -221,7 +216,7 @@ impl MusicusHomePage {
items.push(PlaylistItem::new( items.push(PlaylistItem::new(
false, false,
&title, &title,
performances.as_deref(), Some(&performances),
// track number = track index + 1 (first track) + 1 (zero based) // track number = track index + 1 (first track) + 1 (zero based)
Some(&track_title(&track, index + 2)), Some(&track_title(&track, index + 2)),
&track.path, &track.path,
@ -234,7 +229,7 @@ impl MusicusHomePage {
fn query(&self, query: &LibraryQuery) { fn query(&self, query: &LibraryQuery) {
let imp = self.imp(); let imp = self.imp();
let results = self.library().query(query); let results = self.library().query(query).unwrap();
for flowbox in [ for flowbox in [
&imp.composers_flow_box, &imp.composers_flow_box,
@ -284,9 +279,8 @@ impl MusicusHomePage {
} }
for recording in &results.recordings { for recording in &results.recordings {
let performances = self.library().performances(recording);
imp.recordings_flow_box imp.recordings_flow_box
.append(&MusicusRecordingTile::new(recording, performances)); .append(&MusicusRecordingTile::new(recording));
} }
imp.composers.replace(results.composers); imp.composers.replace(results.composers);

View file

@ -1,11 +1,19 @@
use gtk::{glib, glib::Properties, prelude::*, subclass::prelude::*};
use rusqlite::{Connection, Row};
use std::{ use std::{
cell::OnceCell, cell::{OnceCell, RefCell},
num::ParseIntError,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use anyhow::Result;
use diesel::{dsl::exists, prelude::*, QueryDsl, SqliteConnection};
use gtk::{glib, glib::Properties, prelude::*, subclass::prelude::*};
use crate::db::{self, models::*, schema::*, tables};
diesel::sql_function! {
/// Represents the SQL RANDOM() function.
fn random() -> Integer
}
mod imp { mod imp {
use super::*; use super::*;
@ -14,7 +22,7 @@ mod imp {
pub struct MusicusLibrary { pub struct MusicusLibrary {
#[property(get, construct_only)] #[property(get, construct_only)]
pub folder: OnceCell<String>, pub folder: OnceCell<String>,
pub connection: OnceCell<Connection>, pub connection: RefCell<Option<SqliteConnection>>,
} }
#[glib::object_subclass] #[glib::object_subclass]
@ -27,10 +35,10 @@ mod imp {
impl ObjectImpl for MusicusLibrary { impl ObjectImpl for MusicusLibrary {
fn constructed(&self) { fn constructed(&self) {
self.parent_constructed(); self.parent_constructed();
let db_path = PathBuf::from(self.folder.get().unwrap()).join("musicus.db");
self.connection let db_path = PathBuf::from(&self.folder.get().unwrap()).join("musicus.db");
.set(Connection::open(db_path).unwrap()) let connection = db::connect(db_path.to_str().unwrap()).unwrap();
.unwrap(); self.connection.set(Some(connection));
} }
} }
} }
@ -46,10 +54,12 @@ impl MusicusLibrary {
.build() .build()
} }
pub fn query(&self, query: &LibraryQuery) -> LibraryResults { pub fn query(&self, query: &LibraryQuery) -> Result<LibraryResults> {
let search = format!("%{}%", query.search); let search = format!("%{}%", query.search);
let mut binding = self.imp().connection.borrow_mut();
let connection = &mut *binding.as_mut().unwrap();
match query { Ok(match query {
LibraryQuery { LibraryQuery {
composer: None, composer: None,
performer: None, performer: None,
@ -57,59 +67,47 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let composers = self let composers: Vec<Person> = persons::table
.con() .filter(
.prepare( exists(
"SELECT DISTINCT persons.id, persons.first_name, persons.last_name \ work_persons::table
FROM persons \ .filter(work_persons::person_id.eq(persons::person_id)),
JOIN works ON works.composer = persons.id \ )
WHERE persons.first_name LIKE ?1 OR persons.last_name LIKE ?1 \ .and(persons::name.like(&search)),
LIMIT 9",
) )
.unwrap() .limit(9)
.query_map([&search], Person::from_row) .load(connection)?;
.unwrap()
.collect::<rusqlite::Result<Vec<Person>>>()
.unwrap();
let performers = self let performers: Vec<Person> = persons::table
.con() .filter(
.prepare( exists(
"SELECT DISTINCT persons.id, persons.first_name, persons.last_name \ recording_persons::table
FROM persons \ .filter(recording_persons::person_id.eq(persons::person_id)),
JOIN performances ON performances.person = persons.id \ )
WHERE persons.first_name LIKE ?1 OR persons.last_name LIKE ?1 \ .and(persons::name.like(&search)),
LIMIT 9",
) )
.unwrap() .limit(9)
.query_map([&search], Person::from_row) .load(connection)?;
.unwrap()
.collect::<rusqlite::Result<Vec<Person>>>()
.unwrap();
let ensembles = self // TODO: Search ensemble persons as well.
.con() let ensembles: Vec<Ensemble> = ensembles::table
.prepare("SELECT id, name FROM ensembles WHERE name LIKE ?1 LIMIT 9") .filter(ensembles::name.like(&search))
.unwrap() .limit(9)
.query_map([&search], Ensemble::from_row) .load::<tables::Ensemble>(connection)?
.unwrap() .into_iter()
.collect::<rusqlite::Result<Vec<Ensemble>>>() .map(|e| Ensemble::from_table(e, connection))
.unwrap(); .collect::<Result<Vec<Ensemble>>>()?;
let works = self let works: Vec<Work> = works::table
.con() .inner_join(work_persons::table.inner_join(persons::table))
.prepare( .filter(works::name.like(&search).or(persons::name.like(&search)))
"SELECT works.id, works.title, persons.id, persons.first_name, persons.last_name \ .limit(9)
FROM works \ .select(works::all_columns)
JOIN persons ON works.composer = persons.id \ .distinct()
WHERE title LIKE ?1 \ .load::<tables::Work>(connection)?
LIMIT 9" .into_iter()
) .map(|w| Work::from_table(w, connection))
.unwrap() .collect::<Result<Vec<Work>>>()?;
.query_map([&search], Work::from_row)
.unwrap()
.collect::<rusqlite::Result<Vec<Work>>>()
.unwrap();
LibraryResults { LibraryResults {
composers, composers,
@ -126,54 +124,51 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let performers = self let performers: Vec<Person> = persons::table
.con() .inner_join(recording_persons::table.inner_join(
.prepare( recordings::table.inner_join(works::table.inner_join(work_persons::table)),
"SELECT DISTINCT persons.id, persons.first_name, persons.last_name \ ))
FROM persons \ .filter(
JOIN performances ON performances.person = persons.id \ work_persons::person_id
JOIN recordings ON recordings.id = performances.recording \ .eq(&composer.person_id)
JOIN works ON works.id = recordings.work \ .and(persons::name.like(&search)),
WHERE works.composer IS ?1 \
AND (persons.first_name LIKE ?2 OR persons.last_name LIKE ?2) \
LIMIT 9",
) )
.unwrap() .limit(9)
.query_map([&composer.id, &search], Person::from_row) .select(persons::all_columns)
.unwrap() .distinct()
.collect::<rusqlite::Result<Vec<Person>>>() .load(connection)?;
.unwrap();
let ensembles = self let ensembles: Vec<Ensemble> = ensembles::table
.con() .inner_join(recording_ensembles::table.inner_join(
.prepare( recordings::table.inner_join(works::table.inner_join(work_persons::table)),
"SELECT DISTINCT ensembles.id, ensembles.name \ ))
FROM ensembles \ .filter(
JOIN performances ON performances.ensemble = ensembles.id \ work_persons::person_id
JOIN recordings ON recordings.id = performances.recording \ .eq(&composer.person_id)
JOIN works ON works.id = recordings.work \ .and(ensembles::name.like(&search)),
WHERE works.composer IS ?1 AND ensembles.name LIKE ?2 \
LIMIT 9",
) )
.unwrap() .limit(9)
.query_map([&composer.id, &search], Ensemble::from_row) .select(ensembles::all_columns)
.unwrap() .distinct()
.collect::<rusqlite::Result<Vec<Ensemble>>>() .load::<tables::Ensemble>(connection)?
.unwrap(); .into_iter()
.map(|e| Ensemble::from_table(e, connection))
.collect::<Result<Vec<Ensemble>>>()?;
let works = self let works: Vec<Work> = works::table
.con() .inner_join(work_persons::table)
.prepare( .filter(
"SELECT DISTINCT works.id, works.title, persons.id, persons.first_name, persons.last_name \ work_persons::person_id
FROM works \ .eq(&composer.person_id)
JOIN persons ON works.composer = persons.id \ .and(works::name.like(&search)),
WHERE works.composer = ?1 AND title LIKE ?2 \ )
LIMIT 9") .limit(9)
.unwrap() .select(works::all_columns)
.query_map([&composer.id, &search], Work::from_row) .distinct()
.unwrap() .load::<tables::Work>(connection)?
.collect::<rusqlite::Result<Vec<Work>>>() .into_iter()
.unwrap(); .map(|w| Work::from_table(w, connection))
.collect::<Result<Vec<Work>>>()?;
LibraryResults { LibraryResults {
performers, performers,
@ -189,40 +184,40 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let composers = self let composers: Vec<Person> =
.con() persons::table
.prepare( .inner_join(work_persons::table.inner_join(
"SELECT DISTINCT persons.id, persons.first_name, persons.last_name \ works::table.inner_join(
FROM persons \ recordings::table.inner_join(recording_ensembles::table),
JOIN works ON works.composer = persons.id \ ),
JOIN recordings ON recordings.work = works.id \ ))
JOIN performances ON performances.recording = recordings.id \ .filter(
WHERE performances.ensemble IS ?1 \ recording_ensembles::ensemble_id
AND (persons.first_name LIKE ?2 OR persons.last_name LIKE ?2) \ .eq(&ensemble.ensemble_id)
LIMIT 9", .and(persons::name.like(&search)),
) )
.unwrap() .limit(9)
.query_map([&ensemble.id, &search], Person::from_row) .select(persons::all_columns)
.unwrap() .distinct()
.collect::<rusqlite::Result<Vec<Person>>>() .load(connection)?;
.unwrap();
let recordings = self let recordings = recordings::table
.con() .inner_join(
.prepare( works::table.inner_join(work_persons::table.inner_join(persons::table)),
"SELECT DISTINCT recordings.id, works.id, works.title, persons.id, persons.first_name, persons.last_name \ )
FROM recordings \ // .inner_join(recording_persons::table.inner_join(persons::table))
JOIN works ON recordings.work = works.id \ .inner_join(recording_ensembles::table)
JOIN persons ON works.composer = persons.id \ .filter(
JOIN performances ON recordings.id = performances.recording \ recording_ensembles::ensemble_id
WHERE performances.ensemble IS ?1 \ .eq(&ensemble.ensemble_id)
AND (works.title LIKE ?2 OR persons.first_name LIKE ?2 OR persons.last_name LIKE ?2) \ .and(works::name.like(&search).or(persons::name.like(&search))),
LIMIT 9") )
.unwrap() .select(recordings::all_columns)
.query_map([&ensemble.id, &search], Recording::from_row) .distinct()
.unwrap() .load::<tables::Recording>(connection)?
.collect::<rusqlite::Result<Vec<Recording>>>() .into_iter()
.unwrap(); .map(|r| Recording::from_table(r, &&self.folder(), connection))
.collect::<Result<Vec<Recording>>>()?;
LibraryResults { LibraryResults {
composers, composers,
@ -236,40 +231,39 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let composers = self let composers: Vec<Person> = persons::table
.con() .inner_join(
.prepare( work_persons::table
"SELECT DISTINCT persons.id, persons.first_name, persons.last_name \ .inner_join(works::table.inner_join(
FROM persons \ recordings::table.inner_join(recording_persons::table),
JOIN works ON works.composer = persons.id \ )),
JOIN recordings ON recordings.work = works.id \
JOIN performances ON performances.recording = recordings.id \
WHERE performances.person IS ?1 \
AND (persons.first_name LIKE ?2 OR persons.last_name LIKE ?2) \
LIMIT 9",
) )
.unwrap() .filter(
.query_map([&performer.id, &search], Person::from_row) recording_persons::person_id
.unwrap() .eq(&performer.person_id)
.collect::<rusqlite::Result<Vec<Person>>>() .and(persons::name.like(&search)),
.unwrap(); )
.limit(9)
.select(persons::all_columns)
.distinct()
.load(connection)?;
let recordings = self let recordings = recordings::table
.con() .inner_join(
.prepare( works::table.inner_join(work_persons::table.inner_join(persons::table)),
"SELECT DISTINCT recordings.id, works.id, works.title, persons.id, persons.first_name, persons.last_name \ )
FROM recordings \ .inner_join(recording_persons::table)
JOIN works ON recordings.work = works.id \ .filter(
JOIN persons ON works.composer = persons.id \ recording_persons::person_id
JOIN performances ON recordings.id = performances.recording \ .eq(&performer.person_id)
WHERE performances.person IS ?1 \ .and(works::name.like(&search).or(persons::name.like(&search))),
AND (works.title LIKE ?2 OR persons.first_name LIKE ?2 OR persons.last_name LIKE ?2) \ )
LIMIT 9") .select(recordings::all_columns)
.unwrap() .distinct()
.query_map([&performer.id, &search], Recording::from_row) .load::<tables::Recording>(connection)?
.unwrap() .into_iter()
.collect::<rusqlite::Result<Vec<Recording>>>() .map(|r| Recording::from_table(r, &self.folder(), connection))
.unwrap(); .collect::<Result<Vec<Recording>>>()?;
LibraryResults { LibraryResults {
composers, composers,
@ -283,23 +277,21 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let recordings = self let recordings = recordings::table
.con() .inner_join(works::table.inner_join(work_persons::table))
.prepare( .inner_join(recording_ensembles::table)
"SELECT DISTINCT recordings.id, works.id, works.title, persons.id, persons.first_name, persons.last_name \ .filter(
FROM recordings \ work_persons::person_id
JOIN works ON recordings.work = works.id \ .eq(&composer.person_id)
JOIN persons ON works.composer = persons.id \ .and(recording_ensembles::ensemble_id.eq(&ensemble.ensemble_id))
JOIN performances ON recordings.id = performances.recording \ .and(works::name.like(search)),
WHERE works.composer IS ?1 \ )
AND performances.ensemble IS ?2 \ .select(recordings::all_columns)
AND works.title LIKE ?3 \ .distinct()
LIMIT 9") .load::<tables::Recording>(connection)?
.unwrap() .into_iter()
.query_map([&composer.id, &ensemble.id, &search], Recording::from_row) .map(|r| Recording::from_table(r, &self.folder(), connection))
.unwrap() .collect::<Result<Vec<Recording>>>()?;
.collect::<rusqlite::Result<Vec<Recording>>>()
.unwrap();
LibraryResults { LibraryResults {
recordings, recordings,
@ -312,23 +304,21 @@ impl MusicusLibrary {
work: None, work: None,
.. ..
} => { } => {
let recordings = self let recordings = recordings::table
.con() .inner_join(works::table.inner_join(work_persons::table))
.prepare( .inner_join(recording_persons::table)
"SELECT DISTINCT recordings.id, works.id, works.title, persons.id, persons.first_name, persons.last_name \ .filter(
FROM recordings \ work_persons::person_id
JOIN works ON recordings.work = works.id \ .eq(&composer.person_id)
JOIN persons ON works.composer = persons.id \ .and(recording_persons::person_id.eq(&performer.person_id))
JOIN performances ON recordings.id = performances.recording \ .and(works::name.like(search)),
WHERE works.composer IS ?1 \ )
AND performances.person IS ?2 \ .select(recordings::all_columns)
AND works.title LIKE ?3 \ .distinct()
LIMIT 9") .load::<tables::Recording>(connection)?
.unwrap() .into_iter()
.query_map([&composer.id, &performer.id, &search], Recording::from_row) .map(|r| Recording::from_table(r, &self.folder(), connection))
.unwrap() .collect::<Result<Vec<Recording>>>()?;
.collect::<rusqlite::Result<Vec<Recording>>>()
.unwrap();
LibraryResults { LibraryResults {
recordings, recordings,
@ -338,130 +328,35 @@ impl MusicusLibrary {
LibraryQuery { LibraryQuery {
work: Some(work), .. work: Some(work), ..
} => { } => {
let recordings = self let recordings = recordings::table
.con() .filter(recordings::work_id.eq(&work.work_id))
.prepare( .load::<tables::Recording>(connection)?
"SELECT DISTINCT recordings.id, works.id, works.title, persons.id, persons.first_name, persons.last_name \ .into_iter()
FROM recordings \ .map(|r| Recording::from_table(r, &self.folder(), connection))
JOIN works ON recordings.work = works.id \ .collect::<Result<Vec<Recording>>>()?;
JOIN persons ON works.composer IS persons.id \
WHERE works.id IS ?1")
.unwrap()
.query_map([&work.id], Recording::from_row)
.unwrap()
.collect::<rusqlite::Result<Vec<Recording>>>()
.unwrap();
LibraryResults { LibraryResults {
recordings, recordings,
..Default::default() ..Default::default()
} }
} }
} })
} }
pub fn work_parts(&self, work: &Work) -> Vec<String> { pub fn random_recording(&self, query: &LibraryQuery) -> Result<Recording> {
self.con() let mut binding = self.imp().connection.borrow_mut();
.prepare("SELECT * FROM work_parts WHERE work IS ?1 ORDER BY part_index") let connection = &mut *binding.as_mut().unwrap();
.unwrap()
.query_map([&work.id], |row| row.get::<_, String>(3))
.unwrap()
.collect::<rusqlite::Result<Vec<String>>>()
.unwrap()
}
pub fn tracks(&self, recording: &Recording) -> Vec<Track> {
self.con()
.prepare("SELECT * FROM tracks WHERE recording IS ?1 ORDER BY \"index\"")
.unwrap()
.query_map([&recording.id], |row| {
Ok(Track {
work_parts: row
.get::<_, String>(4)?
.split(',')
.filter(|s| !s.is_empty())
.map(str::parse::<usize>)
.collect::<Result<Vec<usize>, ParseIntError>>()
.expect("work part IDs should be valid integers"),
path: PathBuf::from(self.folder()).join(row.get::<_, String>(6)?),
})
})
.unwrap()
.collect::<rusqlite::Result<Vec<Track>>>()
.unwrap()
}
pub fn random_recording(&self, query: &LibraryQuery) -> Option<Recording> {
match query { match query {
LibraryQuery { .. } => self LibraryQuery { .. } => Recording::from_table(
.con() recordings::table
.prepare("SELECT * FROM recordings ORDER BY RANDOM() LIMIT 1") .order(random())
.unwrap() .first::<tables::Recording>(connection)?,
.query_map([], Recording::from_row) &self.folder(),
.unwrap() connection,
.next() ),
.map(|r| r.unwrap()),
} }
} }
pub fn performances(&self, recording: &Recording) -> Vec<String> {
let mut performances = self
.con()
.prepare(
"SELECT persons.id, persons.first_name, persons.last_name, instruments.id, instruments.name \
FROM performances \
INNER JOIN persons ON persons.id = performances.person \
LEFT JOIN instruments ON instruments.id = performances.role \
INNER JOIN recordings ON performances.recording = recordings.id \
WHERE recordings.id IS ?1")
.unwrap()
.query_map([&recording.id], Performance::from_person_row)
.unwrap()
.collect::<rusqlite::Result<Vec<Performance>>>()
.unwrap();
performances.append(
&mut self
.con()
.prepare(
"SELECT ensembles.id, ensembles.name, instruments.id, instruments.name \
FROM performances \
INNER JOIN ensembles ON ensembles.id = performances.ensemble \
LEFT JOIN instruments ON instruments.id = performances.role \
INNER JOIN recordings ON performances.recording = recordings.id \
WHERE recordings.id IS ?1",
)
.unwrap()
.query_map([&recording.id], Performance::from_ensemble_row)
.unwrap()
.collect::<rusqlite::Result<Vec<Performance>>>()
.unwrap(),
);
performances
.into_iter()
.map(|performance| match performance {
Performance::Person(person, role) => {
let mut result = person.name_fl();
if let Some(role) = role {
result.push_str(&format!(" ({})", role.name));
}
result
}
Performance::Ensemble(ensemble, role) => {
let mut result = ensemble.name;
if let Some(role) = role {
result.push_str(&format!(" ({})", role.name));
}
result
}
})
.collect::<Vec<String>>()
}
fn con(&self) -> &Connection {
self.imp().connection.get().unwrap()
}
} }
#[derive(Default, Debug)] #[derive(Default, Debug)]
@ -491,170 +386,3 @@ impl LibraryResults {
&& self.recordings.is_empty() && self.recordings.is_empty()
} }
} }
#[derive(Debug, Clone, Eq)]
pub struct Person {
pub id: String,
pub first_name: String,
pub last_name: String,
}
impl PartialEq for Person {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Person {
pub fn from_row(row: &Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
first_name: row.get(1)?,
last_name: row.get(2)?,
})
}
pub fn name_fl(&self) -> String {
format!("{} {}", self.first_name, self.last_name)
}
}
#[derive(Debug, Clone, Eq)]
pub struct Ensemble {
pub id: String,
pub name: String,
}
impl PartialEq for Ensemble {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Ensemble {
pub fn from_row(row: &Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
name: row.get(1)?,
})
}
}
#[derive(Debug, Clone, Eq)]
pub struct Work {
pub id: String,
pub title: String,
pub composer: Person,
}
impl PartialEq for Work {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Work {
pub fn from_row(row: &Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
title: row.get(1)?,
composer: Person {
id: row.get(2)?,
first_name: row.get(3)?,
last_name: row.get(4)?,
},
})
}
}
#[derive(Debug, Clone, Eq)]
pub struct Recording {
pub id: String,
pub work: Work,
}
impl PartialEq for Recording {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Recording {
pub fn from_row(row: &Row) -> rusqlite::Result<Self> {
Ok(Self {
id: row.get(0)?,
work: Work {
id: row.get(1)?,
title: row.get(2)?,
composer: Person {
id: row.get(3)?,
first_name: row.get(4)?,
last_name: row.get(5)?,
},
},
})
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Performance {
Person(Person, Option<Role>),
Ensemble(Ensemble, Option<Role>),
}
impl Performance {
pub fn from_person_row(row: &Row) -> rusqlite::Result<Self> {
let person = Person {
id: row.get(0)?,
first_name: row.get(1)?,
last_name: row.get(2)?,
};
Ok(match row.get::<_, Option<String>>(3)? {
None => Self::Person(person, None),
Some(role_id) => Self::Person(
person,
Some(Role {
id: role_id,
name: row.get(4)?,
}),
),
})
}
pub fn from_ensemble_row(row: &Row) -> rusqlite::Result<Self> {
let ensemble = Ensemble {
id: row.get(0)?,
name: row.get(1)?,
};
Ok(match row.get::<_, Option<String>>(2)? {
None => Self::Ensemble(ensemble, None),
Some(role_id) => Self::Ensemble(
ensemble,
Some(Role {
id: role_id,
name: row.get(3)?,
}),
),
})
}
}
#[derive(Debug, Clone, Eq)]
pub struct Role {
pub id: String,
pub name: String,
}
impl PartialEq for Role {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
#[derive(Debug, Clone)]
pub struct Track {
pub work_parts: Vec<usize>,
pub path: PathBuf,
}

View file

@ -1,5 +1,6 @@
mod application; mod application;
mod config; mod config;
mod db;
mod home_page; mod home_page;
mod library_manager; mod library_manager;
mod library; mod library;

View file

@ -1,7 +1,8 @@
use crate::library::Recording;
use gtk::{glib, subclass::prelude::*}; use gtk::{glib, subclass::prelude::*};
use std::cell::OnceCell; use std::cell::OnceCell;
use crate::db::models::Recording;
mod imp { mod imp {
use super::*; use super::*;
@ -44,14 +45,14 @@ glib::wrapper! {
} }
impl MusicusRecordingTile { impl MusicusRecordingTile {
pub fn new(recording: &Recording, performances: Vec<String>) -> Self { pub fn new(recording: &Recording) -> Self {
let obj: Self = glib::Object::new(); let obj: Self = glib::Object::new();
let imp = obj.imp(); let imp = obj.imp();
imp.work_label.set_label(&recording.work.title); imp.work_label.set_label(&recording.work.name.get());
imp.composer_label imp.composer_label.set_label(&recording.work.composers_string());
.set_label(&recording.work.composer.name_fl()); imp.performances_label.set_label(&recording.performers_string());
imp.performances_label.set_label(&performances.join(", "));
imp.recording.set(recording.clone()).unwrap(); imp.recording.set(recording.clone()).unwrap();
obj obj

View file

@ -1,8 +1,9 @@
use crate::library::{Ensemble, Person, Work};
use adw::{glib, glib::subclass::Signal, prelude::*, subclass::prelude::*}; use adw::{glib, glib::subclass::Signal, prelude::*, subclass::prelude::*};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::cell::OnceCell; use std::cell::OnceCell;
use crate::db::models::{Ensemble, Person, Work};
mod imp { mod imp {
use super::*; use super::*;
@ -53,11 +54,11 @@ impl MusicusSearchTag {
pub fn new(tag: Tag) -> Self { pub fn new(tag: Tag) -> Self {
let obj: MusicusSearchTag = glib::Object::new(); let obj: MusicusSearchTag = glib::Object::new();
obj.imp().label.set_label(&match &tag { obj.imp().label.set_label(match &tag {
Tag::Composer(person) => person.name_fl(), Tag::Composer(person) => person.name.get(),
Tag::Performer(person) => person.name_fl(), Tag::Performer(person) => person.name.get(),
Tag::Ensemble(ensemble) => ensemble.name.clone(), Tag::Ensemble(ensemble) => ensemble.name.get(),
Tag::Work(work) => work.title.clone(), Tag::Work(work) => work.name.get(),
}); });
obj.imp().tag.set(tag).unwrap(); obj.imp().tag.set(tag).unwrap();

View file

@ -48,14 +48,14 @@ impl MusicusTagTile {
match &tag { match &tag {
Tag::Composer(person) | Tag::Performer(person) => { Tag::Composer(person) | Tag::Performer(person) => {
imp.title_label.set_label(&person.name_fl()); imp.title_label.set_label(person.name.get());
} }
Tag::Ensemble(ensemble) => { Tag::Ensemble(ensemble) => {
imp.title_label.set_label(&ensemble.name); imp.title_label.set_label(ensemble.name.get());
} }
Tag::Work(work) => { Tag::Work(work) => {
imp.title_label.set_label(&work.title); imp.title_label.set_label(work.name.get());
imp.subtitle_label.set_label(&work.composer.name_fl()); imp.subtitle_label.set_label(&work.composers_string());
imp.subtitle_label.set_visible(true); imp.subtitle_label.set_visible(true);
} }
} }