Add initial database including simple abstractions

This commit is contained in:
Elias Projahn 2020-09-28 14:15:59 +02:00
parent 868e1168e5
commit c67cefb38a
12 changed files with 772 additions and 0 deletions

1
.gitignore vendored
View file

@ -1,3 +1,4 @@
Cargo.lock Cargo.lock
test.sqlite
/res/resources.gresource /res/resources.gresource
/target /target

View file

@ -4,7 +4,10 @@ version = "0.1.0"
edition = "2018" edition = "2018"
[dependencies] [dependencies]
diesel = { version = "1.4.5", features = ["sqlite"] }
diesel_migrations = "1.4.0"
gio = "0.9.1" gio = "0.9.1"
glib = "0.10.2" glib = "0.10.2"
gtk = { version = "0.9.2", features = ["v3_24"] } gtk = { version = "0.9.2", features = ["v3_24"] }
gtk-macros = "0.2.0" gtk-macros = "0.2.0"
rand = "0.7.3"

View file

@ -19,6 +19,23 @@ Afterwards you can compile and run the program using:
$ cargo run $ cargo run
``` ```
This program uses [Diesel](https://diesel.rs) as its ORM. After installing
the Diesel command line utility, you will be able to create a new schema
migration using the following command:
```
$ diesel migration generate [change_description]
```
To update the `src/database/schema.rs` file, you should use the following
command:
```
$ diesel migration run --database-url test.sqlite
```
The generated `src/database/schema.rs` file should never be edited manually.
## License ## License
Musicus Editor is free and open source software: you can redistribute it and/or Musicus Editor is free and open source software: you can redistribute it and/or

2
diesel.toml Normal file
View file

@ -0,0 +1,2 @@
[print_schema]
file = "src/database/schema.rs"

View file

@ -0,0 +1,19 @@
-- Revert the initial schema.
--
-- Tables are dropped child-first: with PRAGMA foreign_keys = ON (enabled by
-- the application), DROP TABLE performs an implicit DELETE FROM, which can
-- fail with a foreign key violation if a referencing child table still holds
-- rows (e.g. dropping instruments while instrumentations exist).
DROP TABLE performances;
DROP TABLE part_instrumentations;
DROP TABLE work_sections;
DROP TABLE work_parts;
DROP TABLE instrumentations;
DROP TABLE recordings;
DROP TABLE works;
DROP TABLE ensembles;
DROP TABLE instruments;
DROP TABLE persons;

View file

@ -0,0 +1,62 @@
-- Initial schema: persons, instruments, works (with parts, sections and
-- instrumentations), ensembles, recordings and performances.
--
-- IDs are application-generated (random) BIGINT primary keys. Rows owned by
-- a parent entity cascade on delete; plain lookups (e.g. instruments) do not.

CREATE TABLE persons (
    id BIGINT NOT NULL PRIMARY KEY,
    first_name TEXT NOT NULL,
    last_name TEXT NOT NULL
);

CREATE TABLE instruments (
    id BIGINT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL
);

CREATE TABLE works (
    id BIGINT NOT NULL PRIMARY KEY,
    composer BIGINT NOT NULL REFERENCES persons(id) ON DELETE CASCADE,
    title TEXT NOT NULL
);

CREATE TABLE instrumentations (
    id BIGINT NOT NULL PRIMARY KEY,
    work BIGINT NOT NULL REFERENCES works(id) ON DELETE CASCADE,
    instrument BIGINT NOT NULL REFERENCES instruments(id)
);

CREATE TABLE work_parts (
    id BIGINT NOT NULL PRIMARY KEY,
    work BIGINT NOT NULL REFERENCES works(id) ON DELETE CASCADE,
    part_index BIGINT NOT NULL,
    composer BIGINT REFERENCES persons(id),
    title TEXT NOT NULL
);

CREATE TABLE part_instrumentations (
    id BIGINT NOT NULL PRIMARY KEY,
    -- Fixed: this column references work_parts, not works. The previous
    -- REFERENCES works(id) made every part instrumentation point at a work
    -- row, breaking lookups by work_part ID.
    work_part BIGINT NOT NULL REFERENCES work_parts(id) ON DELETE CASCADE,
    instrument BIGINT NOT NULL REFERENCES instruments(id)
);

CREATE TABLE work_sections (
    id BIGINT NOT NULL PRIMARY KEY,
    work BIGINT NOT NULL REFERENCES works(id) ON DELETE CASCADE,
    title TEXT NOT NULL,
    before_index BIGINT NOT NULL
);

CREATE TABLE ensembles (
    id BIGINT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL
);

CREATE TABLE recordings (
    id BIGINT NOT NULL PRIMARY KEY,
    work BIGINT NOT NULL REFERENCES works(id) ON DELETE CASCADE,
    comment TEXT NOT NULL
);

CREATE TABLE performances (
    id BIGINT NOT NULL PRIMARY KEY,
    recording BIGINT NOT NULL REFERENCES recordings(id) ON DELETE CASCADE,
    person BIGINT REFERENCES persons(id) ON DELETE CASCADE,
    ensemble BIGINT REFERENCES ensembles(id) ON DELETE CASCADE,
    role BIGINT REFERENCES instruments(id)
);

324
src/database/database.rs Normal file
View file

@ -0,0 +1,324 @@
use super::models::*;
use super::schema::*;
use super::tables::*;
use diesel::prelude::*;
// Generates the `embedded_migrations` module from the project's migrations
// directory, so the schema can be set up at runtime without the Diesel CLI.
embed_migrations!();

/// A connection to the Musicus SQLite database, providing high-level
/// accessors for all entity types.
pub struct Database {
    // The underlying Diesel SQLite connection (not thread-safe; one per user).
    c: SqliteConnection,
}
impl Database {
    /// Open (or create) the SQLite database at `path`.
    ///
    /// Enables SQLite foreign key enforcement and runs all embedded schema
    /// migrations. Panics if the connection cannot be established or a
    /// migration fails.
    pub fn new(path: &str) -> Database {
        let c = SqliteConnection::establish(path)
            .unwrap_or_else(|_| panic!("Failed to connect to database at \"{}\"!", path));

        // SQLite disables foreign key checks by default; the schema relies on
        // them for referential integrity and ON DELETE CASCADE.
        diesel::sql_query("PRAGMA foreign_keys = ON;")
            .execute(&c)
            .expect("Failed to activate foreign key support!");

        embedded_migrations::run(&c).expect("Failed to run migrations!");

        Database { c }
    }

    /// Insert a person or replace an existing one with the same ID.
    pub fn update_person(&self, person: Person) {
        diesel::replace_into(persons::table)
            .values(person)
            .execute(&self.c)
            .expect("Failed to insert person!");
    }

    /// Get a person by ID, or `None` if no such row exists.
    pub fn get_person(&self, id: i64) -> Option<Person> {
        persons::table
            .filter(persons::id.eq(id))
            .first::<Person>(&self.c)
            .optional()
            .expect("Error loading person!")
    }

    /// Delete a person. Cascades to the works they composed.
    pub fn delete_person(&self, id: i64) {
        diesel::delete(persons::table.filter(persons::id.eq(id)))
            .execute(&self.c)
            .expect("Failed to delete person!");
    }

    /// Get all stored persons.
    pub fn get_persons(&self) -> Vec<Person> {
        persons::table
            .load::<Person>(&self.c)
            .expect("Error loading persons!")
    }

    /// Insert an instrument or replace an existing one with the same ID.
    pub fn update_instrument(&self, instrument: Instrument) {
        diesel::replace_into(instruments::table)
            .values(instrument)
            .execute(&self.c)
            .expect("Failed to insert instrument!");
    }

    /// Get an instrument by ID, or `None` if no such row exists.
    pub fn get_instrument(&self, id: i64) -> Option<Instrument> {
        instruments::table
            .filter(instruments::id.eq(id))
            .first::<Instrument>(&self.c)
            .optional()
            .expect("Error loading instrument!")
    }

    /// Delete an instrument.
    pub fn delete_instrument(&self, id: i64) {
        diesel::delete(instruments::table.filter(instruments::id.eq(id)))
            .execute(&self.c)
            .expect("Failed to delete instrument!");
    }

    /// Get all stored instruments.
    pub fn get_instruments(&self) -> Vec<Instrument> {
        instruments::table
            .load::<Instrument>(&self.c)
            .expect("Error loading instruments!")
    }

    /// Insert or replace a work together with its instrumentations, parts
    /// and sections.
    ///
    /// The previously stored work with the same ID (if any) is deleted
    /// first; the delete cascades to all associated rows, which are then
    /// reinserted from `work_insertion`.
    pub fn update_work(&self, work_insertion: WorkInsertion) {
        let id = work_insertion.work.id;
        self.delete_work(id);

        diesel::insert_into(works::table)
            .values(work_insertion.work)
            .execute(&self.c)
            .expect("Failed to insert work!");

        for instrument_id in work_insertion.instrument_ids {
            diesel::insert_into(instrumentations::table)
                .values(Instrumentation {
                    // NOTE(review): random row IDs risk (unlikely) collisions;
                    // consider AUTOINCREMENT keys in a future migration.
                    id: rand::random(),
                    work: id,
                    instrument: instrument_id,
                })
                .execute(&self.c)
                .expect("Failed to insert instrumentation!");
        }

        for part_insertion in work_insertion.parts {
            let part_id = part_insertion.part.id;

            diesel::insert_into(work_parts::table)
                .values(part_insertion.part)
                .execute(&self.c)
                .expect("Failed to insert work part!");

            for instrument_id in part_insertion.instrument_ids {
                diesel::insert_into(part_instrumentations::table)
                    .values(PartInstrumentation {
                        id: rand::random(),
                        work_part: part_id,
                        instrument: instrument_id,
                    })
                    .execute(&self.c)
                    .expect("Failed to insert part instrumentation!");
            }
        }

        for section in work_insertion.sections {
            diesel::insert_into(work_sections::table)
                .values(section)
                .execute(&self.c)
                .expect("Failed to insert work section!");
        }
    }

    /// Get a work by ID, or `None` if no such row exists.
    pub fn get_work(&self, id: i64) -> Option<Work> {
        works::table
            .filter(works::id.eq(id))
            .first::<Work>(&self.c)
            .optional()
            .expect("Error loading work!")
    }

    /// Resolve all associated entities of `work` into a self-contained
    /// description.
    ///
    /// Panics if a referenced person or instrument does not exist.
    pub fn get_work_description_for_work(&self, work: Work) -> WorkDescription {
        WorkDescription {
            id: work.id,
            composer: self
                .get_person(work.composer)
                .expect("Could not find composer for work!"),
            title: work.title,
            instruments: instrumentations::table
                .filter(instrumentations::work.eq(work.id))
                .load::<Instrumentation>(&self.c)
                .expect("Failed to load instrumentations!")
                .iter()
                .map(|instrumentation| {
                    // Fixed: look up the referenced instrument, not the
                    // instrumentation row's own ID.
                    self.get_instrument(instrumentation.instrument)
                        .expect("Could not find instrument for instrumentation!")
                })
                .collect(),
            parts: work_parts::table
                .filter(work_parts::work.eq(work.id))
                // part_index stores the intended ordering of the parts.
                .order(work_parts::part_index)
                .load::<WorkPart>(&self.c)
                .expect("Failed to load work parts!")
                .iter()
                .map(|work_part| WorkPartDescription {
                    composer: work_part.composer.map(|composer| {
                        self.get_person(composer)
                            .expect("Could not find composer for work part!")
                    }),
                    title: work_part.title.clone(),
                    instruments: part_instrumentations::table
                        .filter(part_instrumentations::work_part.eq(work_part.id))
                        .load::<PartInstrumentation>(&self.c)
                        .expect("Failed to load part instrumentations!")
                        .iter()
                        .map(|part_instrumentation| {
                            // Fixed: look up the referenced instrument, not
                            // the part instrumentation row's own ID.
                            self.get_instrument(part_instrumentation.instrument)
                                .expect("Could not find instrument for part instrumentation!")
                        })
                        .collect(),
                })
                .collect(),
            sections: work_sections::table
                .filter(work_sections::work.eq(work.id))
                .load::<WorkSection>(&self.c)
                .expect("Failed to load work sections!")
                .iter()
                .map(|section| WorkSectionDescription {
                    title: section.title.clone(),
                    before_index: section.before_index,
                })
                .collect(),
        }
    }

    /// Get a fully resolved description of a work, or `None` if the work
    /// does not exist.
    pub fn get_work_description(&self, id: i64) -> Option<WorkDescription> {
        self.get_work(id)
            .map(|work| self.get_work_description_for_work(work))
    }

    /// Delete a work. Cascades to its instrumentations, parts, sections and
    /// recordings.
    pub fn delete_work(&self, id: i64) {
        diesel::delete(works::table.filter(works::id.eq(id)))
            .execute(&self.c)
            .expect("Failed to delete work!");
    }

    /// Get all works by the person with ID `composer_id`.
    pub fn get_works(&self, composer_id: i64) -> Vec<Work> {
        works::table
            .filter(works::composer.eq(composer_id))
            .load::<Work>(&self.c)
            .expect("Error loading works!")
    }

    /// Insert an ensemble or replace an existing one with the same ID.
    pub fn update_ensemble(&self, ensemble: Ensemble) {
        diesel::replace_into(ensembles::table)
            .values(ensemble)
            .execute(&self.c)
            .expect("Failed to insert ensemble!");
    }

    /// Get an ensemble by ID, or `None` if no such row exists.
    pub fn get_ensemble(&self, id: i64) -> Option<Ensemble> {
        ensembles::table
            .filter(ensembles::id.eq(id))
            .first::<Ensemble>(&self.c)
            .optional()
            .expect("Error loading ensemble!")
    }

    /// Delete an ensemble. Cascades to its performances.
    pub fn delete_ensemble(&self, id: i64) {
        diesel::delete(ensembles::table.filter(ensembles::id.eq(id)))
            .execute(&self.c)
            .expect("Failed to delete ensemble!");
    }

    /// Get all stored ensembles.
    pub fn get_ensembles(&self) -> Vec<Ensemble> {
        ensembles::table
            .load::<Ensemble>(&self.c)
            .expect("Error loading ensembles!")
    }

    /// Insert or replace a recording together with its performances.
    ///
    /// The previously stored recording with the same ID (if any) is deleted
    /// first; the delete cascades to its performances.
    pub fn update_recording(&self, recording_insertion: RecordingInsertion) {
        let id = recording_insertion.recording.id;
        self.delete_recording(id);

        diesel::insert_into(recordings::table)
            .values(recording_insertion.recording)
            .execute(&self.c)
            .expect("Failed to insert recording!");

        for performance in recording_insertion.performances {
            diesel::insert_into(performances::table)
                .values(performance)
                .execute(&self.c)
                .expect("Failed to insert performance!");
        }
    }

    /// Get a recording by ID, or `None` if no such row exists.
    pub fn get_recording(&self, id: i64) -> Option<Recording> {
        recordings::table
            .filter(recordings::id.eq(id))
            .first::<Recording>(&self.c)
            .optional()
            .expect("Error loading recording!")
    }

    /// Resolve all associated entities of `recording` into a self-contained
    /// description.
    ///
    /// Panics if the recording's work or any entity referenced by a
    /// performance does not exist.
    pub fn get_recording_description_for_recording(
        &self,
        recording: Recording,
    ) -> RecordingDescription {
        RecordingDescription {
            id: recording.id,
            work: self
                .get_work_description(recording.work)
                .expect("Could not find work for recording!"),
            comment: recording.comment,
            performances: performances::table
                .filter(performances::recording.eq(recording.id))
                .load::<Performance>(&self.c)
                .expect("Failed to load performances!")
                .iter()
                .map(|performance| PerformanceDescription {
                    performance: performance.clone(),
                    person: performance.person.map(|id| {
                        self.get_person(id)
                            .expect("Could not find person for performance!")
                    }),
                    ensemble: performance.ensemble.map(|id| {
                        self.get_ensemble(id)
                            .expect("Could not find ensemble for performance!")
                    }),
                    role: performance.role.map(|id| {
                        self.get_instrument(id)
                            .expect("Could not find role for performance!")
                    }),
                })
                .collect(),
        }
    }

    /// Get a fully resolved description of a recording, or `None` if the
    /// recording does not exist.
    pub fn get_recording_description(&self, id: i64) -> Option<RecordingDescription> {
        self.get_recording(id)
            .map(|recording| self.get_recording_description_for_recording(recording))
    }

    /// Delete a recording. Cascades to its performances.
    pub fn delete_recording(&self, id: i64) {
        diesel::delete(recordings::table.filter(recordings::id.eq(id)))
            .execute(&self.c)
            .expect("Failed to delete recording!");
    }

    /// Get all recordings of the work with ID `work_id`.
    pub fn get_recordings(&self, work_id: i64) -> Vec<Recording> {
        recordings::table
            .filter(recordings::work.eq(work_id))
            .load::<Recording>(&self.c)
            .expect("Error loading recordings!")
    }
}

10
src/database/mod.rs Normal file
View file

@ -0,0 +1,10 @@
// Database layer: connection handling, table row types, rich description
// models and the Diesel-generated schema.

pub mod database;
pub use database::*;

pub mod models;
pub use models::*;

// The schema module is generated by Diesel and intentionally not re-exported.
pub mod schema;

pub mod tables;
pub use tables::*;

140
src/database/models.rs Normal file
View file

@ -0,0 +1,140 @@
use super::tables::*;
use std::convert::TryInto;
/// A part of a work (e.g. a movement), with its references resolved.
#[derive(Debug, Clone)]
pub struct WorkPartDescription {
    pub title: String,
    // Composer of this specific part, if it differs from the work's default.
    pub composer: Option<Person>,
    pub instruments: Vec<Instrument>,
}

/// A named section grouping work parts, placed before the part at
/// `before_index`.
#[derive(Debug, Clone)]
pub struct WorkSectionDescription {
    pub title: String,
    pub before_index: i64,
}

/// A work with all of its associated entities resolved into owned values.
#[derive(Debug, Clone)]
pub struct WorkDescription {
    pub id: i64,
    pub title: String,
    pub composer: Person,
    pub instruments: Vec<Instrument>,
    pub parts: Vec<WorkPartDescription>,
    pub sections: Vec<WorkSectionDescription>,
}

/// A work part row plus the instrument IDs to store for it.
#[derive(Debug, Clone)]
pub struct WorkPartInsertion {
    pub part: WorkPart,
    pub instrument_ids: Vec<i64>,
}

/// Everything needed to (re)insert a work and its associated rows.
#[derive(Debug, Clone)]
pub struct WorkInsertion {
    pub work: Work,
    pub instrument_ids: Vec<i64>,
    pub parts: Vec<WorkPartInsertion>,
    pub sections: Vec<WorkSection>,
}
impl From<WorkDescription> for WorkInsertion {
    /// Flatten a resolved work description into the rows needed to store it.
    ///
    /// Fresh random IDs are generated for the part and section rows; the work
    /// itself keeps the ID from the description.
    fn from(description: WorkDescription) -> Self {
        WorkInsertion {
            work: Work {
                id: description.id,
                composer: description.composer.id,
                title: description.title.clone(),
            },
            instrument_ids: description
                .instruments
                .iter()
                .map(|instrument| instrument.id)
                .collect(),
            parts: description
                .parts
                .iter()
                .enumerate()
                .map(|(index, part)| WorkPartInsertion {
                    part: WorkPart {
                        id: rand::random(),
                        work: description.id,
                        // The enumeration index preserves the parts' order.
                        // Fixed: the panic message claimed "u32" although the
                        // target type is i64.
                        part_index: index.try_into().expect("Part index didn't fit into i64!"),
                        composer: part.composer.as_ref().map(|person| person.id),
                        title: part.title.clone(),
                    },
                    instrument_ids: part
                        .instruments
                        .iter()
                        .map(|instrument| instrument.id)
                        .collect(),
                })
                .collect(),
            sections: description
                .sections
                .iter()
                .map(|section| WorkSection {
                    id: rand::random(),
                    work: description.id,
                    title: section.title.clone(),
                    before_index: section.before_index,
                })
                .collect(),
        }
    }
}
/// A performance row with its optional references resolved.
///
/// A performance is credited either to a person or to an ensemble, optionally
/// with an instrument as the role.
#[derive(Debug, Clone)]
pub struct PerformanceDescription {
    pub performance: Performance,
    pub person: Option<Person>,
    pub ensemble: Option<Ensemble>,
    pub role: Option<Instrument>,
}
impl PerformanceDescription {
    /// Whether this performance is credited to an individual person.
    pub fn is_person(&self) -> bool {
        matches!(self.person, Some(_))
    }

    /// Whether an instrument/role is recorded for this performance.
    pub fn has_role(&self) -> bool {
        matches!(self.role, Some(_))
    }
}
/// A recording with its work and performances resolved into owned values.
#[derive(Debug, Clone)]
pub struct RecordingDescription {
    pub id: i64,
    pub work: WorkDescription,
    pub comment: String,
    pub performances: Vec<PerformanceDescription>,
}

/// Everything needed to (re)insert a recording and its performance rows.
#[derive(Debug, Clone)]
pub struct RecordingInsertion {
    pub recording: Recording,
    pub performances: Vec<Performance>,
}
impl From<RecordingDescription> for RecordingInsertion {
    /// Flatten a resolved recording description into the rows needed to
    /// store it, generating fresh random IDs for the performance rows.
    fn from(description: RecordingDescription) -> Self {
        let recording = Recording {
            id: description.id,
            work: description.work.id,
            comment: description.comment.clone(),
        };

        let mut performances = Vec::with_capacity(description.performances.len());
        for performance in &description.performances {
            performances.push(Performance {
                id: rand::random(),
                recording: description.id,
                person: performance.person.as_ref().map(|person| person.id),
                ensemble: performance.ensemble.as_ref().map(|ensemble| ensemble.id),
                role: performance.role.as_ref().map(|role| role.id),
            });
        }

        RecordingInsertion {
            recording,
            performances,
        }
    }
}

109
src/database/schema.rs Normal file
View file

@ -0,0 +1,109 @@
// Diesel schema definitions for the Musicus database.
//
// NOTE: this file is normally generated by `diesel migration run` (see
// diesel.toml) and should not be edited manually. The join target for
// part_instrumentations is corrected here to match the intended foreign key
// (work_part references work_parts, not works).

table! {
    ensembles (id) {
        id -> BigInt,
        name -> Text,
    }
}

table! {
    instrumentations (id) {
        id -> BigInt,
        work -> BigInt,
        instrument -> BigInt,
    }
}

table! {
    instruments (id) {
        id -> BigInt,
        name -> Text,
    }
}

table! {
    part_instrumentations (id) {
        id -> BigInt,
        work_part -> BigInt,
        instrument -> BigInt,
    }
}

table! {
    performances (id) {
        id -> BigInt,
        recording -> BigInt,
        person -> Nullable<BigInt>,
        ensemble -> Nullable<BigInt>,
        role -> Nullable<BigInt>,
    }
}

table! {
    persons (id) {
        id -> BigInt,
        first_name -> Text,
        last_name -> Text,
    }
}

table! {
    recordings (id) {
        id -> BigInt,
        work -> BigInt,
        comment -> Text,
    }
}

table! {
    work_parts (id) {
        id -> BigInt,
        work -> BigInt,
        part_index -> BigInt,
        composer -> Nullable<BigInt>,
        title -> Text,
    }
}

table! {
    work_sections (id) {
        id -> BigInt,
        work -> BigInt,
        title -> Text,
        before_index -> BigInt,
    }
}

table! {
    works (id) {
        id -> BigInt,
        composer -> BigInt,
        title -> Text,
    }
}

joinable!(instrumentations -> instruments (instrument));
joinable!(instrumentations -> works (work));
joinable!(part_instrumentations -> instruments (instrument));
joinable!(part_instrumentations -> work_parts (work_part));
joinable!(performances -> ensembles (ensemble));
joinable!(performances -> instruments (role));
joinable!(performances -> persons (person));
joinable!(performances -> recordings (recording));
joinable!(recordings -> works (work));
joinable!(work_parts -> persons (composer));
joinable!(work_parts -> works (work));
joinable!(work_sections -> works (work));
joinable!(works -> persons (composer));

allow_tables_to_appear_in_same_query!(
    ensembles,
    instrumentations,
    instruments,
    part_instrumentations,
    performances,
    persons,
    recordings,
    work_parts,
    work_sections,
    works,
);

75
src/database/tables.rs Normal file
View file

@ -0,0 +1,75 @@
use super::schema::*;
use diesel::Queryable;
/// Row of the `persons` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Person {
    pub id: i64,
    pub first_name: String,
    pub last_name: String,
}

/// Row of the `instruments` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Instrument {
    pub id: i64,
    pub name: String,
}

/// Row of the `works` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Work {
    pub id: i64,
    // ID of the composing person.
    pub composer: i64,
    pub title: String,
}

/// Row of the `instrumentations` table: one instrument used by one work.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Instrumentation {
    pub id: i64,
    pub work: i64,
    pub instrument: i64,
}

/// Row of the `work_parts` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct WorkPart {
    pub id: i64,
    pub work: i64,
    // Position of this part within the work.
    pub part_index: i64,
    // Part-specific composer, if different from the work's.
    pub composer: Option<i64>,
    pub title: String,
}

/// Row of the `part_instrumentations` table: one instrument used by one part.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct PartInstrumentation {
    pub id: i64,
    pub work_part: i64,
    pub instrument: i64,
}

/// Row of the `work_sections` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct WorkSection {
    pub id: i64,
    pub work: i64,
    pub title: String,
    // Index of the part before which this section heading appears.
    pub before_index: i64,
}

/// Row of the `ensembles` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Ensemble {
    pub id: i64,
    pub name: String,
}

/// Row of the `recordings` table.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Recording {
    pub id: i64,
    pub work: i64,
    pub comment: String,
}

/// Row of the `performances` table. Either `person` or `ensemble` identifies
/// the performer; `role` optionally names the instrument played.
#[derive(Insertable, Queryable, Debug, Clone)]
pub struct Performance {
    pub id: i64,
    pub recording: i64,
    pub person: Option<i64>,
    pub ensemble: Option<i64>,
    pub role: Option<i64>,
}

View file

@ -1,7 +1,17 @@
// Required for database/schema.rs
#[macro_use]
extern crate diesel;
// Required for embed_migrations macro in database/database.rs
#[macro_use]
extern crate diesel_migrations;
use gio::prelude::*; use gio::prelude::*;
use glib::clone; use glib::clone;
use std::cell::RefCell; use std::cell::RefCell;
mod database;
mod window; mod window;
use window::Window; use window::Window;