db: Add source to items

This also adds the previously missing enable_updates field to mediums and albums.
Elias Projahn 2025-10-26 10:16:43 +01:00
parent 8d9690dad6
commit 47a2e06a17
12 changed files with 655 additions and 40 deletions
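
The bulk of the change is the pair of new SQL migrations further down: one rebuilds every item table with the new source column (defaulting to 'user') and, where it was still missing, enable_updates, while the other restores the previous layout. Rebuilding the tables instead of using ALTER TABLE ADD COLUMN is presumably needed because the new columns sit in the middle of the column list, which the generated Diesel schema is expected to match. A minimal sketch of that pattern, using a hypothetical items table rather than any table from this commit:

CREATE TABLE items_new (
    item_id TEXT NOT NULL PRIMARY KEY,
    name TEXT NOT NULL,
    source TEXT NOT NULL DEFAULT 'user',          -- new column
    enable_updates BOOLEAN NOT NULL DEFAULT TRUE, -- new column
    created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime'))
);
-- Existing rows are copied without the new columns, so they pick up the defaults.
INSERT INTO items_new (item_id, name, created_at)
SELECT item_id,
    name,
    created_at
FROM items;
DROP TABLE items;
ALTER TABLE items_new
RENAME TO items;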


@ -65,9 +65,15 @@ template $MusicusAlbumEditor: Adw.NavigationPage {
margin-top: 24;
styles [
"boxed-list",
"boxed-list-separate",
]
Adw.SwitchRow enable_updates_row {
title: _("Enable updates");
subtitle: _("Keep this item up to date with the online metadata library");
active: true;
}
Adw.ButtonRow save_row {
title: _("_Create album");
use-underline: true;


@ -0,0 +1,248 @@
CREATE TABLE persons_old (
person_id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP,
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE roles_old (
role_id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE instruments_old (
instrument_id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP,
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE works_old (
work_id TEXT NOT NULL PRIMARY KEY,
parent_work_id TEXT REFERENCES works(work_id),
sequence_number INTEGER,
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP,
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE ensembles_old (
ensemble_id TEXT NOT NULL PRIMARY KEY,
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP,
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE recordings_old (
recording_id TEXT NOT NULL PRIMARY KEY,
work_id TEXT NOT NULL REFERENCES works(work_id),
year INTEGER,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP,
enable_updates BOOLEAN NOT NULL DEFAULT TRUE
);
CREATE TABLE mediums_old (
medium_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
discid TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE albums_old (
album_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
INSERT INTO persons_old (
person_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
)
SELECT person_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
FROM persons;
DROP TABLE persons;
ALTER TABLE persons_old
RENAME TO persons;
INSERT INTO roles_old (
role_id,
name,
created_at,
edited_at,
last_used_at,
enable_updates
)
SELECT role_id,
name,
created_at,
edited_at,
last_used_at,
enable_updates
FROM roles;
DROP TABLE roles;
ALTER TABLE roles_old
RENAME TO roles;
INSERT INTO instruments_old (
instrument_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
)
SELECT instrument_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
FROM instruments;
DROP TABLE instruments;
ALTER TABLE instruments_old
RENAME TO instruments;
INSERT INTO works_old (
work_id,
parent_work_id,
sequence_number,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
)
SELECT work_id,
parent_work_id,
sequence_number,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
FROM works;
DROP TABLE works;
ALTER TABLE works_old
RENAME TO works;
INSERT INTO ensembles_old (
ensemble_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
)
SELECT ensemble_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
FROM ensembles;
DROP TABLE ensembles;
ALTER TABLE ensembles_old
RENAME TO ensembles;
INSERT INTO recordings_old (
recording_id,
work_id,
year,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
)
SELECT recording_id,
work_id,
year,
created_at,
edited_at,
last_used_at,
last_played_at,
enable_updates
FROM recordings;
DROP TABLE recordings;
ALTER TABLE recordings_old
RENAME TO recordings;
INSERT INTO mediums_old (
medium_id,
discid,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT medium_id,
discid,
created_at,
edited_at,
last_used_at,
last_played_at
FROM mediums;
DROP TABLE mediums;
ALTER TABLE mediums_old
RENAME TO mediums;
INSERT INTO albums_old (
album_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT album_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM albums;
DROP TABLE albums;
ALTER TABLE albums_old
RENAME TO albums;


@ -0,0 +1,245 @@
CREATE TABLE persons_new (
person_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE roles_new (
role_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime'))
);
CREATE TABLE instruments_new (
instrument_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE works_new (
work_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
parent_work_id TEXT REFERENCES works(work_id),
sequence_number INTEGER,
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE ensembles_new (
ensemble_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE recordings_new (
recording_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
work_id TEXT NOT NULL REFERENCES works(work_id),
year INTEGER,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE mediums_new (
medium_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
discid TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
CREATE TABLE albums_new (
album_id TEXT NOT NULL PRIMARY KEY REFERENCES item_state(id),
name TEXT NOT NULL,
source TEXT NOT NULL DEFAULT 'user',
enable_updates BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
edited_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_used_at TIMESTAMP NOT NULL DEFAULT (DATETIME('now', 'localtime')),
last_played_at TIMESTAMP
);
INSERT INTO persons_new (
person_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT person_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM persons;
DROP TABLE persons;
ALTER TABLE persons_new
RENAME TO persons;
INSERT INTO roles_new (
role_id,
name,
created_at,
edited_at,
last_used_at
)
SELECT role_id,
name,
created_at,
edited_at,
last_used_at
FROM roles;
DROP TABLE roles;
ALTER TABLE roles_new
RENAME TO roles;
INSERT INTO instruments_new (
instrument_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT instrument_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM instruments;
DROP TABLE instruments;
ALTER TABLE instruments_new
RENAME TO instruments;
INSERT INTO works_new (
work_id,
parent_work_id,
sequence_number,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT work_id,
parent_work_id,
sequence_number,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM works;
DROP TABLE works;
ALTER TABLE works_new
RENAME TO works;
INSERT INTO ensembles_new (
ensemble_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT ensemble_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM ensembles;
DROP TABLE ensembles;
ALTER TABLE ensembles_new
RENAME TO ensembles;
INSERT INTO recordings_new (
recording_id,
work_id,
year,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT recording_id,
work_id,
year,
created_at,
edited_at,
last_used_at,
last_played_at
FROM recordings;
DROP TABLE recordings;
ALTER TABLE recordings_new
RENAME TO recordings;
INSERT INTO mediums_new (
medium_id,
discid,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT medium_id,
discid,
created_at,
edited_at,
last_used_at,
last_played_at
FROM mediums;
DROP TABLE mediums;
ALTER TABLE mediums_new
RENAME TO mediums;
INSERT INTO albums_new (
album_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
)
SELECT album_id,
name,
created_at,
edited_at,
last_used_at,
last_played_at
FROM albums;
DROP TABLE albums;
ALTER TABLE albums_new
RENAME TO albums;
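
An informal way to check the result of this migration (illustration only, not part of the commit): rows that existed before the rebuild come back with the column defaults, since none of the INSERT ... SELECT statements above mention source or enable_updates.

SELECT person_id,
    source,
    enable_updates
FROM persons
LIMIT 5;
-- Pre-existing rows report source = 'user'; enable_updates falls back to its
-- default (TRUE) because the old value is not copied over.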


@ -74,6 +74,7 @@ pub struct Album {
pub album_id: String,
pub name: TranslatedString,
pub recordings: Vec<Recording>,
pub enable_updates: bool,
}
impl Eq for Person {}
@ -433,6 +434,7 @@ impl Album {
album_id: data.album_id,
name: data.name,
recordings,
enable_updates: data.enable_updates,
})
}


@ -20,6 +20,8 @@ diesel::table! {
albums (album_id) {
album_id -> Text,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
@ -40,11 +42,12 @@ diesel::table! {
ensembles (ensemble_id) {
ensemble_id -> Text,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
last_played_at -> Nullable<Timestamp>,
enable_updates -> Bool,
}
}
@ -52,11 +55,12 @@ diesel::table! {
instruments (instrument_id) {
instrument_id -> Text,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
last_played_at -> Nullable<Timestamp>,
enable_updates -> Bool,
}
}
@ -64,6 +68,8 @@ diesel::table! {
mediums (medium_id) {
medium_id -> Text,
discid -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
@ -75,16 +81,17 @@ diesel::table! {
persons (person_id) {
person_id -> Text,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
last_played_at -> Nullable<Timestamp>,
enable_updates -> Bool,
}
}
diesel::table! {
recording_ensembles (recording_id, ensemble_id) {
recording_ensembles (recording_id, ensemble_id, sequence_number) {
recording_id -> Text,
ensemble_id -> Text,
role_id -> Nullable<Text>,
@ -93,7 +100,7 @@ diesel::table! {
}
diesel::table! {
recording_persons (recording_id, person_id) {
recording_persons (recording_id, person_id, sequence_number) {
recording_id -> Text,
person_id -> Text,
role_id -> Nullable<Text>,
@ -107,11 +114,12 @@ diesel::table! {
recording_id -> Text,
work_id -> Text,
year -> Nullable<Integer>,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
last_played_at -> Nullable<Timestamp>,
enable_updates -> Bool,
}
}
@ -119,10 +127,11 @@ diesel::table! {
roles (role_id) {
role_id -> Text,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
enable_updates -> Bool,
}
}
@ -158,7 +167,7 @@ diesel::table! {
}
diesel::table! {
work_persons (work_id, person_id) {
work_persons (work_id, person_id, sequence_number) {
work_id -> Text,
person_id -> Text,
role_id -> Nullable<Text>,
@ -172,11 +181,12 @@ diesel::table! {
parent_work_id -> Nullable<Text>,
sequence_number -> Nullable<Integer>,
name -> Text,
source -> Text,
enable_updates -> Bool,
created_at -> Timestamp,
edited_at -> Timestamp,
last_used_at -> Timestamp,
last_played_at -> Nullable<Timestamp>,
enable_updates -> Bool,
}
}


@ -24,11 +24,12 @@ use super::{schema::*, TranslatedString};
pub struct Person {
pub person_id: String,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub last_played_at: Option<NaiveDateTime>,
pub enable_updates: bool,
}
#[derive(Boxed, Insertable, Queryable, Selectable, Clone, Debug)]
@ -37,10 +38,11 @@ pub struct Person {
pub struct Role {
pub role_id: String,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub enable_updates: bool,
}
#[derive(Boxed, Insertable, Queryable, Selectable, Clone, Debug)]
@ -49,11 +51,12 @@ pub struct Role {
pub struct Instrument {
pub instrument_id: String,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub last_played_at: Option<NaiveDateTime>,
pub enable_updates: bool,
}
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
@ -63,11 +66,12 @@ pub struct Work {
pub parent_work_id: Option<String>,
pub sequence_number: Option<i32>,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub last_played_at: Option<NaiveDateTime>,
pub enable_updates: bool,
}
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
@ -92,11 +96,12 @@ pub struct WorkInstrument {
pub struct Ensemble {
pub ensemble_id: String,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub last_played_at: Option<NaiveDateTime>,
pub enable_updates: bool,
}
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
@ -114,11 +119,12 @@ pub struct Recording {
pub recording_id: String,
pub work_id: String,
pub year: Option<i32>,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
pub last_played_at: Option<NaiveDateTime>,
pub enable_updates: bool,
}
#[derive(Insertable, Queryable, Selectable, Clone, Debug)]
@ -168,6 +174,8 @@ pub struct TrackWork {
pub struct Medium {
pub medium_id: String,
pub discid: String,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
@ -179,6 +187,8 @@ pub struct Medium {
pub struct Album {
pub album_id: String,
pub name: TranslatedString,
pub source: Source,
pub enable_updates: bool,
pub created_at: NaiveDateTime,
pub edited_at: NaiveDateTime,
pub last_used_at: NaiveDateTime,
@ -256,3 +266,40 @@ impl AsRef<Path> for PathBufWrapper {
self.0.as_ref()
}
}
#[derive(AsExpression, FromSqlRow, Copy, Clone, Debug)]
#[diesel(sql_type = Text)]
pub enum Source {
Metadata,
User,
Import,
Unknown,
}
impl ToSql<Text, Sqlite> for Source {
fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Sqlite>) -> diesel::serialize::Result {
out.set_value(match self {
Source::Metadata => "metadata",
Source::User => "user",
Source::Import => "import",
Source::Unknown => "unknown",
});
Ok(IsNull::No)
}
}
impl<DB> FromSql<Text, DB> for Source
where
DB: Backend,
String: FromSql<Text, DB>,
{
fn from_sql(bytes: DB::RawValue<'_>) -> diesel::deserialize::Result<Self> {
Ok(match String::from_sql(bytes)?.as_str() {
"metadata" => Source::Metadata,
"user" => Source::User,
"import" => Source::Import,
_ => Source::Unknown,
})
}
}


@ -39,6 +39,8 @@ mod imp {
#[template_child]
pub recordings_list: TemplateChild<gtk::ListBox>,
#[template_child]
pub enable_updates_row: TemplateChild<adw::SwitchRow>,
#[template_child]
pub save_row: TemplateChild<adw::ButtonRow>,
}
@ -126,6 +128,10 @@ impl AlbumEditor {
for recording in &album.recordings {
obj.add_recording(recording.to_owned());
}
obj.imp()
.enable_updates_row
.set_active(album.enable_updates);
}
obj
@ -191,10 +197,16 @@ impl AlbumEditor {
.map(|r| r.recording())
.collect::<Vec<Recording>>();
let enable_updates = self.imp().enable_updates_row.is_active();
if let Some(album_id) = self.imp().album_id.get() {
library.update_album(album_id, name, recordings).unwrap();
library
.update_album(album_id, name, recordings, enable_updates)
.unwrap();
} else {
let album = library.create_album(name, recordings).unwrap();
let album = library
.create_album(name, recordings, enable_updates)
.unwrap();
self.emit_by_name::<()>("created", &[&album]);
}


@ -10,8 +10,8 @@ use gtk::{gio, glib, glib::subclass::Signal};
use once_cell::sync::Lazy;
use crate::{
config, library::Library, process::Process, process_manager::ProcessManager,
process_row::ProcessRow,
config, db::tables::Source, library::Library, process::Process,
process_manager::ProcessManager, process_row::ProcessRow,
};
mod imp {
@ -119,7 +119,7 @@ impl EmptyPage {
.library
.get()
.unwrap()
.import_library_from_url(&url)
.import_library_from_url(&url, Source::Metadata)
{
Ok(receiver) => {
let process = Process::new(&gettext("Downloading music library"), receiver);


@ -10,7 +10,13 @@ use chrono::prelude::*;
use diesel::{prelude::*, QueryDsl, SqliteConnection};
use super::Library;
use crate::db::{self, models::*, schema::*, tables, TranslatedString};
use crate::db::{
self,
models::*,
schema::*,
tables::{self, Source},
TranslatedString,
};
impl Library {
pub fn create_person(&self, name: TranslatedString, enable_updates: bool) -> Result<Person> {
@ -21,6 +27,7 @@ impl Library {
let person = Person {
person_id: db::generate_id(),
name,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -86,6 +93,7 @@ impl Library {
let instrument = Instrument {
instrument_id: db::generate_id(),
name,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -147,6 +155,7 @@ impl Library {
let role = Role {
role_id: db::generate_id(),
name,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -243,6 +252,7 @@ impl Library {
parent_work_id: parent_work_id.map(|w| w.to_string()),
sequence_number,
name,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -452,6 +462,7 @@ impl Library {
let ensemble_data = tables::Ensemble {
ensemble_id: db::generate_id(),
name,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -528,6 +539,7 @@ impl Library {
recording_id: recording_id.clone(),
work_id: work.work_id.clone(),
year,
source: Source::User,
created_at: now,
edited_at: now,
last_used_at: now,
@ -691,6 +703,7 @@ impl Library {
&self,
name: TranslatedString,
recordings: Vec<Recording>,
enable_updates: bool,
) -> Result<Album> {
let connection = &mut *self.imp().connection.get().unwrap().lock().unwrap();
@ -700,6 +713,8 @@ impl Library {
let album_data = tables::Album {
album_id: album_id.clone(),
name,
source: Source::User,
enable_updates,
created_at: now,
edited_at: now,
last_used_at: now,
@ -734,6 +749,7 @@ impl Library {
album_id: &str,
name: TranslatedString,
recordings: Vec<Recording>,
enable_updates: bool,
) -> Result<()> {
let connection = &mut *self.imp().connection.get().unwrap().lock().unwrap();
@ -743,6 +759,7 @@ impl Library {
.filter(albums::album_id.eq(album_id))
.set((
albums::name.eq(name),
albums::enable_updates.eq(enable_updates),
albums::edited_at.eq(now),
albums::last_used_at.eq(now),
))


@ -19,7 +19,11 @@ use zip::{write::SimpleFileOptions, ZipWriter};
use super::Library;
use crate::{
db::{self, schema::*, tables},
db::{
self,
schema::*,
tables::{self, Source},
},
process::ProcessMsg,
};
@ -28,6 +32,7 @@ impl Library {
pub fn import_library_from_zip(
&self,
path: impl AsRef<Path>,
source: Source,
) -> Result<async_channel::Receiver<ProcessMsg>> {
log::info!(
"Importing library from ZIP at {}",
@ -39,9 +44,15 @@ impl Library {
let (sender, receiver) = async_channel::unbounded::<ProcessMsg>();
thread::spawn(move || {
if let Err(err) = sender.send_blocking(ProcessMsg::Result(
import_library_from_zip_priv(path, library_folder, this_connection, &sender),
)) {
if let Err(err) =
sender.send_blocking(ProcessMsg::Result(import_library_from_zip_priv(
path,
library_folder,
source,
this_connection,
&sender,
)))
{
log::error!("Failed to send library action result: {err:?}");
}
});
@ -84,6 +95,7 @@ impl Library {
pub fn import_library_from_url(
&self,
url: &str,
source: Source,
) -> Result<async_channel::Receiver<ProcessMsg>> {
log::info!("Importing library from URL {url}");
let url = url.to_owned();
@ -94,7 +106,7 @@ impl Library {
thread::spawn(move || {
if let Err(err) = sender.send_blocking(ProcessMsg::Result(
import_library_from_url_priv(url, library_folder, this_connection, &sender),
import_library_from_url_priv(url, library_folder, source, this_connection, &sender),
)) {
log::error!("Failed to send library action result: {err:?}");
}
@ -107,6 +119,7 @@ impl Library {
pub fn import_metadata_from_url(
&self,
url: &str,
source: Source,
) -> Result<async_channel::Receiver<ProcessMsg>> {
log::info!("Importing metadata from URL {url}");
@ -117,7 +130,7 @@ impl Library {
thread::spawn(move || {
if let Err(err) = sender.send_blocking(ProcessMsg::Result(
import_metadata_from_url_priv(url, this_connection, &sender),
import_metadata_from_url_priv(url, source, this_connection, &sender),
)) {
log::error!("Failed to send library action result: {err:?}");
}
@ -131,6 +144,7 @@ impl Library {
fn import_library_from_zip_priv(
zip_path: impl AsRef<Path>,
library_folder: impl AsRef<Path>,
source: Source,
this_connection: Arc<Mutex<SqliteConnection>>,
sender: &async_channel::Sender<ProcessMsg>,
) -> Result<()> {
@ -144,7 +158,7 @@ fn import_library_from_zip_priv(
)?;
// Import metadata.
let tracks = import_metadata_from_file(tmp_db_file.path(), this_connection, false)?;
let tracks = import_metadata_from_file(tmp_db_file.path(), source, this_connection, false)?;
// Import audio files.
let n_tracks = tracks.len();
@ -218,6 +232,7 @@ fn add_file_to_zip(
fn import_metadata_from_url_priv(
url: String,
source: Source,
this_connection: Arc<Mutex<SqliteConnection>>,
sender: &async_channel::Sender<ProcessMsg>,
) -> Result<()> {
@ -236,11 +251,13 @@ fn import_metadata_from_url_priv(
));
let _ = sender.send_blocking(ProcessMsg::Result(
import_metadata_from_file(db_file.path(), this_connection, true).map(|tracks| {
if !tracks.is_empty() {
log::warn!("The metadata file at {url} contains tracks.");
}
}),
import_metadata_from_file(db_file.path(), source, this_connection, true).map(
|tracks| {
if !tracks.is_empty() {
log::warn!("The metadata file at {url} contains tracks.");
}
},
),
));
}
Err(err) => {
@ -254,6 +271,7 @@ fn import_metadata_from_url_priv(
fn import_library_from_url_priv(
url: String,
library_folder: impl AsRef<Path>,
source: Source,
this_connection: Arc<Mutex<SqliteConnection>>,
sender: &async_channel::Sender<ProcessMsg>,
) -> Result<()> {
@ -276,6 +294,7 @@ fn import_library_from_url_priv(
let _ = sender.send_blocking(ProcessMsg::Result(import_library_from_zip_priv(
archive_file.path(),
library_folder,
source,
this_connection,
sender,
)));
@ -295,6 +314,7 @@ fn import_library_from_url_priv(
/// In any case, tracks are returned.
fn import_metadata_from_file(
path: impl AsRef<Path>,
source: Source,
this_connection: Arc<Mutex<SqliteConnection>>,
ignore_tracks: bool,
) -> Result<Vec<tables::Track>> {
@ -329,6 +349,7 @@ fn import_metadata_from_file(
// Import metadata that is not already present.
for mut person in persons {
person.source = source;
person.created_at = now;
person.edited_at = now;
person.last_used_at = now;
@ -341,6 +362,7 @@ fn import_metadata_from_file(
}
for mut role in roles {
role.source = source;
role.created_at = now;
role.edited_at = now;
role.last_used_at = now;
@ -352,6 +374,7 @@ fn import_metadata_from_file(
}
for mut instrument in instruments {
instrument.source = source;
instrument.created_at = now;
instrument.edited_at = now;
instrument.last_used_at = now;
@ -364,6 +387,7 @@ fn import_metadata_from_file(
}
for mut work in works {
work.source = source;
work.created_at = now;
work.edited_at = now;
work.last_used_at = now;
@ -390,6 +414,7 @@ fn import_metadata_from_file(
}
for mut ensemble in ensembles {
ensemble.source = source;
ensemble.created_at = now;
ensemble.edited_at = now;
ensemble.last_used_at = now;
@ -409,6 +434,7 @@ fn import_metadata_from_file(
}
for mut recording in recordings {
recording.source = source;
recording.created_at = now;
recording.edited_at = now;
recording.last_used_at = now;
@ -468,6 +494,7 @@ fn import_metadata_from_file(
}
for mut album in albums {
album.source = source;
album.created_at = now;
album.edited_at = now;
album.last_used_at = now;


@ -9,8 +9,8 @@ use gtk::{
};
use crate::{
config, library::Library, process::Process, process_manager::ProcessManager,
process_row::ProcessRow, window::Window,
config, db::tables::Source, library::Library, process::Process,
process_manager::ProcessManager, process_row::ProcessRow, window::Window,
};
mod imp {
@ -133,7 +133,7 @@ impl LibraryManager {
.library
.get()
.unwrap()
.import_library_from_zip(&path)
.import_library_from_zip(&path, Source::Import)
{
Ok(receiver) => {
let process = Process::new(
@ -240,7 +240,7 @@ impl LibraryManager {
.library
.get()
.unwrap()
.import_metadata_from_url(&url)
.import_metadata_from_url(&url, Source::Metadata)
{
Ok(receiver) => {
let process = Process::new(&gettext("Updating metadata"), receiver);
@ -271,7 +271,7 @@ impl LibraryManager {
.library
.get()
.unwrap()
.import_library_from_url(&url)
.import_library_from_url(&url, Source::Metadata)
{
Ok(receiver) => {
let process = Process::new(&gettext("Updating music library"), receiver);


@ -11,6 +11,7 @@ use gtk::{gio, glib, glib::clone};
use crate::{
album_page::AlbumPage,
config,
db::tables::Source,
editor::{album::AlbumEditor, tracks::TracksEditor},
empty_page::EmptyPage,
library::{Library, LibraryQuery},
@ -315,7 +316,7 @@ impl Window {
config::METADATA_URL.to_string()
};
match library.import_metadata_from_url(&url) {
match library.import_metadata_from_url(&url, Source::Metadata) {
Ok(receiver) => {
let process = Process::new(&gettext("Updating metadata"), receiver);
self.imp().process_manager.add_process(&process);