Artist merge for non-null properties always erases database properties #72
src/lib.rs (40 changed lines)
@@ -48,7 +48,7 @@ impl fmt::Display for InvalidUrlError {
 }
 
 /// MusicBrainz reference.
-#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
 pub struct MusicBrainz(Url);
 
 impl MusicBrainz {
@@ -93,7 +93,7 @@ impl IMbid for MusicBrainz {
 }
 
 /// MusicButler reference.
-#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
 pub struct MusicButler(Url);
 
 impl MusicButler {
@@ -126,7 +126,7 @@ impl IUrl for MusicButler {
 }
 
 /// Bandcamp reference.
-#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
 pub struct Bandcamp(Url);
 
 impl Bandcamp {
@@ -159,7 +159,7 @@ impl IUrl for Bandcamp {
 }
 
 /// Qobuz reference.
-#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
 pub struct Qobuz(Url);
 
 impl Qobuz {
@@ -192,7 +192,7 @@ impl IUrl for Qobuz {
 }
 
 /// The track file format.
-#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq, Hash)]
 pub enum Format {
     Flac,
     Mp3,
@@ -288,6 +288,17 @@ pub struct ArtistProperties {
     pub qobuz: Option<Qobuz>,
 }
 
+impl Merge for ArtistProperties {
+    fn merge(mut self, other: Self) -> Self {
+        self.musicbrainz = Self::merge_opts(self.musicbrainz, other.musicbrainz);
+        self.musicbutler = Self::merge_vecs(self.musicbutler, other.musicbutler);
+        self.bandcamp = Self::merge_vecs(self.bandcamp, other.bandcamp);
+        self.qobuz = Self::merge_opts(self.qobuz, other.qobuz);
+
+        self
+    }
+}
+
 /// An artist.
 #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
 pub struct Artist {
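Note: the following is a standalone sketch, not crate code. It uses a simplified, hypothetical Props struct (rather than the actual ArtistProperties) to illustrate the field-wise merge semantics introduced above: a property set on either side survives, and the left-hand (self) value wins when both are set.

#[derive(Debug, PartialEq)]
struct Props {
    musicbrainz: Option<String>,
    qobuz: Option<String>,
}

impl Props {
    // Field-wise merge: Option::or keeps self's value if present,
    // otherwise takes the incoming one.
    fn merge(mut self, other: Self) -> Self {
        self.musicbrainz = self.musicbrainz.or(other.musicbrainz);
        self.qobuz = self.qobuz.or(other.qobuz);
        self
    }
}

fn main() {
    let database = Props { musicbrainz: Some("mb".into()), qobuz: None };
    let incoming = Props { musicbrainz: None, qobuz: Some("qb".into()) };
    let merged = database.merge(incoming);
    // The database-side property is no longer erased, and the new one is picked up.
    assert_eq!(merged, Props { musicbrainz: Some("mb".into()), qobuz: Some("qb".into()) });
}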
@@ -311,6 +322,7 @@ impl Ord for Artist {
 impl Merge for Artist {
     fn merge(mut self, other: Self) -> Self {
         assert_eq!(self.id, other.id);
+        self.properties = self.properties.merge(other.properties);
         self.albums = MergeSorted::new(self.albums.into_iter(), other.albums.into_iter()).collect();
         self
     }
@@ -321,6 +333,22 @@ pub type Collection = Vec<Artist>;
 
 trait Merge {
     fn merge(self, other: Self) -> Self;
+
+    fn merge_opts<T>(this: Option<T>, other: Option<T>) -> Option<T> {
+        match (this, other) {
+            (Some(t), Some(_)) => Some(t),
+            (Some(t), None) => Some(t),
+            (None, Some(o)) => Some(o),
+            (None, None) => None,
+        }
+    }
+
+    fn merge_vecs<T: Ord + Eq>(mut this: Vec<T>, mut other: Vec<T>) -> Vec<T> {
+        this.append(&mut other);
+        this.sort_unstable();
+        this.dedup();
+        this
+    }
 }
 
 struct MergeSorted<L, R>
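Note: a minimal standalone sketch of the semantics of the two default methods above; merge_opts is left-biased (equivalent to Option::or) and merge_vecs produces a sorted, de-duplicated union.

fn merge_opts<T>(this: Option<T>, other: Option<T>) -> Option<T> {
    // Same behaviour as the match above: prefer `this` whenever it is Some.
    this.or(other)
}

fn merge_vecs<T: Ord>(mut this: Vec<T>, mut other: Vec<T>) -> Vec<T> {
    this.append(&mut other);
    this.sort_unstable();
    this.dedup(); // dedup only removes adjacent duplicates, hence the sort first
    this
}

fn main() {
    assert_eq!(merge_opts(Some(1), Some(2)), Some(1)); // left value wins
    assert_eq!(merge_opts(None, Some(2)), Some(2));
    assert_eq!(merge_vecs(vec![3, 1, 2], vec![2, 4]), vec![1, 2, 3, 4]);
}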
@@ -740,6 +768,7 @@ mod tests {
         right.id = left.id.clone();
 
         let mut expected = left.clone();
+        expected.properties = expected.properties.merge(right.clone().properties);
         expected.albums.append(&mut right.albums.clone());
         expected.albums.sort_unstable();
 
@@ -756,6 +785,7 @@
         left.albums.sort_unstable();
 
         let mut expected = left.clone();
+        expected.properties = expected.properties.merge(right.clone().properties);
         expected.albums.append(&mut right.albums.clone());
         expected.albums.sort_unstable();
         expected.albums.dedup();
@@ -18,7 +18,7 @@ pub trait ILibrary {
 }
 
 /// An item from the library. An item corresponds to an individual file (usually a single track).
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 pub struct Item {
     pub album_artist: String,
     pub album_year: u32,
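Note: the Hash derive presumably exists so that Item values can be collected into a HashSet (as the test changes further below do), since HashSet requires its element type to implement Eq + Hash. A minimal sketch with a hypothetical stand-in type:

use std::collections::HashSet;

#[derive(Debug, PartialEq, Eq, Hash)]
struct Track {
    album_artist: String,
    album_year: u32,
}

fn main() {
    let a = Track { album_artist: "Аркона".into(), album_year: 2011 };
    let b = Track { album_artist: "Аркона".into(), album_year: 2011 };
    // Without the Hash derive this collect would not compile.
    let set: HashSet<&Track> = [&a, &b].into_iter().collect();
    assert_eq!(set.len(), 1); // equal items collapse into one entry
}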
@@ -10,9 +10,9 @@ use musichoard::{
 use once_cell::sync::Lazy;
 use tempfile::NamedTempFile;
 
-use crate::COLLECTION;
+use crate::testlib::COLLECTION;
 
-static DATABASE_TEST_FILE: Lazy<PathBuf> =
+pub static DATABASE_TEST_FILE: Lazy<PathBuf> =
     Lazy::new(|| fs::canonicalize("./tests/files/database/database.json").unwrap());
 
 #[test]
@@ -1,2 +1,2 @@
 #[cfg(feature = "database-json")]
-mod json;
+pub mod json;
File diff suppressed because one or more lines are too long.

tests/lib.rs (1085 changed lines): file diff suppressed because it is too large.
@@ -1,7 +1,7 @@
 use std::{
     fs,
     path::PathBuf,
-    sync::{Arc, Mutex},
+    sync::{Arc, Mutex}, collections::HashSet,
 };
 
 use once_cell::sync::Lazy;
@@ -14,21 +14,22 @@ use musichoard::{
     Artist,
 };
 
-use crate::COLLECTION;
+use crate::testlib::COLLECTION;
 
-static BEETS_EMPTY_CONFIG: Lazy<Arc<Mutex<BeetsLibrary<BeetsLibraryProcessExecutor>>>> =
+pub static BEETS_TEST_CONFIG_PATH: Lazy<PathBuf> =
+    Lazy::new(|| fs::canonicalize("./tests/files/library/config.yml").unwrap());
+
+pub static BEETS_EMPTY_CONFIG: Lazy<Arc<Mutex<BeetsLibrary<BeetsLibraryProcessExecutor>>>> =
     Lazy::new(|| {
         Arc::new(Mutex::new(BeetsLibrary::new(
             BeetsLibraryProcessExecutor::default(),
         )))
     });
 
-static BEETS_TEST_CONFIG: Lazy<Arc<Mutex<BeetsLibrary<BeetsLibraryProcessExecutor>>>> =
+pub static BEETS_TEST_CONFIG: Lazy<Arc<Mutex<BeetsLibrary<BeetsLibraryProcessExecutor>>>> =
     Lazy::new(|| {
         Arc::new(Mutex::new(BeetsLibrary::new(
-            BeetsLibraryProcessExecutor::default().config(Some(
-                &fs::canonicalize("./tests/files/library/config.yml").unwrap(),
-            )),
+            BeetsLibraryProcessExecutor::default().config(Some(&*BEETS_TEST_CONFIG_PATH)),
         )))
     });
 
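Note: a minimal sketch of the once_cell pattern used for BEETS_TEST_CONFIG_PATH above; the static is initialized on first access, and &* derefs the Lazy to its inner PathBuf so it can be borrowed as a &Path. The path literal is taken from the diff; takes_path is a hypothetical consumer.

use std::path::{Path, PathBuf};
use once_cell::sync::Lazy;

static CONFIG_PATH: Lazy<PathBuf> =
    Lazy::new(|| PathBuf::from("./tests/files/library/config.yml"));

fn takes_path(path: &Path) {
    println!("beets config: {}", path.display());
}

fn main() {
    // First access runs the closure; &*CONFIG_PATH yields &PathBuf, which coerces to &Path.
    takes_path(&*CONFIG_PATH);
}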
@@ -89,8 +90,10 @@ fn test_full_list() {
     let beets = &mut beets_arc.lock().unwrap();
 
     let output = beets.list(&Query::new()).unwrap();
 
     let expected: Vec<Item> = artists_to_items(&COLLECTION);
 
+    let output: HashSet<_> = output.iter().collect();
+    let expected: HashSet<_> = expected.iter().collect();
     assert_eq!(output, expected);
 }
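Note: converting both sides to a HashSet makes the assertion order-insensitive; a minimal standalone sketch of the same idea with plain integers:

use std::collections::HashSet;

fn main() {
    let output = vec![3, 1, 2];
    let expected = vec![1, 2, 3];
    // Vec equality is order-sensitive, so compare as sets instead.
    assert_ne!(output, expected);
    let output: HashSet<_> = output.iter().collect();
    let expected: HashSet<_> = expected.iter().collect();
    assert_eq!(output, expected);
}

Comparing as sets also ignores duplicate counts, which presumably does not matter for these library listings.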
@@ -103,7 +106,7 @@ fn test_album_artist_query() {
         .list(Query::new().include(Field::AlbumArtist(String::from("Аркона"))))
         .unwrap();
 
-    let expected: Vec<Item> = artists_to_items(&COLLECTION[0..1]);
+    let expected: Vec<Item> = artists_to_items(&COLLECTION[4..5]);
     assert_eq!(output, expected);
 }
 
@@ -116,7 +119,7 @@ fn test_album_title_query() {
         .list(&Query::new().include(Field::AlbumTitle(String::from("Slovo"))))
         .unwrap();
 
-    let expected: Vec<Item> = artists_to_items(&COLLECTION[0..1]);
+    let expected: Vec<Item> = artists_to_items(&COLLECTION[4..5]);
     assert_eq!(output, expected);
 }
 
@@ -128,7 +131,9 @@ fn test_exclude_query() {
     let output = beets
         .list(&Query::new().exclude(Field::AlbumArtist(String::from("Аркона"))))
         .unwrap();
-    let expected: Vec<Item> = artists_to_items(&COLLECTION[..4]);
 
+    let expected: Vec<Item> = artists_to_items(&COLLECTION[1..]);
+    let output: HashSet<_> = output.iter().collect();
+    let expected: HashSet<_> = expected.iter().collect();
     assert_eq!(output, expected);
 }
@@ -1,2 +1,2 @@
 #[cfg(feature = "library-beets")]
-mod beets;
+pub mod beets;
tests/testlib.rs (new file, 1026 lines): file diff suppressed because it is too large.