//! MusicHoard - a music collection manager.

pub mod database;
pub mod library;

use std::{
    cmp::Ordering,
    collections::{HashMap, HashSet},
    fmt::{self, Debug, Display},
    iter::Peekable,
    mem,
};

use database::IDatabase;
use library::{ILibrary, Item, Query};
use paste::paste;
use serde::{Deserialize, Serialize};
use url::Url;
use uuid::Uuid;

/// An object with the [`IUrl`] trait contains a valid URL.
pub trait IUrl {
    fn url(&self) -> &str;
}

/// An object with the [`IMbid`] trait contains a [MusicBrainz
/// Identifier](https://musicbrainz.org/doc/MusicBrainz_Identifier) (MBID).
pub trait IMbid {
    fn mbid(&self) -> &str;
}

#[derive(Debug)]
enum UrlType {
    MusicBrainz,
    MusicButler,
    Bandcamp,
    Qobuz,
}

struct InvalidUrlError {
    url_type: UrlType,
    url: String,
}

impl Display for InvalidUrlError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "invalid url of type {:?}: {}", self.url_type, self.url)
    }
}

/// MusicBrainz reference.
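///
/// A minimal usage sketch, added for illustration. It assumes the crate is published as
/// `musichoard` (as the [`Error`] docs suggest) and reuses the artist URL from the tests at the
/// bottom of this file; the block is marked `ignore` because it is a sketch rather than a
/// guaranteed doctest.
///
/// ```ignore
/// use musichoard::{IMbid, IUrl, MusicBrainz};
///
/// let mb = MusicBrainz::new(
///     "https://musicbrainz.org/artist/d368baa8-21ca-4759-9731-0b2753071ad8",
/// )
/// .expect("a valid musicbrainz.org URL with an MBID path segment");
///
/// assert_eq!(mb.url(), "https://musicbrainz.org/artist/d368baa8-21ca-4759-9731-0b2753071ad8");
/// assert_eq!(mb.mbid(), "d368baa8-21ca-4759-9731-0b2753071ad8");
/// ```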
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct MusicBrainz(Url);

impl MusicBrainz {
    pub fn new<S: AsRef<str>>(url: S) -> Result<Self, Error> {
        let url = Url::parse(url.as_ref())?;

        if !url
            .domain()
            .map(|u| u.ends_with("musicbrainz.org"))
            .unwrap_or(false)
        {
            return Err(Self::invalid_url_error(url).into());
        }

        match url.path_segments().and_then(|mut ps| ps.nth(1)) {
            Some(segment) => Uuid::try_parse(segment)?,
            None => return Err(Self::invalid_url_error(url).into()),
        };

        Ok(MusicBrainz(url))
    }

    fn invalid_url_error<S: Into<String>>(url: S) -> InvalidUrlError {
        InvalidUrlError {
            url_type: UrlType::MusicBrainz,
            url: url.into(),
        }
    }
}

impl TryFrom<&str> for MusicBrainz {
    type Error = Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        MusicBrainz::new(value)
    }
}

impl Display for MusicBrainz {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl IUrl for MusicBrainz {
    fn url(&self) -> &str {
        self.0.as_str()
    }
}

impl IMbid for MusicBrainz {
    fn mbid(&self) -> &str {
        // The URL is assumed to have been validated.
        self.0.path_segments().and_then(|mut ps| ps.nth(1)).unwrap()
    }
}

/// MusicButler reference.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct MusicButler(Url);

impl MusicButler {
    pub fn new<S: AsRef<str>>(url: S) -> Result<Self, Error> {
        let url = Url::parse(url.as_ref())?;

        if !url
            .domain()
            .map(|u| u.ends_with("musicbutler.io"))
            .unwrap_or(false)
        {
            return Err(Self::invalid_url_error(url).into());
        }

        Ok(MusicButler(url))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }

    fn invalid_url_error<S: Into<String>>(url: S) -> InvalidUrlError {
        InvalidUrlError {
            url_type: UrlType::MusicButler,
            url: url.into(),
        }
    }
}

impl TryFrom<&str> for MusicButler {
    type Error = Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        MusicButler::new(value)
    }
}

impl IUrl for MusicButler {
    fn url(&self) -> &str {
        self.0.as_str()
    }
}

/// Bandcamp reference.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Bandcamp(Url);

impl Bandcamp {
    pub fn new<S: AsRef<str>>(url: S) -> Result<Self, Error> {
        let url = Url::parse(url.as_ref())?;

        if !url
            .domain()
            .map(|u| u.ends_with("bandcamp.com"))
            .unwrap_or(false)
        {
            return Err(Self::invalid_url_error(url).into());
        }

        Ok(Bandcamp(url))
    }

    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }

    fn invalid_url_error<S: Into<String>>(url: S) -> InvalidUrlError {
        InvalidUrlError {
            url_type: UrlType::Bandcamp,
            url: url.into(),
        }
    }
}

impl TryFrom<&str> for Bandcamp {
    type Error = Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Bandcamp::new(value)
    }
}

impl IUrl for Bandcamp {
    fn url(&self) -> &str {
        self.0.as_str()
    }
}

/// Qobuz reference.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct Qobuz(Url);

impl Qobuz {
    pub fn new<S: AsRef<str>>(url: S) -> Result<Self, Error> {
        let url = Url::parse(url.as_ref())?;

        if !url
            .domain()
            .map(|u| u.ends_with("qobuz.com"))
            .unwrap_or(false)
        {
            return Err(Self::invalid_url_error(url).into());
        }

        Ok(Qobuz(url))
    }

    fn invalid_url_error<S: Into<String>>(url: S) -> InvalidUrlError {
        InvalidUrlError {
            url_type: UrlType::Qobuz,
            url: url.into(),
        }
    }
}

impl TryFrom<&str> for Qobuz {
    type Error = Error;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        Qobuz::new(value)
    }
}

impl Display for Qobuz {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl IUrl for Qobuz {
    fn url(&self) -> &str {
        self.0.as_str()
    }
}

/// The track file format.
#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq, Hash)]
pub enum Format {
    Flac,
    Mp3,
}

/// The track quality. Combines format and bitrate information.
#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Quality {
    pub format: Format,
    pub bitrate: u32,
}

/// The track identifier.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TrackId {
    pub number: u32,
    pub title: String,
}

/// A single track on an album.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Track {
    pub id: TrackId,
    pub artist: Vec<String>,
    pub quality: Quality,
}

impl PartialOrd for Track {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Track {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.id.cmp(&other.id)
    }
}

impl Merge for Track {
    fn merge(self, other: Self) -> Self {
        assert_eq!(self.id, other.id);
        self
    }
}

/// The album identifier.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct AlbumId {
    pub year: u32,
    pub title: String,
}

/// An album is a collection of tracks that were released together.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Album {
    pub id: AlbumId,
    pub tracks: Vec<Track>,
}

impl PartialOrd for Album {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Album {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.id.cmp(&other.id)
    }
}

impl Merge for Album {
    fn merge(mut self, other: Self) -> Self {
        assert_eq!(self.id, other.id);
        self.tracks = MergeSorted::new(self.tracks.into_iter(), other.tracks.into_iter()).collect();
        self
    }
}

/// The artist identifier.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ArtistId {
    pub name: String,
}

impl AsRef<ArtistId> for ArtistId {
    fn as_ref(&self) -> &ArtistId {
        self
    }
}

impl ArtistId {
    pub fn new<S: Into<String>>(name: S) -> ArtistId {
        ArtistId { name: name.into() }
    }
}

impl Display for ArtistId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.name)
    }
}

/// The artist properties.
#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq, Eq)]
pub struct ArtistProperties {
    pub musicbrainz: Option<MusicBrainz>,
    pub musicbutler: Vec<MusicButler>,
    pub bandcamp: Vec<Bandcamp>,
    pub qobuz: Option<Qobuz>,
}

impl Merge for ArtistProperties {
    fn merge(mut self, other: Self) -> Self {
        self.musicbrainz = Self::merge_opts(self.musicbrainz, other.musicbrainz);
        self.musicbutler = Self::merge_vecs(self.musicbutler, other.musicbutler);
        self.bandcamp = Self::merge_vecs(self.bandcamp, other.bandcamp);
        self.qobuz = Self::merge_opts(self.qobuz, other.qobuz);

        self
    }
}

/// An artist.
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct Artist {
    pub id: ArtistId,
    pub properties: ArtistProperties,
    pub albums: Vec<Album>,
}

macro_rules! artist_unique_url_dispatch {
    ($field:ident) => {
        paste! {
            fn [<add_ $field _url>]<S: AsRef<str>>(&mut self, url: S) -> Result<(), Error> {
                Self::add_unique_url(&mut self.properties.$field, url)
            }

            fn [<remove_ $field _url>]<S: AsRef<str>>(&mut self, url: S) -> Result<(), Error> {
                Self::remove_unique_url(&mut self.properties.$field, url)
            }

            fn [<set_ $field _url>]<S: AsRef<str>>(&mut self, url: S) -> Result<(), Error> {
                Self::set_unique_url(&mut self.properties.$field, url)
            }

            fn [<clear_ $field _url>](&mut self) {
                Self::clear_unique_url(&mut self.properties.$field);
            }
        }
    };
}

macro_rules! artist_multi_url_dispatch {
    ($field:ident) => {
        paste! {
            fn [<add_ $field _urls>]<S: AsRef<str>>(&mut self, urls: Vec<S>) -> Result<(), Error> {
                Self::add_multi_urls(&mut self.properties.$field, urls)
            }

            fn [<remove_ $field _urls>]<S: AsRef<str>>(&mut self, urls: Vec<S>) -> Result<(), Error> {
                Self::remove_multi_urls(&mut self.properties.$field, urls)
            }

            fn [<set_ $field _urls>]<S: AsRef<str>>(&mut self, urls: Vec<S>) -> Result<(), Error> {
                Self::set_multi_urls(&mut self.properties.$field, urls)
            }

            fn [<clear_ $field _urls>](&mut self) {
                Self::clear_multi_urls(&mut self.properties.$field);
            }
        }
    };
}
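
// Note added for clarity: the two dispatch macros above rely on `paste!` to glue method names
// together. For example, `artist_unique_url_dispatch!(musicbrainz)` inside `impl Artist` expands
// to methods roughly equivalent to the sketch below (illustrative only, not part of the build):
//
//     fn add_musicbrainz_url<S: AsRef<str>>(&mut self, url: S) -> Result<(), Error> {
//         Self::add_unique_url(&mut self.properties.musicbrainz, url)
//     }
//     // ...plus remove_musicbrainz_url, set_musicbrainz_url and clear_musicbrainz_url.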

impl Artist {
    pub fn new<ID: Into<ArtistId>>(id: ID) -> Self {
        Artist {
            id: id.into(),
            properties: ArtistProperties::default(),
            albums: vec![],
        }
    }

    fn add_unique_url<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error> + Eq + Display>(
        container: &mut Option<T>,
        url: S,
    ) -> Result<(), Error> {
        let url: T = url.as_ref().try_into()?;

        match container {
            Some(current) => {
                if current != &url {
                    return Err(Error::CollectionError(format!(
                        "artist already has a different URL: {}",
                        current
                    )));
                }
            }
            None => {
                _ = container.insert(url);
            }
        }

        Ok(())
    }

    fn remove_unique_url<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error> + Eq>(
        container: &mut Option<T>,
        url: S,
    ) -> Result<(), Error> {
        let url: T = url.as_ref().try_into()?;

        if container == &Some(url) {
            _ = container.take();
        }

        Ok(())
    }

    fn set_unique_url<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error>>(
        container: &mut Option<T>,
        url: S,
    ) -> Result<(), Error> {
        _ = container.insert(url.as_ref().try_into()?);
        Ok(())
    }

    fn clear_unique_url<T>(container: &mut Option<T>) {
        _ = container.take();
    }

    fn add_multi_urls<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error> + Eq>(
        container: &mut Vec<T>,
        urls: Vec<S>,
    ) -> Result<(), Error> {
        let mut new_urls = urls
            .iter()
            .map(|url| url.as_ref().try_into())
            .filter(|res| {
                res.as_ref()
                    .map(|url| !container.contains(url))
                    .unwrap_or(true) // Propagate errors.
            })
            .collect::<Result<Vec<T>, Error>>()?;

        container.append(&mut new_urls);
        Ok(())
    }

    fn remove_multi_urls<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error> + Eq>(
        container: &mut Vec<T>,
        urls: Vec<S>,
    ) -> Result<(), Error> {
        let urls = urls
            .iter()
            .map(|url| url.as_ref().try_into())
            .collect::<Result<Vec<T>, Error>>()?;

        container.retain(|url| !urls.contains(url));
        Ok(())
    }

    fn set_multi_urls<S: AsRef<str>, T: for<'a> TryFrom<&'a str, Error = Error>>(
        container: &mut Vec<T>,
        urls: Vec<S>,
    ) -> Result<(), Error> {
        let mut urls = urls
            .iter()
            .map(|url| url.as_ref().try_into())
            .collect::<Result<Vec<T>, Error>>()?;

        container.clear();
        container.append(&mut urls);
        Ok(())
    }

    fn clear_multi_urls<T>(container: &mut Vec<T>) {
        container.clear();
    }

    artist_unique_url_dispatch!(musicbrainz);

    artist_multi_url_dispatch!(musicbutler);

    artist_multi_url_dispatch!(bandcamp);

    artist_unique_url_dispatch!(qobuz);
}

impl PartialOrd for Artist {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Artist {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.id.cmp(&other.id)
    }
}

impl Merge for Artist {
    fn merge(mut self, other: Self) -> Self {
        assert_eq!(self.id, other.id);
        self.properties = self.properties.merge(other.properties);
        self.albums = MergeSorted::new(self.albums.into_iter(), other.albums.into_iter()).collect();
        self
    }
}

/// The collection type. Currently, a collection is a list of artists.
pub type Collection = Vec<Artist>;

trait Merge {
    fn merge(self, other: Self) -> Self;

    fn merge_opts<T>(this: Option<T>, other: Option<T>) -> Option<T> {
        match &this {
            Some(_) => this,
            None => other,
        }
    }

    fn merge_vecs<T: Ord + Eq>(mut this: Vec<T>, mut other: Vec<T>) -> Vec<T> {
        this.append(&mut other);
        this.sort_unstable();
        this.dedup();
        this
    }
}
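
// Note added for clarity: `merge_opts` prefers the value that is already present, while
// `merge_vecs` keeps all unique values. A rough illustration (the literal values below are made
// up; this is a sketch, not compiled code):
//
//     merge_opts(Some("db"), Some("lib")) -> Some("db")     // keep the existing value
//     merge_opts(None, Some("lib"))       -> Some("lib")    // otherwise take the other one
//     merge_vecs(vec![1, 3], vec![2, 3])  -> vec![1, 2, 3]  // append, sort, dedup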

struct MergeSorted<L, R>
where
    L: Iterator<Item = R::Item>,
    R: Iterator,
{
    left: Peekable<L>,
    right: Peekable<R>,
}

impl<L, R> MergeSorted<L, R>
where
    L: Iterator<Item = R::Item>,
    R: Iterator,
{
    fn new(left: L, right: R) -> MergeSorted<L, R> {
        MergeSorted {
            left: left.peekable(),
            right: right.peekable(),
        }
    }
}

impl<L, R> Iterator for MergeSorted<L, R>
where
    L: Iterator<Item = R::Item>,
    R: Iterator,
    L::Item: Ord + Merge,
{
    type Item = L::Item;

    fn next(&mut self) -> Option<L::Item> {
        let which = match (self.left.peek(), self.right.peek()) {
            (Some(l), Some(r)) => l.cmp(r),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => return None,
        };

        match which {
            Ordering::Less => self.left.next(),
            Ordering::Equal => Some(self.left.next().unwrap().merge(self.right.next().unwrap())),
            Ordering::Greater => self.right.next(),
        }
    }
}
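
// Note added for clarity: `MergeSorted` walks two already-sorted iterators in lock-step. Smaller
// items pass through unchanged, while items that compare equal are combined with `Merge::merge`,
// the left (primary) side taking precedence. A sketch with artists (illustrative only):
//
//     left:  [artist "A" (database data), artist "C"]
//     right: [artist "A" (library data),  artist "B"]
//     out:   [artist "A" (database merged with library), artist "B", artist "C"]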

/// Error type for `musichoard`.
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// The [`MusicHoard`] is not able to read/write its in-memory collection.
    CollectionError(String),
    /// The [`MusicHoard`] failed to read/write from/to the library.
    LibraryError(String),
    /// The [`MusicHoard`] failed to read/write from/to the database.
    DatabaseError(String),
    /// The [`MusicHoard`] failed to parse a user-provided URL.
    UrlParseError(String),
    /// The user-provided URL is not valid.
    InvalidUrlError(String),
}

impl Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Self::CollectionError(ref s) => write!(f, "failed to read/write the collection: {s}"),
            Self::LibraryError(ref s) => write!(f, "failed to read/write from/to the library: {s}"),
            Self::DatabaseError(ref s) => {
                write!(f, "failed to read/write from/to the database: {s}")
            }
            Self::UrlParseError(ref s) => write!(f, "failed to parse a user-provided URL: {s}"),
            Self::InvalidUrlError(ref s) => write!(f, "user-provided URL is invalid: {s}"),
        }
    }
}

impl From<library::Error> for Error {
    fn from(err: library::Error) -> Error {
        Error::LibraryError(err.to_string())
    }
}

impl From<database::LoadError> for Error {
    fn from(err: database::LoadError) -> Error {
        Error::DatabaseError(err.to_string())
    }
}

impl From<database::SaveError> for Error {
    fn from(err: database::SaveError) -> Error {
        Error::DatabaseError(err.to_string())
    }
}

impl From<url::ParseError> for Error {
    fn from(err: url::ParseError) -> Error {
        Error::UrlParseError(err.to_string())
    }
}

impl From<uuid::Error> for Error {
    fn from(err: uuid::Error) -> Error {
        Error::UrlParseError(err.to_string())
    }
}

impl From<InvalidUrlError> for Error {
    fn from(err: InvalidUrlError) -> Error {
        Error::InvalidUrlError(err.to_string())
    }
}

/// The Music Hoard. It is responsible for pulling information from both the library and the
/// database, ensuring it is consistent and writing back any changes.
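///
/// A rough end-to-end sketch, added for illustration and marked `ignore` because the concrete
/// [`ILibrary`]/[`IDatabase`] implementations (and the `musichoard` crate-root path) are
/// assumptions here rather than something this file defines:
///
/// ```ignore
/// use musichoard::{database::IDatabase, library::ILibrary, Error, MusicHoardBuilder};
///
/// fn sync<LIB: ILibrary, DB: IDatabase>(library: LIB, database: DB) -> Result<(), Error> {
///     let mut music_hoard = MusicHoardBuilder::new()
///         .set_library(library)
///         .set_database(database)
///         .build();
///     music_hoard.load_from_database()?;
///     music_hoard.rescan_library()?;
///     music_hoard.save_to_database()
/// }
/// ```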
pub struct MusicHoard<LIB, DB> {
    collection: Collection,
    library: LIB,
    database: DB,
}

// Unit structs used as the type parameters when no library/database is configured, ensuring that
// the corresponding code is not compiled in.
pub struct NoLibrary;
pub struct NoDatabase;

macro_rules! music_hoard_unique_url_dispatch {
    ($field:ident) => {
        paste! {
            pub fn [<add_ $field _url>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                url: S,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<add_ $field _url>](url)
            }

            pub fn [<remove_ $field _url>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                url: S,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<remove_ $field _url>](url)
            }

            pub fn [<set_ $field _url>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                url: S,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<set_ $field _url>](url)
            }

            pub fn [<clear_ $field _url>]<ID: AsRef<ArtistId>>(
                &mut self,
                artist_id: ID,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<clear_ $field _url>]();
                Ok(())
            }
        }
    };
}

macro_rules! music_hoard_multi_url_dispatch {
    ($field:ident) => {
        paste! {
            pub fn [<add_ $field _urls>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                urls: Vec<S>,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<add_ $field _urls>](urls)
            }

            pub fn [<remove_ $field _urls>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                urls: Vec<S>,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<remove_ $field _urls>](urls)
            }

            pub fn [<set_ $field _urls>]<ID: AsRef<ArtistId>, S: AsRef<str>>(
                &mut self,
                artist_id: ID,
                urls: Vec<S>,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<set_ $field _urls>](urls)
            }

            pub fn [<clear_ $field _urls>]<ID: AsRef<ArtistId>>(
                &mut self, artist_id: ID,
            ) -> Result<(), Error> {
                self.get_artist_or_err(artist_id.as_ref())?.[<clear_ $field _urls>]();
                Ok(())
            }
        }
    };
}
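
// Note added for clarity: these two macros mirror the `artist_*_url_dispatch` macros above, but
// at the `MusicHoard` level. An invocation such as `music_hoard_unique_url_dispatch!(musicbrainz)`
// generates public wrappers (e.g. `MusicHoard::add_musicbrainz_url`) that look the artist up via
// `get_artist_or_err` and then forward to the corresponding `Artist` method, as exercised by the
// tests at the bottom of this file:
//
//     music_hoard.add_musicbrainz_url(&artist_id, MUSICBRAINZ)?;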

impl<LIB, DB> MusicHoard<LIB, DB> {
    /// Create a new [`MusicHoard`] with the provided [`ILibrary`] and [`IDatabase`].
    pub fn new(library: LIB, database: DB) -> Self {
        MusicHoard {
            collection: vec![],
            library,
            database,
        }
    }

    pub fn get_collection(&self) -> &Collection {
        &self.collection
    }

    pub fn add_artist<ID: Into<ArtistId>>(&mut self, artist_id: ID) {
        let artist_id: ArtistId = artist_id.into();

        if self.get_artist(&artist_id).is_none() {
            let new_artist = vec![Artist::new(artist_id)];

            let collection = mem::take(&mut self.collection);
            self.collection = Self::merge(collection, new_artist);
        }
    }

    pub fn remove_artist<ID: AsRef<ArtistId>>(&mut self, artist_id: ID) {
        let index_opt = self
            .collection
            .iter()
            .position(|a| &a.id == artist_id.as_ref());

        if let Some(index) = index_opt {
            self.collection.remove(index);
        }
    }

    music_hoard_unique_url_dispatch!(musicbrainz);

    music_hoard_multi_url_dispatch!(musicbutler);

    music_hoard_multi_url_dispatch!(bandcamp);

    music_hoard_unique_url_dispatch!(qobuz);

    fn sort(collection: &mut [Artist]) {
        collection.sort_unstable();
        for artist in collection.iter_mut() {
            artist.albums.sort_unstable();
            for album in artist.albums.iter_mut() {
                album.tracks.sort_unstable();
            }
        }
    }

    fn merge(primary: Vec<Artist>, secondary: Vec<Artist>) -> Vec<Artist> {
        MergeSorted::new(primary.into_iter(), secondary.into_iter()).collect()
    }

    fn items_to_artists(items: Vec<Item>) -> Vec<Artist> {
        let mut artists: Vec<Artist> = vec![];
        let mut album_ids = HashMap::<ArtistId, HashSet<AlbumId>>::new();

        for item in items.into_iter() {
            let artist_id = ArtistId {
                name: item.album_artist,
            };

            let album_id = AlbumId {
                year: item.album_year,
                title: item.album_title,
            };

            let track = Track {
                id: TrackId {
                    number: item.track_number,
                    title: item.track_title,
                },
                artist: item.track_artist,
                quality: Quality {
                    format: item.track_format,
                    bitrate: item.track_bitrate,
                },
            };

            let artist = if album_ids.contains_key(&artist_id) {
                // Results are assumed to arrive roughly grouped by artist, so search from the
                // back: the most recently inserted artist is the most likely match.
                artists
                    .iter_mut()
                    .rev()
                    .find(|a| a.id == artist_id)
                    .unwrap()
            } else {
                album_ids.insert(artist_id.clone(), HashSet::<AlbumId>::new());
                artists.push(Artist::new(artist_id.clone()));
                artists.last_mut().unwrap()
            };

            if album_ids[&artist_id].contains(&album_id) {
                // Results are assumed to arrive roughly grouped by album, so search from the
                // back: the most recently inserted album is the most likely match.
                let album = artist
                    .albums
                    .iter_mut()
                    .rev()
                    .find(|a| a.id == album_id)
                    .unwrap();
                album.tracks.push(track);
            } else {
                album_ids
                    .get_mut(&artist_id)
                    .unwrap()
                    .insert(album_id.clone());
                artist.albums.push(Album {
                    id: album_id,
                    tracks: vec![track],
                });
            }
        }

        artists
    }
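
    // Note added for clarity: `items_to_artists` folds a flat list of library items into the
    // nested Artist -> Album -> Track structure. A rough illustration (field values are made up):
    //
    //     [ Item { album_artist: "A", album_title: "X", track_number: 1, .. },
    //       Item { album_artist: "A", album_title: "X", track_number: 2, .. },
    //       Item { album_artist: "B", album_title: "Y", track_number: 1, .. } ]
    //
    // becomes artist "A" with album "X" (two tracks) and artist "B" with album "Y" (one track).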

    fn get_artist(&mut self, artist_id: &ArtistId) -> Option<&mut Artist> {
        self.collection.iter_mut().find(|a| &a.id == artist_id)
    }

    fn get_artist_or_err(&mut self, artist_id: &ArtistId) -> Result<&mut Artist, Error> {
        self.get_artist(artist_id).ok_or_else(|| {
            Error::CollectionError(format!("artist '{}' is not in the collection", artist_id))
        })
    }
}

impl<LIB: ILibrary, DB> MusicHoard<LIB, DB> {
    pub fn rescan_library(&mut self) -> Result<(), Error> {
        let items = self.library.list(&Query::new())?;
        let mut library_collection = Self::items_to_artists(items);
        Self::sort(&mut library_collection);

        let collection = mem::take(&mut self.collection);
        self.collection = Self::merge(library_collection, collection);

        Ok(())
    }
}

impl<LIB, DB: IDatabase> MusicHoard<LIB, DB> {
    pub fn load_from_database(&mut self) -> Result<(), Error> {
        let mut database_collection: Collection = vec![];
        self.database.load(&mut database_collection)?;
        Self::sort(&mut database_collection);

        let collection = mem::take(&mut self.collection);
        self.collection = Self::merge(collection, database_collection);

        Ok(())
    }

    pub fn save_to_database(&mut self) -> Result<(), Error> {
        self.database.save(&self.collection)?;
        Ok(())
    }
}

/// Builder for [`MusicHoard`]. Its purpose is to make it easier to set various combinations of
/// library/database or their absence.
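///
/// A usage sketch, added for illustration and marked `ignore` since the `musichoard` crate-root
/// path and the `library`/`database` values are assumptions here:
///
/// ```ignore
/// use musichoard::MusicHoardBuilder;
///
/// // No library and no database: an in-memory collection only.
/// let music_hoard = MusicHoardBuilder::default().build();
///
/// // With both a library and a database backend.
/// let music_hoard = MusicHoardBuilder::default()
///     .set_library(library)
///     .set_database(database)
///     .build();
/// ```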
pub struct MusicHoardBuilder<LIB, DB> {
    library: LIB,
    database: DB,
}

impl Default for MusicHoardBuilder<NoLibrary, NoDatabase> {
    /// Create a [`MusicHoardBuilder`].
    fn default() -> Self {
        Self::new()
    }
}

impl MusicHoardBuilder<NoLibrary, NoDatabase> {
    /// Create a [`MusicHoardBuilder`].
    pub fn new() -> Self {
        MusicHoardBuilder {
            library: NoLibrary,
            database: NoDatabase,
        }
    }
}

impl<LIB, DB> MusicHoardBuilder<LIB, DB> {
    /// Set a library for [`MusicHoard`].
    pub fn set_library<NEWLIB: ILibrary>(self, library: NEWLIB) -> MusicHoardBuilder<NEWLIB, DB> {
        MusicHoardBuilder {
            library,
            database: self.database,
        }
    }

    /// Set a database for [`MusicHoard`].
    pub fn set_database<NEWDB: IDatabase>(self, database: NEWDB) -> MusicHoardBuilder<LIB, NEWDB> {
        MusicHoardBuilder {
            library: self.library,
            database,
        }
    }

    /// Build [`MusicHoard`] with the currently set library and database.
    pub fn build(self) -> MusicHoard<LIB, DB> {
        MusicHoard::new(self.library, self.database)
    }
}

#[cfg(test)]
#[macro_use]
mod testlib;

#[cfg(test)]
mod tests {
    use mockall::predicate;
    use once_cell::sync::Lazy;

    use crate::{database::MockIDatabase, library::MockILibrary};

    use super::*;

    static MUSICBRAINZ: &str =
        "https://musicbrainz.org/artist/d368baa8-21ca-4759-9731-0b2753071ad8";
    static MUSICBRAINZ_2: &str =
        "https://musicbrainz.org/artist/823869a5-5ded-4f6b-9fb7-2a9344d83c6b";
    static MUSICBUTLER: &str = "https://www.musicbutler.io/artist-page/483340948";
    static MUSICBUTLER_2: &str = "https://www.musicbutler.io/artist-page/658903042/";
    static BANDCAMP: &str = "https://thelasthangmen.bandcamp.com/";
    static BANDCAMP_2: &str = "https://viciouscrusade.bandcamp.com/";
    static QOBUZ: &str = "https://www.qobuz.com/nl-nl/interpreter/the-last-hangmen/1244413";
    static QOBUZ_2: &str = "https://www.qobuz.com/nl-nl/interpreter/vicious-crusade/7522386";

    pub static COLLECTION: Lazy<Vec<Artist>> = Lazy::new(|| collection!());

    pub fn artist_to_items(artist: &Artist) -> Vec<Item> {
        let mut items = vec![];

        for album in artist.albums.iter() {
            for track in album.tracks.iter() {
                items.push(Item {
                    album_artist: artist.id.name.clone(),
                    album_year: album.id.year,
                    album_title: album.id.title.clone(),
                    track_number: track.id.number,
                    track_title: track.id.title.clone(),
                    track_artist: track.artist.clone(),
                    track_format: track.quality.format,
                    track_bitrate: track.quality.bitrate,
                });
            }
        }

        items
    }

    pub fn artists_to_items(artists: &[Artist]) -> Vec<Item> {
        let mut items = vec![];
        for artist in artists.iter() {
            items.append(&mut artist_to_items(artist));
        }
        items
    }

    fn clean_collection(mut collection: Collection) -> Collection {
        for artist in collection.iter_mut() {
            artist.properties = ArtistProperties::default();
        }
        collection
    }

    #[test]
    fn musicbrainz() {
        let uuid = "d368baa8-21ca-4759-9731-0b2753071ad8";
        let url = format!("https://musicbrainz.org/artist/{uuid}");
        let mb = MusicBrainz::new(&url).unwrap();
        assert_eq!(url, mb.url());
        assert_eq!(uuid, mb.mbid());

        let url = "not a url at all".to_string();
        let expected_error: Error = url::ParseError::RelativeUrlWithoutBase.into();
        let actual_error = MusicBrainz::new(url).unwrap_err();
        assert_eq!(actual_error, expected_error);
        assert_eq!(actual_error.to_string(), expected_error.to_string());

        let url = "https://musicbrainz.org/artist/i-am-not-a-uuid".to_string();
        let expected_error: Error = Uuid::try_parse("i-am-not-a-uuid").unwrap_err().into();
        let actual_error = MusicBrainz::new(url).unwrap_err();
        assert_eq!(actual_error, expected_error);
        assert_eq!(actual_error.to_string(), expected_error.to_string());

        let url = "https://musicbrainz.org/artist".to_string();
        let expected_error: Error = InvalidUrlError {
            url_type: UrlType::MusicBrainz,
            url: url.clone(),
        }
        .into();
        let actual_error = MusicBrainz::new(&url).unwrap_err();
        assert_eq!(actual_error, expected_error);
        assert_eq!(actual_error.to_string(), expected_error.to_string());
    }

    #[test]
    fn urls() {
        assert!(MusicBrainz::new(MUSICBRAINZ).is_ok());
        assert!(MusicBrainz::new(MUSICBUTLER).is_err());
        assert!(MusicBrainz::new(BANDCAMP).is_err());
        assert!(MusicBrainz::new(QOBUZ).is_err());

        assert!(MusicButler::new(MUSICBRAINZ).is_err());
        assert!(MusicButler::new(MUSICBUTLER).is_ok());
        assert!(MusicButler::new(BANDCAMP).is_err());
        assert!(MusicButler::new(QOBUZ).is_err());

        assert!(Bandcamp::new(MUSICBRAINZ).is_err());
        assert!(Bandcamp::new(MUSICBUTLER).is_err());
        assert!(Bandcamp::new(BANDCAMP).is_ok());
        assert!(Bandcamp::new(QOBUZ).is_err());

        assert!(Qobuz::new(MUSICBRAINZ).is_err());
        assert!(Qobuz::new(MUSICBUTLER).is_err());
        assert!(Qobuz::new(BANDCAMP).is_err());
        assert!(Qobuz::new(QOBUZ).is_ok());
    }

    #[test]
    fn artist_new_delete() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        let mut expected: Vec<Artist> = vec![];

        music_hoard.add_artist(artist_id.clone());
        expected.push(Artist::new(artist_id.clone()));
        assert_eq!(music_hoard.collection, expected);

        music_hoard.add_artist(artist_id.clone());
        assert_eq!(music_hoard.collection, expected);

        music_hoard.remove_artist(&artist_id_2);
        assert_eq!(music_hoard.collection, expected);

        music_hoard.remove_artist(&artist_id);
        _ = expected.pop();
        assert_eq!(music_hoard.collection, expected);
    }

    #[test]
    fn collection_error() {
        let artist_id = ArtistId::new("an artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        let actual_err = music_hoard
            .add_musicbrainz_url(&artist_id, QOBUZ)
            .unwrap_err();
        let expected_err =
            Error::CollectionError(String::from("artist 'an artist' is not in the collection"));
        assert_eq!(actual_err, expected_err);
        assert_eq!(actual_err.to_string(), expected_err.to_string());
    }

    #[test]
    fn add_remove_musicbrainz_url() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Option<MusicBrainz> = None;
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Adding an incorrect URL is an error.
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id, MUSICBUTLER)
            .is_err());
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id, BANDCAMP)
            .is_err());
        assert!(music_hoard.add_musicbrainz_url(&artist_id, QOBUZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Adding a URL to an artist not in the collection is an error.
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id_2, MUSICBRAINZ)
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Adding a URL to the artist.
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        _ = expected.insert(MusicBrainz::new(MUSICBRAINZ).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Adding the same URL again is ok, but does not do anything.
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Adding further URLs is an error.
        assert!(music_hoard
            .add_musicbrainz_url(&artist_id, MUSICBRAINZ_2)
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Removing a URL from an artist not in the collection is an error.
        assert!(music_hoard
            .remove_musicbrainz_url(&artist_id_2, MUSICBRAINZ)
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Removing a URL not in the collection is okay, but does not do anything.
        assert!(music_hoard
            .remove_musicbrainz_url(&artist_id, MUSICBRAINZ_2)
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Removing a URL in the collection removes it.
        assert!(music_hoard
            .remove_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        _ = expected.take();
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        assert!(music_hoard
            .remove_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);
    }

    #[test]
    fn set_clear_musicbrainz_url() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Option<MusicBrainz> = None;
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Setting an incorrect URL is an error.
        assert!(music_hoard
            .set_musicbrainz_url(&artist_id, MUSICBUTLER)
            .is_err());
        assert!(music_hoard
            .set_musicbrainz_url(&artist_id, BANDCAMP)
            .is_err());
        assert!(music_hoard.set_musicbrainz_url(&artist_id, QOBUZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Setting a URL on an artist not in the collection is an error.
        assert!(music_hoard
            .set_musicbrainz_url(&artist_id_2, MUSICBRAINZ)
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Setting a URL on an artist.
        assert!(music_hoard
            .set_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        _ = expected.insert(MusicBrainz::new(MUSICBRAINZ).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        assert!(music_hoard
            .set_musicbrainz_url(&artist_id, MUSICBRAINZ)
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        assert!(music_hoard
            .set_musicbrainz_url(&artist_id, MUSICBRAINZ_2)
            .is_ok());
        _ = expected.insert(MusicBrainz::new(MUSICBRAINZ_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Clearing URLs on an artist that does not exist is an error.
        assert!(music_hoard.clear_musicbrainz_url(&artist_id_2).is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);

        // Clearing URLs.
        assert!(music_hoard.clear_musicbrainz_url(&artist_id).is_ok());
        _ = expected.take();
        assert_eq!(music_hoard.collection[0].properties.musicbrainz, expected);
    }

    #[test]
    fn add_remove_musicbutler_urls() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Vec<MusicButler> = vec![];
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // If any URL is incorrect, adding URLs is an error.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBRAINZ, MUSICBRAINZ_2])
            .is_err());
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_err());
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![QOBUZ, QOBUZ_2])
            .is_err());
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBRAINZ, MUSICBUTLER, BANDCAMP, QOBUZ])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding URLs to an artist not in the collection is an error.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id_2, vec![MUSICBUTLER])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding a single URL.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding a URL that already exists is ok, but does not do anything.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding another single URL.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER_2])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER_2])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing URLs from an artist not in the collection is an error.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id_2, vec![MUSICBUTLER])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing a URL.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        expected.retain(|url| url.as_str() != MUSICBUTLER);
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing URLs that do not exist is okay, they will be ignored.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing a URL.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER_2])
            .is_ok());
        expected.retain(|url| url.as_str() != MUSICBUTLER_2);
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER_2])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding URLs if some exist is okay, they will be ignored.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing URLs if some do not exist is okay, they will be ignored.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        expected.retain(|url| url.as_str() != MUSICBUTLER);
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_ok());
        expected.retain(|url| url.as_str() != MUSICBUTLER_2);
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Adding multiple URLs without clashes.
        assert!(music_hoard
            .add_musicbutler_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER).unwrap());
        expected.push(MusicButler::new(MUSICBUTLER_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Removing multiple URLs without clashes.
        assert!(music_hoard
            .remove_musicbutler_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_ok());
        expected.clear();
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);
    }

    #[test]
    fn set_clear_musicbutler_urls() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Vec<MusicButler> = vec![];
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // If any URL is incorrect, setting URLs is an error.
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![MUSICBRAINZ, MUSICBRAINZ_2])
            .is_err());
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_err());
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![QOBUZ, QOBUZ_2])
            .is_err());
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![MUSICBRAINZ, MUSICBUTLER, BANDCAMP, QOBUZ])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Setting URLs on an artist not in the collection is an error.
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id_2, vec![MUSICBUTLER])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Set URLs.
        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![MUSICBUTLER])
            .is_ok());
        expected.push(MusicButler::new(MUSICBUTLER).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![MUSICBUTLER_2])
            .is_ok());
        expected.clear();
        expected.push(MusicButler::new(MUSICBUTLER_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        assert!(music_hoard
            .set_musicbutler_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_ok());
        expected.clear();
        expected.push(MusicButler::new(MUSICBUTLER).unwrap());
        expected.push(MusicButler::new(MUSICBUTLER_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);

        // Clearing URLs on an artist that does not exist is an error.
        assert!(music_hoard.clear_musicbutler_urls(&artist_id_2).is_err());

        // Clear URLs.
        assert!(music_hoard.clear_musicbutler_urls(&artist_id).is_ok());
        expected.clear();
        assert_eq!(music_hoard.collection[0].properties.musicbutler, expected);
    }

    #[test]
    fn add_remove_bandcamp_urls() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Vec<Bandcamp> = vec![];
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // If any URL is incorrect, adding URLs is an error.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![MUSICBRAINZ, MUSICBRAINZ_2])
            .is_err());
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_err());
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![QOBUZ, QOBUZ_2])
            .is_err());
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![MUSICBRAINZ, MUSICBUTLER, BANDCAMP, QOBUZ])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding URLs to an artist not in the collection is an error.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id_2, vec![BANDCAMP])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding a single URL.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding a URL that already exists is ok, but does not do anything.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding another single URL.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP_2])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP_2])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing URLs from an artist not in the collection is an error.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id_2, vec![BANDCAMP])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing a URL.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        expected.retain(|url| url.as_str() != BANDCAMP);
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing URLs that do not exist is okay, they will be ignored.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing a URL.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP_2])
            .is_ok());
        expected.retain(|url| url.as_str() != BANDCAMP_2);
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP_2])
            .is_ok());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding URLs if some exist is okay, they will be ignored.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing URLs if some do not exist is okay, they will be ignored.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        expected.retain(|url| url.as_str() != BANDCAMP);
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_ok());
        expected.retain(|url| url.as_str() != BANDCAMP_2);
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Adding multiple URLs without clashes.
        assert!(music_hoard
            .add_bandcamp_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP).unwrap());
        expected.push(Bandcamp::new(BANDCAMP_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Removing multiple URLs without clashes.
        assert!(music_hoard
            .remove_bandcamp_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_ok());
        expected.clear();
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);
    }
|
|
|
|
|
|
|
|

    #[test]
    fn set_clear_bandcamp_urls() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Vec<Bandcamp> = vec![];
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // If any URL is incorrect setting URLs is an error.
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![MUSICBRAINZ, MUSICBRAINZ_2])
            .is_err());
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![MUSICBUTLER, MUSICBUTLER_2])
            .is_err());
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![QOBUZ, QOBUZ_2])
            .is_err());
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![MUSICBRAINZ, MUSICBUTLER, BANDCAMP, QOBUZ])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Setting URLs on an artist not in the collection is an error.
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id_2, vec![BANDCAMP])
            .is_err());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Set URLs.
        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![BANDCAMP])
            .is_ok());
        expected.push(Bandcamp::new(BANDCAMP).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![BANDCAMP_2])
            .is_ok());
        expected.clear();
        expected.push(Bandcamp::new(BANDCAMP_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        assert!(music_hoard
            .set_bandcamp_urls(&artist_id, vec![BANDCAMP, BANDCAMP_2])
            .is_ok());
        expected.clear();
        expected.push(Bandcamp::new(BANDCAMP).unwrap());
        expected.push(Bandcamp::new(BANDCAMP_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);

        // Clearing URLs on an artist that does not exist is an error.
        assert!(music_hoard.clear_bandcamp_urls(&artist_id_2).is_err());

        // Clear URLs.
        assert!(music_hoard.clear_bandcamp_urls(&artist_id).is_ok());
        expected.clear();
        assert_eq!(music_hoard.collection[0].properties.bandcamp, expected);
    }

    #[test]
    fn add_remove_qobuz_url() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Option<Qobuz> = None;
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Adding an incorrect URL is an error.
        assert!(music_hoard.add_qobuz_url(&artist_id, MUSICBRAINZ).is_err());
        assert!(music_hoard.add_qobuz_url(&artist_id, MUSICBUTLER).is_err());
        assert!(music_hoard.add_qobuz_url(&artist_id, BANDCAMP).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Adding a URL to an artist not in the collection is an error.
        assert!(music_hoard.add_qobuz_url(&artist_id_2, QOBUZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Adding a URL to the artist.
        assert!(music_hoard.add_qobuz_url(&artist_id, QOBUZ).is_ok());
        _ = expected.insert(Qobuz::new(QOBUZ).unwrap());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Adding the same URL again is ok, but does not do anything.
        assert!(music_hoard.add_qobuz_url(&artist_id, QOBUZ).is_ok());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Adding further URLs is an error.
        assert!(music_hoard.add_qobuz_url(&artist_id, QOBUZ_2).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Removing a URL from an artist not in the collection is an error.
        assert!(music_hoard.remove_qobuz_url(&artist_id_2, QOBUZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Removing a URL not in the collection is okay, but does not do anything.
        assert!(music_hoard.remove_qobuz_url(&artist_id, QOBUZ_2).is_ok());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Removing a URL in the collection removes it.
        assert!(music_hoard.remove_qobuz_url(&artist_id, QOBUZ).is_ok());
        _ = expected.take();
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        assert!(music_hoard.remove_qobuz_url(&artist_id, QOBUZ).is_ok());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);
    }

    #[test]
    fn set_clear_qobuz_url() {
        let artist_id = ArtistId::new("an artist");
        let artist_id_2 = ArtistId::new("another artist");
        let mut music_hoard = MusicHoardBuilder::default().build();

        music_hoard.add_artist(artist_id.clone());

        let mut expected: Option<Qobuz> = None;
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Setting an incorrect URL is an error.
        assert!(music_hoard.set_qobuz_url(&artist_id, MUSICBUTLER).is_err());
        assert!(music_hoard.set_qobuz_url(&artist_id, BANDCAMP).is_err());
        assert!(music_hoard.set_qobuz_url(&artist_id, MUSICBRAINZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Setting a URL on an artist not in the collection is an error.
        assert!(music_hoard.set_qobuz_url(&artist_id_2, QOBUZ).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Setting a URL on an artist.
        assert!(music_hoard.set_qobuz_url(&artist_id, QOBUZ).is_ok());
        _ = expected.insert(Qobuz::new(QOBUZ).unwrap());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        assert!(music_hoard.set_qobuz_url(&artist_id, QOBUZ).is_ok());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        assert!(music_hoard.set_qobuz_url(&artist_id, QOBUZ_2).is_ok());
        _ = expected.insert(Qobuz::new(QOBUZ_2).unwrap());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Clearing URLs on an artist that does not exist is an error.
        assert!(music_hoard.clear_qobuz_url(&artist_id_2).is_err());
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);

        // Clearing URLs.
        assert!(music_hoard.clear_qobuz_url(&artist_id).is_ok());
        _ = expected.take();
        assert_eq!(music_hoard.collection[0].properties.qobuz, expected);
    }
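
    // The URL tests above exercise two update styles on the public API. A
    // hedged usage sketch (an illustration only, assuming `artist_id` is
    // already in the collection and a caller that returns `Result<(), Error>`
    // so `?` can propagate):
    //
    //     // Multi-valued properties: add/remove are incremental, set/clear replace.
    //     music_hoard.add_bandcamp_urls(&artist_id, vec![BANDCAMP])?;   // appends
    //     music_hoard.set_bandcamp_urls(&artist_id, vec![BANDCAMP_2])?; // replaces
    //     music_hoard.clear_bandcamp_urls(&artist_id)?;                 // empties
    //
    //     // Single-valued property: add refuses to overwrite, set replaces.
    //     music_hoard.add_qobuz_url(&artist_id, QOBUZ)?;
    //     music_hoard.set_qobuz_url(&artist_id, QOBUZ_2)?;
    //     music_hoard.clear_qobuz_url(&artist_id)?;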

    #[test]
    fn merge_track() {
        let left = Track {
            id: TrackId {
                number: 4,
                title: String::from("a title"),
            },
            artist: vec![String::from("left artist")],
            quality: Quality {
                format: Format::Flac,
                bitrate: 1411,
            },
        };
        let right = Track {
            id: left.id.clone(),
            artist: vec![String::from("right artist")],
            quality: Quality {
                format: Format::Mp3,
                bitrate: 320,
            },
        };

        let merged = left.clone().merge(right);
        assert_eq!(left, merged);
    }

    #[test]
    fn merge_album_no_overlap() {
        let left = COLLECTION[0].albums[0].to_owned();
        let mut right = COLLECTION[0].albums[1].to_owned();
        right.id = left.id.clone();

        let mut expected = left.clone();
        expected.tracks.append(&mut right.tracks.clone());
        expected.tracks.sort_unstable();

        let merged = left.clone().merge(right);
        assert_eq!(expected, merged);
    }

    #[test]
    fn merge_album_overlap() {
        let mut left = COLLECTION[0].albums[0].to_owned();
        let mut right = COLLECTION[0].albums[1].to_owned();
        right.id = left.id.clone();
        left.tracks.push(right.tracks[0].clone());
        left.tracks.sort_unstable();

        let mut expected = left.clone();
        expected.tracks.append(&mut right.tracks.clone());
        expected.tracks.sort_unstable();
        expected.tracks.dedup();

        let merged = left.clone().merge(right);
        assert_eq!(expected, merged);
    }
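
    // The two album tests above pin down the merge semantics for track lists.
    // A minimal sketch of that behaviour, expressed with the same `Vec`
    // operations used to build `expected` (the helper name `merged_tracks` is
    // hypothetical, not part of the crate API):
    //
    //     fn merged_tracks(mut left: Vec<Track>, mut right: Vec<Track>) -> Vec<Track> {
    //         left.append(&mut right); // combine both sides
    //         left.sort_unstable();    // restore track ordering
    //         left.dedup();            // overlapping tracks appear only once
    //         left
    //     }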

    #[test]
    fn merge_artist_no_overlap() {
        let left = COLLECTION[0].to_owned();
        let mut right = COLLECTION[1].to_owned();
        right.id = left.id.clone();

        let mut expected = left.clone();
        expected.properties = expected.properties.merge(right.clone().properties);
        expected.albums.append(&mut right.albums.clone());
        expected.albums.sort_unstable();

        let merged = left.clone().merge(right);
        assert_eq!(expected, merged);
    }

    #[test]
    fn merge_artist_overlap() {
        let mut left = COLLECTION[0].to_owned();
        let mut right = COLLECTION[1].to_owned();
        right.id = left.id.clone();
        left.albums.push(right.albums[0].clone());
        left.albums.sort_unstable();

        let mut expected = left.clone();
        expected.properties = expected.properties.merge(right.clone().properties);
        expected.albums.append(&mut right.albums.clone());
        expected.albums.sort_unstable();
        expected.albums.dedup();

        let merged = left.clone().merge(right);
        assert_eq!(expected, merged);
    }

    #[test]
    fn merge_collection_no_overlap() {
        let half: usize = COLLECTION.len() / 2;

        let left = COLLECTION[..half].to_owned();
        let right = COLLECTION[half..].to_owned();

        let mut expected = COLLECTION.to_owned();
        expected.sort_unstable();

        let merged = MusicHoard::<NoLibrary, NoDatabase>::merge(left.clone(), right);
        assert_eq!(expected, merged);
    }

    #[test]
    fn merge_collection_overlap() {
        let half: usize = COLLECTION.len() / 2;

        let left = COLLECTION[..(half + 1)].to_owned();
        let right = COLLECTION[half..].to_owned();

        let mut expected = COLLECTION.to_owned();
        expected.sort_unstable();

        let merged = MusicHoard::<NoLibrary, NoDatabase>::merge(left.clone(), right);
        assert_eq!(expected, merged);
    }
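
    // A hedged reading of the two collection tests above: merging the two
    // halves reproduces the full (sorted) COLLECTION whether or not the halves
    // share an artist, because artists with the same id are folded together
    // rather than duplicated. For example (an illustration, assuming
    // COLLECTION holds at least two artists, as the artist tests above do):
    //
    //     let merged = MusicHoard::<NoLibrary, NoDatabase>::merge(
    //         COLLECTION[..1].to_owned(),
    //         COLLECTION[..2].to_owned(),
    //     );
    //     assert_eq!(merged.len(), 2); // the shared artist is folded, not repeated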

    #[test]
    fn rescan_library_ordered() {
        let mut library = MockILibrary::new();
        let database = MockIDatabase::new();

        let library_input = Query::new();
        let library_result = Ok(artists_to_items(&COLLECTION));

        library
            .expect_list()
            .with(predicate::eq(library_input))
            .times(1)
            .return_once(|_| library_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        music_hoard.rescan_library().unwrap();
        assert_eq!(
            music_hoard.get_collection(),
            &clean_collection(COLLECTION.to_owned())
        );
    }

    #[test]
    fn rescan_library_unordered() {
        let mut library = MockILibrary::new();
        let database = MockIDatabase::new();

        let library_input = Query::new();
        let mut library_result = Ok(artists_to_items(&COLLECTION));

        // Swap the last item with the first.
        let last = library_result.as_ref().unwrap().len() - 1;
        library_result.as_mut().unwrap().swap(0, last);

        library
            .expect_list()
            .with(predicate::eq(library_input))
            .times(1)
            .return_once(|_| library_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        music_hoard.rescan_library().unwrap();
        assert_eq!(
            music_hoard.get_collection(),
            &clean_collection(COLLECTION.to_owned())
        );
    }

    #[test]
    fn rescan_library_album_title_year_clash() {
        let mut library = MockILibrary::new();
        let database = MockIDatabase::new();

        let mut expected = clean_collection(COLLECTION.to_owned());
        expected[0].albums[0].id.year = expected[1].albums[0].id.year;
        expected[0].albums[0].id.title = expected[1].albums[0].id.title.clone();

        let library_input = Query::new();
        let library_result = Ok(artists_to_items(&expected));

        library
            .expect_list()
            .with(predicate::eq(library_input))
            .times(1)
            .return_once(|_| library_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        music_hoard.rescan_library().unwrap();
        assert_eq!(music_hoard.get_collection(), &expected);
    }
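
    // What the clash test above pins down: two different artists may each own
    // an album with the same title and year, and rescanning keeps those albums
    // separate under their respective artists instead of merging them across
    // artists.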

    #[test]
    fn load_database() {
        let library = MockILibrary::new();
        let mut database = MockIDatabase::new();

        database
            .expect_load()
            .times(1)
            .return_once(|coll: &mut Collection| {
                *coll = COLLECTION.to_owned();
                Ok(())
            });

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        music_hoard.load_from_database().unwrap();
        assert_eq!(music_hoard.get_collection(), &*COLLECTION);
    }

    #[test]
    fn rescan_get_save() {
        let mut library = MockILibrary::new();
        let mut database = MockIDatabase::new();

        let library_input = Query::new();
        let library_result = Ok(artists_to_items(&COLLECTION));

        let database_input = clean_collection(COLLECTION.to_owned());
        let database_result = Ok(());

        library
            .expect_list()
            .with(predicate::eq(library_input))
            .times(1)
            .return_once(|_| library_result);

        database
            .expect_save()
            .with(predicate::eq(database_input))
            .times(1)
            .return_once(|_: &Collection| database_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        music_hoard.rescan_library().unwrap();
        assert_eq!(
            music_hoard.get_collection(),
            &clean_collection(COLLECTION.to_owned())
        );
        music_hoard.save_to_database().unwrap();
    }
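
    // Outside of tests, the same flow uses a real library and database in
    // place of the mocks exercised above. A minimal usage sketch (`library`
    // and `database` stand in for concrete ILibrary/IDatabase implementations,
    // and `?` assumes a caller returning `Result<(), Error>`):
    //
    //     let mut music_hoard = MusicHoardBuilder::default()
    //         .set_library(library)
    //         .set_database(database)
    //         .build();
    //     music_hoard.rescan_library()?;   // pull items from the library and merge
    //     music_hoard.save_to_database()?; // persist the resulting collection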

    #[test]
    fn library_error() {
        let mut library = MockILibrary::new();
        let database = MockIDatabase::new();

        let library_result = Err(library::Error::Invalid(String::from("invalid data")));

        library
            .expect_list()
            .times(1)
            .return_once(|_| library_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        let actual_err = music_hoard.rescan_library().unwrap_err();
        let expected_err =
            Error::LibraryError(library::Error::Invalid(String::from("invalid data")).to_string());

        assert_eq!(actual_err, expected_err);
        assert_eq!(actual_err.to_string(), expected_err.to_string());
    }

    #[test]
    fn database_load_error() {
        let library = MockILibrary::new();
        let mut database = MockIDatabase::new();

        let database_result = Err(database::LoadError::IoError(String::from("I/O error")));

        database
            .expect_load()
            .times(1)
            .return_once(|_: &mut Collection| database_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        let actual_err = music_hoard.load_from_database().unwrap_err();
        let expected_err = Error::DatabaseError(
            database::LoadError::IoError(String::from("I/O error")).to_string(),
        );

        assert_eq!(actual_err, expected_err);
        assert_eq!(actual_err.to_string(), expected_err.to_string());
    }

    #[test]
    fn database_save_error() {
        let library = MockILibrary::new();
        let mut database = MockIDatabase::new();

        let database_result = Err(database::SaveError::IoError(String::from("I/O error")));

        database
            .expect_save()
            .times(1)
            .return_once(|_: &Collection| database_result);

        let mut music_hoard = MusicHoardBuilder::default()
            .set_library(library)
            .set_database(database)
            .build();

        let actual_err = music_hoard.save_to_database().unwrap_err();
        let expected_err = Error::DatabaseError(
            database::SaveError::IoError(String::from("I/O error")).to_string(),
        );

        assert_eq!(actual_err, expected_err);
        assert_eq!(actual_err.to_string(), expected_err.to_string());
    }
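
    // A hedged sketch of how a caller might branch on the error variants
    // asserted in the three error tests above (both variants carry the
    // underlying error rendered as a string; the catch-all arm covers the
    // crate's remaining error variants):
    //
    //     match music_hoard.rescan_library() {
    //         Ok(()) => {}
    //         Err(Error::LibraryError(msg)) => eprintln!("library error: {msg}"),
    //         Err(Error::DatabaseError(msg)) => eprintln!("database error: {msg}"),
    //         Err(other) => eprintln!("unexpected error: {other}"),
    //     }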

}