Streamline adding new URL types #122

Merged
wojtek merged 7 commits from 117---streamline-adding-new-url-types into main 2024-02-09 18:41:21 +01:00
3 changed files with 20 additions and 58 deletions
Showing only changes of commit f1b4f8747d

View File

@@ -1,10 +1,10 @@
 use std::{
-    collections::{BTreeMap, HashMap},
+    collections::HashMap,
     fmt::{self, Debug, Display},
     mem,
 };
-use serde::{Deserialize, Serialize, Serializer};
+use serde::{Deserialize, Serialize};
 use url::Url;
 use uuid::Uuid;
@@ -20,7 +20,6 @@ pub struct Artist {
     pub id: ArtistId,
     pub sort: Option<ArtistId>,
     pub musicbrainz: Option<MusicBrainz>,
-    #[serde(serialize_with = "ordered_map")]
     pub properties: HashMap<String, Vec<String>>,
wojtek marked this conversation as resolved
This couples the on-disk database serialisation with the in-memory abstract representation. To be fair, it was always there under the subtle guise of `Serialize`/`Deserialize`. Think whether to create the split here, or in a separate issue following this one.
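
One possible shape for such a split, as a sketch only: `ArtistRecord`, the `From` conversions, and the trimmed-down fields below are illustrative and not part of this PR. The idea is that the JSON database layer owns its own serde-enabled record type, while the in-memory `Artist` stays free of serialisation concerns:

```rust
use std::collections::HashMap;

use serde::{Deserialize, Serialize};

// In-memory representation: no serde derives, free to evolve independently
// of the on-disk format. (Fields reduced for the sketch.)
#[derive(Debug, Clone, PartialEq)]
pub struct Artist {
    pub id: String,
    pub properties: HashMap<String, Vec<String>>,
}

// On-disk representation, private to the JSON database module; only this
// type knows about serde and the database schema.
#[derive(Serialize, Deserialize)]
struct ArtistRecord {
    id: String,
    properties: HashMap<String, Vec<String>>,
}

impl From<Artist> for ArtistRecord {
    fn from(artist: Artist) -> Self {
        Self {
            id: artist.id,
            properties: artist.properties,
        }
    }
}

impl From<ArtistRecord> for Artist {
    fn from(record: ArtistRecord) -> Self {
        Self {
            id: record.id,
            properties: record.properties,
        }
    }
}
```

Saving and loading would then map through the record type, keeping on-disk schema decisions out of the core collection types.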
     pub albums: Vec<Album>,
 }
@@ -171,18 +170,6 @@ impl Merge for Artist {
     }
 }
-// For use with serde's [serialize_with] attribute
-fn ordered_map<S, K: Ord + Serialize, V: Serialize>(
-    value: &HashMap<K, V>,
-    serializer: S,
-) -> Result<S::Ok, S::Error>
-where
-    S: Serializer,
-{
-    let ordered: BTreeMap<_, _> = value.iter().collect();
-    ordered.serialize(serializer)
-}
 impl AsRef<ArtistId> for ArtistId {
     fn as_ref(&self) -> &ArtistId {
         self

View File

@@ -65,8 +65,6 @@ pub mod testmod;
 mod tests {
     use std::collections::HashMap;
-    use mockall::predicate;
     use crate::core::{
         collection::{
             artist::{Artist, ArtistId},
@@ -78,21 +76,6 @@ mod tests {
     use super::*;
     use testmod::DATABASE_JSON;
-    #[test]
-    fn save() {
-        let write_data = FULL_COLLECTION.to_owned();
-        let input = DATABASE_JSON.to_owned();
-        let mut backend = MockIJsonDatabaseBackend::new();
-        backend
-            .expect_write()
-            .with(predicate::eq(input))
-            .times(1)
-            .return_once(|_| Ok(()));
-        JsonDatabase::new(backend).save(&write_data).unwrap();
-    }
     #[test]
     fn load() {
         let expected = FULL_COLLECTION.to_owned();
@@ -108,17 +91,24 @@ mod tests {
     #[test]
     fn reverse() {
-        let input = DATABASE_JSON.to_owned();
-        let result = Ok(input.clone());
+        // Saving is non-deterministic due to HashMap, but regardless of how the data ends up being
+        // saved, loading it again should always yield the exact same data as was input.
+        struct MockIJsonDatabaseBackend {
+            data: Option<String>,
+        }
-        let mut backend = MockIJsonDatabaseBackend::new();
-        backend
-            .expect_write()
-            .with(predicate::eq(input))
-            .times(1)
-            .return_once(|_| Ok(()));
-        backend.expect_read().times(1).return_once(|| result);
+        impl IJsonDatabaseBackend for MockIJsonDatabaseBackend {
+            fn write(&mut self, json: &str) -> Result<(), std::io::Error> {
+                let _ = self.data.insert(json.to_owned());
+                Ok(())
+            }
+            fn read(&self) -> Result<String, std::io::Error> {
+                Ok(self.data.as_ref().unwrap().clone())
+            }
+        }
+        let backend = MockIJsonDatabaseBackend { data: None };
         let mut database = JsonDatabase::new(backend);
         let write_data = FULL_COLLECTION.to_owned();

View File

@@ -16,22 +16,6 @@ use crate::testlib::COLLECTION;
 pub static DATABASE_TEST_FILE: Lazy<PathBuf> =
     Lazy::new(|| fs::canonicalize("./tests/files/database/database.json").unwrap());
-#[test]
-fn save() {
-    let file = NamedTempFile::new().unwrap();
-    let backend = JsonDatabaseFileBackend::new(file.path());
-    let mut database = JsonDatabase::new(backend);
-    let write_data = COLLECTION.to_owned();
-    database.save(&write_data).unwrap();
-    let expected = fs::read_to_string(&*DATABASE_TEST_FILE).unwrap();
-    let actual = fs::read_to_string(file.path()).unwrap();
-    assert_eq!(actual, expected);
-}
 #[test]
 fn load() {
     let backend = JsonDatabaseFileBackend::new(&*DATABASE_TEST_FILE);
@@ -45,6 +29,8 @@ fn load() {
 #[test]
 fn reverse() {
+    // Saving is non-deterministic due to HashMap, but regardless of how the data ends up being
+    // saved, loading it again should always yield the exact same data as was input.
     let file = NamedTempFile::new().unwrap();
     let backend = JsonDatabaseFileBackend::new(file.path());
@@ -52,7 +38,6 @@ fn reverse() {
     let write_data = COLLECTION.to_owned();
     database.save(&write_data).unwrap();
     let read_data: Vec<Artist> = database.load().unwrap();
     assert_eq!(write_data, read_data);