author     Vika <vika@fireburn.ru>  2022-05-10 07:25:07 +0300
committer  Vika <vika@fireburn.ru>  2022-05-10 07:25:07 +0300
commit     1363650ee69bbeb693b99204c22c8419a716f240 (patch)
tree       d0215b32c6a9d4bdb3767f4dc2babed53102ee88 /src
parent     05349fbd7cbeb074ec88a727a52383da99640b37 (diff)
download   kittybox-1363650ee69bbeb693b99204c22c8419a716f240.tar.zst
FileStorage: fixes and regression tests for read_feed_with_limit
Now I will know if something breaks horribly again.
Diffstat (limited to 'src')
-rw-r--r--  src/database/file/mod.rs |   8
-rw-r--r--  src/database/mod.rs      | 174
2 files changed, 141 insertions(+), 41 deletions(-)
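The substantive change in src/database/file/mod.rs is a one-character comparison fix in read_feed_with_limit's cursor handling: the skip loop now breaks on the post that matches the `after` cursor (consuming it), where it previously broke on the first post that did not match. A minimal standalone sketch of the corrected skip logic; the function name skip_past_cursor and the plain Vec<String> input are illustrative only, not part of Kittybox's API:

    fn skip_past_cursor(posts: Vec<String>, after: Option<&str>) -> Vec<String> {
        let mut iter = posts.into_iter();
        if let Some(after) = after {
            // Consume items up to *and including* the cursor, so the
            // remainder contains only posts that truly come after it.
            for s in iter.by_ref() {
                if s == after {
                    break;
                }
            }
        }
        iter.collect()
    }

With posts ["a", "b", "c"] and after = Some("b"), this yields ["c"]; with a cursor that matches nothing, the loop drains the iterator and yields an empty list, which is what the new regression test for issue #4 asserts.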
diff --git a/src/database/file/mod.rs b/src/database/file/mod.rs
index 4a40f38..f9588f5 100644
--- a/src/database/file/mod.rs
+++ b/src/database/file/mod.rs
@@ -483,16 +483,20 @@ impl Storage for FileStorage {
         if let Some(feed) = self.get_post(url).await? {
             if let Some(mut feed) = filter_post(feed, user) {
                 if feed["children"].is_array() {
+                    // This code contains several clones. It looks
+                    // like the borrow checker thinks it is preventing
+                    // me from doing something incredibly stupid. The
+                    // borrow checker may or may not be right.
                     let children = feed["children"].as_array().unwrap().clone();
                     let mut posts_iter = children
                         .into_iter()
                         .map(|s: serde_json::Value| s.as_str().unwrap().to_string());
                     // Note: we can't actually use skip_while here because we end up emitting `after`.
                     // This imperative snippet consumes after instead of emitting it, allowing the
-                    // stream of posts to return only those items that truly come *after*.
+                    // stream of posts to return only those items that truly come *after*
                     if let Some(after) = after {
                         for s in posts_iter.by_ref() {
-                            if &s != after {
+                            if &s == after {
                                 break
                             }
                         }
diff --git a/src/database/mod.rs b/src/database/mod.rs
index 5a1dd3f..b9a8652 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -257,10 +257,9 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
 #[cfg(test)]
 mod tests {
     use super::{MicropubChannel, Storage};
-    use paste::paste;
     use serde_json::json;
 
-    async fn test_backend_basic_operations<Backend: Storage>(backend: Backend) {
+    async fn test_basic_operations<Backend: Storage>(backend: Backend) {
         let post: serde_json::Value = json!({
             "type": ["h-entry"],
             "properties": {
@@ -324,7 +323,7 @@ mod tests {
     }
 
     /// Note: this is merely a smoke check and is in no way comprehensive.
-    async fn test_backend_update<Backend: Storage>(backend: Backend) {
+    async fn test_update<Backend: Storage>(backend: Backend) {
         let post: serde_json::Value = json!({
             "type": ["h-entry"],
             "properties": {
@@ -358,30 +357,33 @@ mod tests {
             .await
             .unwrap();
 
-        if let Some(returned_post) = backend.get_post(&key).await.unwrap() {
-            assert!(returned_post.is_object());
-            assert_eq!(
-                returned_post["type"].as_array().unwrap().len(),
-                post["type"].as_array().unwrap().len()
-            );
-            assert_eq!(
-                returned_post["type"].as_array().unwrap(),
-                post["type"].as_array().unwrap()
-            );
-            assert_eq!(
-                returned_post["properties"]["content"][0].as_str().unwrap(),
-                "Different test content"
-            );
-            assert_eq!(
-                returned_post["properties"]["category"].as_array().unwrap(),
-                &vec![json!("testing")]
-            );
-        } else {
-            panic!("For some reason the backend did not return the post.")
+        match backend.get_post(&key).await {
+            Ok(Some(returned_post)) => {
+                assert!(returned_post.is_object());
+                assert_eq!(
+                    returned_post["type"].as_array().unwrap().len(),
+                    post["type"].as_array().unwrap().len()
+                );
+                assert_eq!(
+                    returned_post["type"].as_array().unwrap(),
+                    post["type"].as_array().unwrap()
+                );
+                assert_eq!(
+                    returned_post["properties"]["content"][0].as_str().unwrap(),
+                    "Different test content"
+                );
+                assert_eq!(
+                    returned_post["properties"]["category"].as_array().unwrap(),
+                    &vec![json!("testing")]
+                );
+            },
+            something_else => {
+                something_else.expect("Shouldn't error").expect("Should have the post");
+            }
         }
     }
 
-    async fn test_backend_get_channel_list<Backend: Storage>(backend: Backend) {
+    async fn test_get_channel_list<Backend: Storage>(backend: Backend) {
         let feed = json!({
             "type": ["h-feed"],
             "properties": {
@@ -406,7 +408,7 @@ mod tests {
         );
     }
 
-    async fn test_backend_settings<Backend: Storage>(backend: Backend) {
+    async fn test_settings<Backend: Storage>(backend: Backend) {
         backend
             .set_setting(crate::database::Settings::SiteName, "https://fireburn.ru/", "Vika's Hideout")
             .await
@@ -420,23 +422,117 @@ mod tests {
         );
     }
 
+    fn gen_random_post(domain: &str) -> serde_json::Value {
+        use faker_rand::lorem::{Paragraphs, Word};
+
+        let uid = format!(
+            "https://{domain}/posts/{}-{}-{}",
+            rand::random::<Word>(), rand::random::<Word>(), rand::random::<Word>()
+        );
+
+        let post = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": [rand::random::<Paragraphs>().to_string()],
+                "uid": [&uid],
+                "url": [&uid]
+            }
+        });
+
+        post
+    }
+
+    async fn test_feed_pagination<Backend: Storage>(backend: Backend) {
+        let posts = std::iter::from_fn(|| Some(gen_random_post("fireburn.ru")))
+            .take(20)
+            .collect::<Vec<serde_json::Value>>();
+
+        let feed = json!({
+            "type": ["h-feed"],
+            "properties": {
+                "name": ["Main Page"],
+                "author": ["https://fireburn.ru/"],
+                "uid": ["https://fireburn.ru/feeds/main"]
+            },
+            "children": posts.iter()
+                .filter_map(|json| json["properties"]["uid"][0].as_str())
+                .collect::<Vec<&str>>()
+        });
+        let key = feed["properties"]["uid"][0].as_str().unwrap();
+
+        backend
+            .put_post(&feed, "https://fireburn.ru/")
+            .await
+            .unwrap();
+        println!("---");
+        for (i, post) in posts.iter().enumerate() {
+            backend.put_post(post, "https://fireburn.ru/").await.unwrap();
+            println!("posts[{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        let limit: usize = 10;
+        let result = backend.read_feed_with_limit(key, &None, limit, &None)
+            .await
+            .unwrap()
+            .unwrap();
+        for (i, post) in result["children"].as_array().unwrap().iter().enumerate() {
+            println!("feed[0][{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        assert_eq!(result["children"].as_array().unwrap()[0..10], posts[0..10]);
+
+        let result2 = backend.read_feed_with_limit(
+            key,
+            &result["children"]
+                .as_array()
+                .unwrap()
+                .last()
+                .unwrap()
+                ["properties"]["uid"][0]
+                .as_str()
+                .map(|i| i.to_owned()),
+            limit, &None
+        ).await.unwrap().unwrap();
+        for (i, post) in result2["children"].as_array().unwrap().iter().enumerate() {
+            println!("feed[1][{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        assert_eq!(result2["children"].as_array().unwrap()[0..10], posts[10..20]);
+
+        // Regression test for #4
+        let nonsense_after = Some("1010101010".to_owned());
+        let result3 = tokio::time::timeout(tokio::time::Duration::from_secs(10), async move {
+            backend.read_feed_with_limit(
+                key, &nonsense_after, limit, &None
+            ).await.unwrap().unwrap()
+        }).await.expect("Operation should not hang: see https://gitlab.com/kittybox/kittybox/-/issues/4");
+        assert!(result3["children"].as_array().unwrap().is_empty());
+    }
+
+    /// Automatically generates a test suite for
+    macro_rules! test_all {
+        ($func_name:ident, $mod_name:ident) => {
+            mod $mod_name {
+                $func_name!(test_basic_operations);
+                $func_name!(test_get_channel_list);
+                $func_name!(test_settings);
+                $func_name!(test_update);
+                $func_name!(test_feed_pagination);
+            }
+        }
+    }
     macro_rules! file_test {
-        ($func_name:expr) => {
-            paste! {
-                #[tokio::test]
-                async fn [<file_ $func_name>] () {
-                    test_logger::ensure_env_logger_initialized();
-                    let tempdir = tempdir::TempDir::new("file").expect("Failed to create tempdir");
-                    let backend = super::FileStorage::new(tempdir.into_path()).await.unwrap();
-                    $func_name(backend).await
-                }
+        ($func_name:ident) => {
+            #[tokio::test]
+            async fn $func_name () {
+                test_logger::ensure_env_logger_initialized();
+                let tempdir = tempdir::TempDir::new("file").expect("Failed to create tempdir");
+                let backend = super::super::FileStorage::new(tempdir.into_path()).await.unwrap();
+                super::$func_name(backend).await
             }
         };
     }
-
-    file_test!(test_backend_basic_operations);
-    file_test!(test_backend_get_channel_list);
-    file_test!(test_backend_settings);
-    file_test!(test_backend_update);
+
+    test_all!(file_test, file);
 }
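For reference, the new test_all! macro replaces the paste-based identifier mangling with plain module nesting: each backend gets its own submodule, and the generated tests keep their original names inside it. Written out by hand (this expansion is illustrative, not repository code), test_all!(file_test, file) expands to roughly:

    mod file {
        #[tokio::test]
        async fn test_basic_operations() {
            test_logger::ensure_env_logger_initialized();
            let tempdir = tempdir::TempDir::new("file").expect("Failed to create tempdir");
            let backend = super::super::FileStorage::new(tempdir.into_path()).await.unwrap();
            super::test_basic_operations(backend).await
        }
        // ...plus identical wrappers for test_get_channel_list, test_settings,
        // test_update and test_feed_pagination.
    }

The extra module level is why file_test! now writes super::super::FileStorage and super::$func_name: the generated tests live one module deeper than the helpers they call.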