use crate::database::Storage;
use kittybox_indieauth::TokenData;
use chrono::prelude::*;
use core::iter::Iterator;
use newbase60::num_to_sxg;
use serde_json::json;
use std::convert::TryInto;
/// Path (relative to the user's `me` URL) of the default feed for h-entry posts.
pub(crate) const DEFAULT_CHANNEL_PATH: &str = "/feeds/main";
/// Human-readable name used when auto-creating the default feed.
const DEFAULT_CHANNEL_NAME: &str = "Main feed";
/// Path of the default channel that collects h-card posts (contacts).
pub(crate) const CONTACTS_CHANNEL_PATH: &str = "/feeds/vcards";
/// Human-readable name used when auto-creating the contacts feed.
const CONTACTS_CHANNEL_NAME: &str = "My address book";
/// Path of the default channel that collects h-food posts (recipes).
pub(crate) const FOOD_CHANNEL_PATH: &str = "/feeds/food";
/// Human-readable name used when auto-creating the food feed.
const FOOD_CHANNEL_NAME: &str = "My recipe book";
/// Map an MF2 post type onto the storage folder its posts live under.
///
/// Unknown types fall back to the generic `posts/` folder. The returned
/// path always carries a trailing slash so it can be joined with a slug.
fn get_folder_from_type(post_type: &str) -> String {
    let folder = match post_type {
        "h-feed" => "feeds/",
        "h-card" => "vcards/",
        "h-event" => "events/",
        "h-food" => "food/",
        _ => "posts/",
    };
    folder.to_owned()
}
/// Overwrite the post's `published` property with the current local time
/// (formatted as RFC 3339) and return that timestamp.
///
/// The previous value is deliberately discarded, never repaired — a
/// malformed datetime is simply replaced with "now".
fn reset_dt(post: &mut serde_json::Value) -> DateTime<FixedOffset> {
    let now: DateTime<Local> = Local::now();
    post["properties"]["published"] = json!([now.to_rfc3339()]);
    now.into()
}
/// Normalize an incoming MF2 JSON object in place and derive its canonical
/// storage location (UID).
///
/// Normalization covers: validating/defaulting the `published` datetime,
/// minting a UID (and mirroring it into `url`) when absent, expanding
/// `mp-slug` into extra URLs, rendering string content as Markdown→HTML,
/// folding `mp-channel` into `channel`, defaulting the channel by post
/// type, and recording `posted-with`/`author` from the token.
///
/// Returns `(uid, normalized_post)` where `uid` is the first element of
/// the post's `uid` property.
///
/// # Panics
/// Panics if `body["type"][0]` is not a string, or if URL-joining the
/// generated paths onto `user.me` fails.
pub fn normalize_mf2(mut body: serde_json::Value, user: &TokenData) -> (String, serde_json::Value) {
    // Normalize the MF2 object here.
    let me = &user.me;
    // The storage folder is derived from the MF2 type (h-card, h-feed, ...).
    let folder = get_folder_from_type(body["type"][0].as_str().unwrap());
    let published: DateTime<FixedOffset> =
        if let Some(dt) = body["properties"]["published"][0].as_str() {
            // Check if the datetime is parsable.
            match DateTime::parse_from_rfc3339(dt) {
                Ok(dt) => dt,
                // Unparsable client timestamps are discarded, not recovered.
                Err(_) => reset_dt(&mut body),
            }
        } else {
            // Set the datetime.
            // Note: this code block duplicates functionality with the above failsafe.
            // Consider refactoring it to a helper function?
            reset_dt(&mut body)
        };
    match body["properties"]["uid"][0].as_str() {
        None => {
            // No UID yet: mint one from `me` + folder + the publish time's
            // millisecond timestamp encoded in NewBase60.
            let uid = serde_json::Value::String(
                me.join(
                    &(folder.clone()
                        + &num_to_sxg(published.timestamp_millis().try_into().unwrap())),
                )
                .unwrap()
                .to_string(),
            );
            body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]);
            // The UID must also be listed among the post's URLs.
            match body["properties"]["url"].as_array_mut() {
                Some(array) => array.push(uid),
                None => body["properties"]["url"] = body["properties"]["uid"].clone(),
            }
        }
        Some(uid_str) => {
            // A UID was already provided: keep it, but make sure it appears
            // in the `url` list (without duplicating it).
            let uid = uid_str.to_string();
            match body["properties"]["url"].as_array_mut() {
                Some(array) => {
                    if !array.iter().any(|i| i.as_str().unwrap_or("") == uid) {
                        array.push(serde_json::Value::String(uid))
                    }
                }
                None => body["properties"]["url"] = body["properties"]["uid"].clone(),
            }
        }
    }
    // Expand mp-slug values into additional URLs under the type folder.
    // By this point `url` is guaranteed to be an array (set above).
    if let Some(slugs) = body["properties"]["mp-slug"].as_array() {
        let new_urls = slugs
            .iter()
            .map(|i| i.as_str().unwrap_or(""))
            .filter(|i| i != &"")
            .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string())
            .collect::<Vec<String>>();
        let urls = body["properties"]["url"].as_array_mut().unwrap();
        new_urls.iter().for_each(|i| urls.push(json!(i)));
    }
    // mp-slug is a Micropub command, not post content — strip it after use.
    let props = body["properties"].as_object_mut().unwrap();
    props.remove("mp-slug");
    if body["properties"]["content"][0].is_string() {
        // Convert the content to HTML using the `markdown` crate,
        // keeping the original Markdown source as `value`.
        body["properties"]["content"] = json!([{
            "html": markdown::to_html(body["properties"]["content"][0].as_str().unwrap()),
            "value": body["properties"]["content"][0]
        }])
    }
    // TODO: apply this normalization to editing too
    // Fold the mp-channel command into the canonical `channel` property.
    // Array form: append all values.
    if body["properties"]["mp-channel"].is_array() {
        let mut additional_channels = body["properties"]["mp-channel"].as_array().unwrap().clone();
        if let Some(array) = body["properties"]["channel"].as_array_mut() {
            array.append(&mut additional_channels);
        } else {
            body["properties"]["channel"] = json!(additional_channels)
        }
        body["properties"]
            .as_object_mut()
            .unwrap()
            .remove("mp-channel");
    } else if body["properties"]["mp-channel"].is_string() {
        // String form: wrap the single value into the `channel` array.
        let chan = body["properties"]["mp-channel"]
            .as_str()
            .unwrap()
            .to_owned();
        if let Some(array) = body["properties"]["channel"].as_array_mut() {
            array.push(json!(chan))
        } else {
            body["properties"]["channel"] = json!([chan]);
        }
        body["properties"]
            .as_object_mut()
            .unwrap()
            .remove("mp-channel");
    }
    // No channel given: pick a default channel based on the post type.
    if body["properties"]["channel"][0].as_str().is_none() {
        match body["type"][0].as_str() {
            Some("h-entry") => {
                // Set the channel to the main channel...
                // TODO find like posts and move them to separate private channel
                let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();
                body["properties"]["channel"] = json!([default_channel]);
            }
            Some("h-card") => {
                let default_channel = me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string();
                body["properties"]["channel"] = json!([default_channel]);
            }
            Some("h-food") => {
                let default_channel = me.join(FOOD_CHANNEL_PATH).unwrap().to_string();
                body["properties"]["channel"] = json!([default_channel]);
            }
            // TODO h-event
            /*"h-event" => {
                let default_channel
            },*/
            _ => {
                // Unknown types get an empty channel list.
                body["properties"]["channel"] = json!([]);
            }
        }
    }
    // Record which client posted this, and default the author to `me`.
    body["properties"]["posted-with"] = json!([user.client_id]);
    if body["properties"]["author"][0].as_str().is_none() {
        body["properties"]["author"] = json!([me.as_str()])
    }
    // TODO: maybe highlight #hashtags?
    // Find other processing to do and insert it here
    return (
        body["properties"]["uid"][0].as_str().unwrap().to_string(),
        body,
    );
}
/// Convert a form-encoded Micropub request (as key/value pairs) into an
/// MF2 JSON object.
///
/// The special `h` key sets the object type (prefixed with `h-`),
/// `access_token` is dropped entirely, and a PHP-style `[]` suffix on a
/// key is stripped. Repeated keys accumulate into arrays. When no `h`
/// key was given, the type defaults to `h-entry`.
pub(crate) fn form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value {
    let mut mf2 = json!({"type": [], "properties": {}});
    for (key, value) in form {
        match key.as_str() {
            // `h=entry` becomes the MF2 type `h-entry`.
            "h" => mf2["type"]
                .as_array_mut()
                .unwrap()
                .push(json!(format!("h-{}", value))),
            // Credentials are not content — silently drop them.
            "access_token" => {}
            _ => {
                // Strip a trailing `[]` so `category[]` maps to `category`.
                let prop = key.strip_suffix("[]").unwrap_or(&key);
                if let Some(values) = mf2["properties"][prop].as_array_mut() {
                    values.push(json!(value));
                } else {
                    mf2["properties"][prop] = json!([value]);
                }
            }
        }
    }
    // Micropub defaults untyped submissions to h-entry.
    if mf2["type"].as_array().unwrap().is_empty() {
        mf2["type"].as_array_mut().unwrap().push(json!("h-entry"));
    }
    mf2
}
/// Create one of the well-known default feeds and add the post
/// identified by `uid` to it.
///
/// The feed's display name is looked up from the channel URL's path;
/// the feed stub goes through the same [`normalize_mf2`] pipeline as
/// regular posts before being stored.
///
/// # Panics
/// Panics if `channel` is not a parsable URL or its path is not one of
/// the known default feed paths.
pub(crate) async fn create_feed(
    storage: &impl Storage,
    uid: &str,
    channel: &str,
    user: &TokenData,
) -> crate::database::Result<()> {
    // Map the channel URL's path onto its human-readable feed name.
    let channel_path = url::Url::parse(channel).unwrap().path().to_string();
    let name = match channel_path.as_str() {
        DEFAULT_CHANNEL_PATH => DEFAULT_CHANNEL_NAME,
        CONTACTS_CHANNEL_PATH => CONTACTS_CHANNEL_NAME,
        FOOD_CHANNEL_PATH => FOOD_CHANNEL_NAME,
        _ => panic!("Tried to create an unknown default feed!"),
    };
    // Build a minimal h-feed stub and normalize it like any other post.
    let feed_stub = json!({
        "type": ["h-feed"],
        "properties": {
            "name": [name],
            "uid": [channel]
        },
    });
    let (_, feed) = normalize_mf2(feed_stub, user);
    storage.put_post(&feed, user.me.as_str()).await?;
    storage.add_to_feed(channel, uid).await
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Shared fixture: a create-scoped token for https://fireburn.ru/
    // issued to the Quill Micropub client.
    fn token_data() -> TokenData {
        TokenData {
            me: "https://fireburn.ru/".parse().unwrap(),
            client_id: "https://quill.p3k.io/".parse().unwrap(),
            scope: kittybox_indieauth::Scopes::new(vec![kittybox_indieauth::Scope::Create]),
            exp: Some(u64::MAX),
            iat: Some(0)
        }
    }

    // Form-encoded input converts to canonical MF2 JSON, with `h=entry`
    // becoming the `h-entry` type.
    #[test]
    fn test_form_to_mf2() {
        assert_eq!(
            super::form_to_mf2_json(
                serde_urlencoded::from_str("h=entry&content=something%20interesting").unwrap()
            ),
            json!({
                "type": ["h-entry"],
                "properties": {
                    "content": ["something interesting"]
                }
            })
        )
    }

    // A client-supplied UID must survive normalization unchanged and be
    // returned as the post location.
    #[test]
    fn test_no_replace_uid() {
        let mf2 = json!({
            "type": ["h-card"],
            "properties": {
                "uid": ["https://fireburn.ru/"],
                "name": ["Vika Nezrimaya"],
                "note": ["A crazy programmer girl who wants some hugs"]
            }
        });
        let (uid, normalized) = normalize_mf2(
            mf2.clone(),
            &token_data(),
        );
        assert_eq!(
            normalized["properties"]["uid"][0], mf2["properties"]["uid"][0],
            "UID was replaced"
        );
        assert_eq!(
            normalized["properties"]["uid"][0], uid,
            "Returned post location doesn't match UID"
        );
    }

    // mp-channel given as an array becomes the `channel` property.
    #[test]
    fn test_mp_channel() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "uid": ["https://fireburn.ru/posts/test"],
                "content": [{"html": "<p>Hello world!</p>"}],
                "mp-channel": ["https://fireburn.ru/feeds/test"]
            }
        });
        let (_, normalized) = normalize_mf2(
            mf2.clone(),
            &token_data(),
        );
        assert_eq!(
            normalized["properties"]["channel"],
            mf2["properties"]["mp-channel"]
        );
    }

    // mp-channel given as a bare string is wrapped into the `channel` array.
    #[test]
    fn test_mp_channel_as_string() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "uid": ["https://fireburn.ru/posts/test"],
                "content": [{"html": "<p>Hello world!</p>"}],
                "mp-channel": "https://fireburn.ru/feeds/test"
            }
        });
        let (_, normalized) = normalize_mf2(
            mf2.clone(),
            &token_data(),
        );
        assert_eq!(
            normalized["properties"]["channel"][0],
            mf2["properties"]["mp-channel"]
        );
    }

    // End-to-end check of the defaulting pipeline: published timestamp,
    // UID/URL minting, Markdown rendering, default channel, and author.
    #[test]
    fn test_normalize_mf2() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "content": ["This is content!"]
            }
        });
        let (uid, post) = normalize_mf2(
            mf2,
            &token_data(),
        );
        assert_eq!(
            post["properties"]["published"]
                .as_array()
                .expect("post['published'] is undefined")
                .len(),
            1,
            "Post doesn't have a published time"
        );
        // The generated timestamp must round-trip through RFC 3339.
        DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap())
            .expect("Couldn't parse date from rfc3339");
        assert!(
            !post["properties"]["url"]
                .as_array()
                .expect("post['url'] is undefined")
                .is_empty(),
            "Post doesn't have any URLs"
        );
        assert_eq!(
            post["properties"]["uid"]
                .as_array()
                .expect("post['uid'] is undefined")
                .len(),
            1,
            "Post doesn't have a single UID"
        );
        assert_eq!(
            post["properties"]["uid"][0], uid,
            "UID of a post and its supposed location don't match"
        );
        assert!(
            uid.starts_with("https://fireburn.ru/posts/"),
            "The post namespace is incorrect"
        );
        assert_eq!(
            post["properties"]["content"][0]["html"]
                .as_str()
                .expect("Post doesn't have a rich content object")
                .trim(),
            "<p>This is content!</p>",
            "Parsed Markdown content doesn't match expected HTML"
        );
        assert_eq!(
            post["properties"]["channel"][0], "https://fireburn.ru/feeds/main",
            "Post isn't posted to the main channel"
        );
        assert_eq!(
            post["properties"]["author"][0], "https://fireburn.ru/",
            "Post author is unknown"
        );
    }

    // mp-slug adds a human-readable URL and is stripped from the output.
    #[test]
    fn test_mp_slug() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "content": ["This is content!"],
                "mp-slug": ["hello-post"]
            },
        });
        let (_, post) = normalize_mf2(
            mf2,
            &token_data(),
        );
        assert!(
            post["properties"]["url"]
                .as_array()
                .unwrap()
                .iter()
                .map(|i| i.as_str().unwrap())
                .any(|i| i == "https://fireburn.ru/posts/hello-post"),
            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
        );
        assert!(
            post["properties"]["mp-slug"].as_array().is_none(),
            "mp-slug wasn't deleted from the array!"
        )
    }

    // h-feed posts go into the feeds/ namespace, honoring mp-slug.
    #[test]
    fn test_normalize_feed() {
        let mf2 = json!({
            "type": ["h-feed"],
            "properties": {
                "name": "Main feed",
                "mp-slug": ["main"]
            }
        });
        let (uid, post) = normalize_mf2(
            mf2,
            &token_data(),
        );
        assert_eq!(
            post["properties"]["uid"][0], uid,
            "UID of a post and its supposed location don't match"
        );
        assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/");
        assert!(
            post["properties"]["url"]
                .as_array()
                .unwrap()
                .iter()
                .map(|i| i.as_str().unwrap())
                .any(|i| i == "https://fireburn.ru/feeds/main"),
            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
        );
        assert!(
            post["properties"]["mp-slug"].as_array().is_none(),
            "mp-slug wasn't deleted from the array!"
        )
    }
}