use core::iter::Iterator;
use std::str::FromStr;
use std::convert::TryInto;
use chrono::prelude::*;
use http_types::Mime;
use tide::prelude::json;
use tide::{Request, Response, Result};
use newbase60::num_to_sxg;
use crate::ApplicationState;
use crate::database::Storage;
use crate::indieauth::User;

static DEFAULT_CHANNEL_PATH: &str = "/feeds/main";
static DEFAULT_CHANNEL_NAME: &str = "Main feed";

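/// Build a `tide::Response` with the given status code and a JSON body
/// constructed by the `json!` macro.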
macro_rules! response {
    ($code:expr, $json:tt) => {
        Ok(Response::builder($code).body(json!($json)).build())
    };
}

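/// Build a Micropub-style JSON error response with `error` and
/// `error_description` fields.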
macro_rules! error_json {
    ($code:expr, $error:expr, $error_desc:expr) => {
        response!($code, {
            "error": $error,
            "error_description": $error_desc
        })
    };
}

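/// Pick the URL folder for a post based on its MF2 type: feeds go under
/// `feeds/`, events under `events/`, everything else under `posts/`.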
fn get_folder_from_type(post_type: &str) -> String {
    (match post_type {
        "h-feed" => "feeds/",
        "h-event" => "events/",
        _ => "posts/"
    }).to_string()
}

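/// Normalize an incoming MF2-JSON object before it is stored.
///
/// This ensures a parsable `published` datetime, derives `uid` and `url`
/// values from the user's `me` URL (including any `mp-slug` values, which are
/// then removed), converts Markdown `content` to HTML, and fills in default
/// `channel`, `author` and `posted-with` properties.
///
/// Returns the post's UID together with the normalized object.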
fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde_json::Value) {
    // Normalize the MF2 object here.
    let me = &user.me;
    let folder = get_folder_from_type(body["type"][0].as_str().unwrap());
    // Make sure `published` contains a parsable RFC 3339 datetime.
    // If it is missing or invalid, reset it to the current time.
    // Do not attempt to recover the information.
    // Do not pass GO. Do not collect $200.
    let published: DateTime<FixedOffset> = match body["properties"]["published"][0]
        .as_str()
        .and_then(|dt| DateTime::parse_from_rfc3339(dt).ok())
    {
        Some(dt) => dt,
        None => {
            let curtime: DateTime<Local> = Local::now();
            body["properties"]["published"] = serde_json::Value::Array(vec![
                serde_json::Value::String(curtime.to_rfc3339())
            ]);
            DateTime::from(curtime)
        }
    };
    match body["properties"]["uid"][0].as_str() {
        None => {
            let uid = serde_json::Value::String(
                me.join(
                    &(folder.clone() + &num_to_sxg(published.timestamp_millis().try_into().unwrap()))
                ).unwrap().to_string());
            body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]);
            match body["properties"]["url"].as_array_mut() {
                Some(array) => {
                    array.push(uid)
                }
                None => {
                    body["properties"]["url"] = body["properties"]["uid"].clone()
                }
            }
        }
        Some(uid_str) => {
            let uid = uid_str.to_string();
            match body["properties"]["url"].as_array_mut() {
                Some(array) => {
                    if !array.iter().any(|i| i.as_str().unwrap_or("") == uid) {
                        array.push(serde_json::Value::String(uid))
                    }
                }
                None => {
                    body["properties"]["url"] = body["properties"]["uid"].clone()
                }
            }
        }
    }
    if let Some(slugs) = body["properties"]["mp-slug"].as_array() {
        let new_urls = slugs.iter()
            .map(|i| i.as_str().unwrap_or(""))
            .filter(|i| i != &"")
            .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string())
            .collect::<Vec<String>>();
        let urls = body["properties"]["url"].as_array_mut().unwrap();
        new_urls.iter().for_each(|i| urls.push(json!(i)));
    }
    let props = body["properties"].as_object_mut().unwrap();
    props.remove("mp-slug");

    if body["properties"]["content"][0].is_string() {
        // Convert the content to HTML using the `markdown` crate
        body["properties"]["content"] = json!([{
            "html": markdown::to_html(body["properties"]["content"][0].as_str().unwrap()),
            "value": body["properties"]["content"][0]
        }])
    }
    if body["properties"]["channel"][0].as_str().is_none() && body["type"][0] != "h-feed" {
        // Set the channel to the default (main) channel.
        let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();

        body["properties"]["channel"] = json!([default_channel]);
    }
    body["properties"]["posted-with"] = json!([user.client_id]);
    if body["properties"]["author"][0].as_str().is_none() {
        body["properties"]["author"] = json!([me.as_str()])
    }
    // TODO: maybe highlight #hashtags?
    // Find other processing to do and insert it here
    (body["properties"]["uid"][0].as_str().unwrap().to_string(), body)
}

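/// Handle the creation of a new post.
///
/// Requires the `create` scope. The body is normalized with `normalize_mf2`,
/// checked to make sure it belongs to the authenticated user's website,
/// rejected if a post with the same UID already exists, then saved and
/// appended to its channels (creating the default feed on first use).
/// On success this responds with `202 Accepted` and a `Location` header.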
async fn new_post<S: Storage>(req: Request<ApplicationState<S>>, body: serde_json::Value) -> Result {
    // First, check for rights.
    let user = req.ext::<User>().unwrap();
    if !user.check_scope("create") {
        return error_json!(401, "insufficient_scope", "Not enough privileges to post. Try a token with a \"create\" scope instead.")
    }
    let (uid, post) = normalize_mf2(body, user);

    // Security check!
    // This software might also be used in a multi-user setting
    // where several users or identities share one Micropub server
    // (maybe a family website or a shitpost sideblog?)
    if post["properties"]["url"].as_array().unwrap().iter().any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
        || !post["properties"]["uid"][0].as_str().unwrap().starts_with(user.me.as_str())
        || post["properties"]["channel"].as_array().unwrap().iter().any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
    {
        return error_json!(403, "forbidden", "You're trying to post to someone else's website...")
    }

    let storage = &req.state().storage;
    match storage.post_exists(&uid).await {
        Ok(exists) => if exists {
            return error_json!(409, "already_exists", format!("A post with the exact same UID already exists in the database: {}", uid))
        },
        Err(err) => return Ok(err.into())
    }
    // WARNING: WRITE BOUNDARY
    //let mut storage = RwLockUpgradableReadGuard::upgrade(storage).await;
    if let Err(err) = storage.put_post(&post).await {
        return error_json!(500, "database_error", format!("{}", err))
    }
    for channel in post["properties"]["channel"]
        .as_array().unwrap().iter()
        .map(|i| i.as_str().unwrap_or("").to_string())
        .filter(|i| !i.is_empty())
        .collect::<Vec<_>>()
    {
        let default_channel = user.me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();
        match storage.post_exists(&channel).await {
            Ok(exists) => if exists {
                if let Err(err) = storage.update_post(&channel, json!({
                    "add": {
                        "children": [uid]
                    }
                })).await {
                    return error_json!(500, "database_error", format!("Couldn't insert post into the channel due to a database error: {}", err))
                }
            } else if channel == default_channel {
                let (_, feed) = normalize_mf2(json!({
                    "type": ["h-feed"],
                    "properties": {
                        "name": [DEFAULT_CHANNEL_NAME],
                        "mp-slug": ["main"],
                    },
                    "children": [uid]
                }), user);
                if let Err(err) = storage.put_post(&feed).await {
                    return error_json!(500, "database_error", format!("Couldn't save feed: {}", err))
                }
            },
            Err(err) => return error_json!(500, "database_error", err.to_string())
        }
    }
    // END WRITE BOUNDARY
    //drop(storage);
    // TODO: Post-processing the post (aka second write pass)
    // - [ ] Send webmentions
    // - [ ] Download rich reply contexts
    // - [ ] Send WebSub notifications to the hub (if we happen to have one)
    // - [ ] Syndicate the post if requested, add links to the syndicated copies

    return Ok(Response::builder(202)
        .header("Location", &uid)
        .body(json!({"status": "accepted", "location": &uid}))
        .build());
}

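/// Handle a JSON Micropub request: either an `action` (delete or update)
/// against an existing post, or a new MF2-JSON object to be created.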
async fn process_json<S: Storage>(req: Request<ApplicationState<S>>, body: serde_json::Value) -> Result {
    let is_action = body["action"].is_string() && body["url"].is_string();
    if is_action {
        // This could be an update, a deletion or an undeletion request.
        // Process it separately.
        let action = body["action"].as_str().unwrap();
        let url = body["url"].as_str().unwrap();
        let user = req.ext::<User>().unwrap();
        match action {
            "delete" => {
                if !user.check_scope("delete") {
                    return error_json!(401, "insufficient_scope", "You need a `delete` scope to delete posts.")
                }
                if let Err(error) = req.state().storage.delete_post(&url).await {
                    return Ok(error.into())
                }
                return Ok(Response::builder(200).build());
            },
            "update" => {
                if !user.check_scope("update") {
                    return error_json!(401, "insufficient_scope", "You need an `update` scope to update posts.")
                }
                if let Err(error) = req.state().storage.update_post(&url, body.clone()).await {
                    return Ok(error.into())
                } else {
                    return Ok(Response::builder(204).build())
                }
            },
            _ => {
                return error_json!(400, "invalid_request", "This action is not supported.")
            }
        }
    } else if body["type"][0].is_string() {
        // This is definitely an h-entry or something similar. Check if it has properties?
        if body["properties"].is_object() {
            // Ok, this is definitely a new h-entry. Let's save it.
            return new_post(req, body).await
        } else {
            return error_json!(400, "invalid_request", "This MF2-JSON object has a type, but not properties. This makes no sense to post.")
        }
    } else {
        return error_json!(400, "invalid_request", "Try sending MF2-structured data or an object with an \"action\" and \"url\" keys.")
    }
}

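/// Convert a form-encoded Micropub request into an MF2-JSON object.
///
/// `h=entry&content=hello` becomes
/// `{"type": ["h-entry"], "properties": {"content": ["hello"]}}`;
/// the `access_token` field is dropped, `[]` suffixes on keys are stripped,
/// and a missing `h` parameter defaults to `h-entry`.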
fn convert_form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value {
    let mut mf2 = json!({"type": [], "properties": {}});
    for (k, v) in form {
        if k == "h" {
            mf2["type"].as_array_mut().unwrap().push(json!("h-".to_string() + &v));
        } else if k != "access_token" {
            let key = k.strip_suffix("[]").unwrap_or(&k);
            match mf2["properties"][key].as_array_mut() {
                Some(prop) => prop.push(json!(v)),
                None => mf2["properties"][key] = json!([v])
            }
        }
    }
    if mf2["type"].as_array().unwrap().is_empty() {
        mf2["type"].as_array_mut().unwrap().push(json!("h-entry"));
    }
    mf2
}

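/// Handle a form-encoded Micropub request: either `action=delete` or the
/// creation of a new post (other actions are only supported in JSON mode).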
async fn process_form<S: Storage>(req: Request<ApplicationState<S>>, form: Vec<(String, String)>) -> Result {
    if let Some((_, v)) = form.iter().find(|(k, _)| k == "action") {
        if v == "delete" {
            let user = req.ext::<User>().unwrap();
            if !user.check_scope("delete") {
                return error_json!(401, "insufficient_scope", "You cannot delete posts without a `delete` scope.")
            }
            match form.iter().find(|(k, _)| k == "url") {
                Some((_, url)) => {
                    if let Err(error) = req.state().storage.delete_post(&url).await {
                        return error_json!(500, "database_error", error.to_string())
                    }
                    return Ok(Response::builder(200).build())
                },
                None => return error_json!(400, "invalid_request", "Please provide a `url` to delete.")
            }
        } else {
            return error_json!(400, "invalid_request", "This action is not supported in form-encoded mode. (JSON requests support more actions, use them!)")
        }
    }
    
    let mf2 = convert_form_to_mf2_json(form);

    if mf2["properties"].as_object().unwrap().keys().len() > 0 {
        return new_post(req, mf2).await;
    }
    return error_json!(400, "invalid_request", "Try sending h=entry&content=something%20interesting");
}

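/// The Micropub POST endpoint. Dispatches on the `Content-Type` header:
/// `application/json` and `application/x-www-form-urlencoded` bodies are
/// supported; requests with any other (or no) `Content-Type` get
/// `415 Unsupported Media Type`.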
pub async fn post_handler<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
    match req.content_type() {
        Some(value) => {
            if value == Mime::from_str("application/json").unwrap() {
                match req.body_json::<serde_json::Value>().await {
                    Ok(parsed) => {
                        return process_json(req, parsed).await
                    },
                    Err(err) => return error_json!(
                        400, "invalid_request",
                        format!("Parsing JSON failed: {:?}", err)
                    )
                }
            } else if value == Mime::from_str("application/x-www-form-urlencoded").unwrap() {
                match req.body_form::<Vec<(String, String)>>().await {
                    Ok(parsed) => {
                        return process_form(req, parsed).await
                    },
                    Err(err) => return error_json!(
                        400, "invalid_request",
                        format!("Parsing form failed: {:?}", err)
                    )
                }
            } else {
                return error_json!(
                    415, "unsupported_media_type",
                    "What's this? Try sending JSON instead. (urlencoded form also works but is less cute)"
                )
            }
        }
        _ => {
            return error_json!(
                415, "unsupported_media_type",
                "You didn't send a Content-Type header, so we don't know how to parse your request."
            );
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_form_to_mf2() {
        use serde_urlencoded::from_str;

        assert_eq!(
            convert_form_to_mf2_json(from_str("h=entry&content=something%20interesting").unwrap()), 
            json!({
                "type": ["h-entry"],
                "properties": {
                    "content": ["something interesting"]
                }
            })
        )
    }
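
    // Illustrative sanity check for the folder-selection helper; the expected
    // values simply mirror the match arms in `get_folder_from_type`.
    #[test]
    fn test_get_folder_from_type() {
        assert_eq!(get_folder_from_type("h-entry"), "posts/");
        assert_eq!(get_folder_from_type("h-feed"), "feeds/");
        assert_eq!(get_folder_from_type("h-event"), "events/");
    }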

    #[test]
    fn test_normalize_mf2() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "content": ["This is content!"]
            }
        });

        let (uid, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
        assert!(post["properties"]["published"].as_array().unwrap().len() > 0);
        DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap()).unwrap();
        assert!(post["properties"]["url"].as_array().unwrap().len() > 0);
        assert!(post["properties"]["uid"].as_array().unwrap().len() > 0);
        assert_eq!(post["properties"]["uid"][0].as_str().unwrap(), &uid);
        assert!(uid.starts_with("https://fireburn.ru/posts/"));
        assert_eq!(post["properties"]["content"][0]["html"].as_str().unwrap().trim(), "<p>This is content!</p>");
        assert_eq!(post["properties"]["channel"][0], "https://fireburn.ru/feeds/main");
        assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/");
    }

    #[test]
    fn test_mp_slug() {
        let mf2 = json!({
            "type": ["h-entry"],
            "properties": {
                "content": ["This is content!"],
                "mp-slug": ["hello-post"]
            },
        });

        let (_, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
        assert!(post["properties"]["url"]
            .as_array()
            .unwrap()
            .iter()
            .map(|i| i.as_str().unwrap())
            .any(|i| i == "https://fireburn.ru/posts/hello-post")
        );
        assert!(
            post["properties"]["mp-slug"].as_array().is_none(),
            "mp-slug wasn't deleted from the post's properties!"
        );
    }

    #[test]
    fn test_normalize_feed() {
        let mf2 = json!({
            "type": ["h-feed"],
            "properties": {
                "name": "Main feed",
                "mp-slug": ["main"]
            }
        });

        let (uid, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
        assert_eq!(post["properties"]["uid"][0].as_str().unwrap(), &uid);
        assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/");
        assert!(post["properties"]["url"]
            .as_array()
            .unwrap()
            .iter()
            .map(|i| i.as_str().unwrap())
            .any(|i| i == "https://fireburn.ru/feeds/main"));
        assert!(
            post["properties"]["mp-slug"].as_array().is_none(),
            "mp-slug wasn't deleted from the post's properties!"
        );
    }
}