about summary refs log tree commit diff
path: root/src/micropub
diff options
context:
space:
mode:
author: Vika <vika@fireburn.ru> 2021-07-29 18:14:30 +0300
committer: Vika <vika@fireburn.ru> 2021-07-29 18:14:30 +0300
commit: 949a961c19ba994c3f0846e7b54d9a55a94d7b9a (patch)
tree: b9d2031e1b08dcebf8f22c6051dea4da6e4f40ab /src/micropub
parent: bd38e2f19b8c2c353ad759c1631b93b0f8eafbb0 (diff)
download: kittybox-949a961c19ba994c3f0846e7b54d9a55a94d7b9a.tar.zst
Appease rustfmt, clippy and cargo check
Diffstat (limited to 'src/micropub')
-rw-r--r--src/micropub/mod.rs10
-rw-r--r--src/micropub/post.rs116
2 files changed, 72 insertions, 54 deletions
diff --git a/src/micropub/mod.rs b/src/micropub/mod.rs
index 84b9083..23f20c4 100644
--- a/src/micropub/mod.rs
+++ b/src/micropub/mod.rs
@@ -7,17 +7,21 @@ pub use post::post_handler;
 
 pub struct CORSMiddleware {}
 
-use async_trait::async_trait;
-use tide::{Next, Request, Result};
 use crate::database;
 use crate::ApplicationState;
+use async_trait::async_trait;
+use tide::{Next, Request, Result};
 
 #[async_trait]
 impl<B> tide::Middleware<ApplicationState<B>> for CORSMiddleware
 where
     B: database::Storage + Send + Sync + Clone,
 {
-    async fn handle(&self, req: Request<ApplicationState<B>>, next: Next<'_, ApplicationState<B>>) -> Result {
+    async fn handle(
+        &self,
+        req: Request<ApplicationState<B>>,
+        next: Next<'_, ApplicationState<B>>,
+    ) -> Result {
         let mut res = next.run(req).await;
 
         res.insert_header("Access-Control-Allow-Origin", "*");
diff --git a/src/micropub/post.rs b/src/micropub/post.rs
index 8667451..f317da5 100644
--- a/src/micropub/post.rs
+++ b/src/micropub/post.rs
@@ -6,7 +6,7 @@ use core::iter::Iterator;
 use futures::stream;
 use futures::StreamExt;
 use http_types::Mime;
-use log::{error, warn, info};
+use log::{error, info, warn};
 use newbase60::num_to_sxg;
 use std::convert::TryInto;
 use std::str::FromStr;
@@ -172,9 +172,9 @@ pub async fn new_post<S: Storage>(
     // where several users or identities share one Micropub server
     // (maybe a family website or a shitpost sideblog?)
     if !post["properties"]["uid"][0]
-            .as_str()
-            .unwrap()
-            .starts_with(user.me.as_str())
+        .as_str()
+        .unwrap()
+        .starts_with(user.me.as_str())
         || post["properties"]["channel"]
             .as_array()
             .unwrap()
@@ -430,52 +430,60 @@ async fn post_process_new_post<S: Storage>(
     //    We'll need the bodies here to get their endpoints
     let source = &uid;
     stream::iter(posts_with_bodies.into_iter())
-        .filter_map(|(url, response, body): (surf::Url, surf::Response, String)| async move {
-            // Check Link headers first
-            // the first webmention endpoint will be returned
-            if let Some(values) = response.header("Link") {
-                let mut iter = values.iter().flat_map(|i| i.as_str().split(','));
-
-                for link in iter {
-                    let mut split = link.split(";");
-
-                    match split.next() {
-                        Some(uri) => {
-                            if let Some(uri) = uri.strip_prefix('<') {
-                                if let Some(uri) = uri.strip_suffix('>') {
-                                    for prop in split {
-                                        let lowercased = prop.to_ascii_lowercase();
-                                        if &lowercased == "rel=\"webmention\"" || &lowercased == "rel=webmention" {
-                                            if let Ok(endpoint) = url.join(uri) {
-                                                return Some((url, endpoint));
+        .filter_map(
+            |(url, response, body): (surf::Url, surf::Response, String)| async move {
+                // Check Link headers first
+                // the first webmention endpoint will be returned
+                if let Some(values) = response.header("Link") {
+                    let iter = values.iter().flat_map(|i| i.as_str().split(','));
+
+                    for link in iter {
+                        let mut split = link.split(';');
+
+                        match split.next() {
+                            Some(uri) => {
+                                if let Some(uri) = uri.strip_prefix('<') {
+                                    if let Some(uri) = uri.strip_suffix('>') {
+                                        for prop in split {
+                                            let lowercased = prop.to_ascii_lowercase();
+                                            if &lowercased == "rel=\"webmention\""
+                                                || &lowercased == "rel=webmention"
+                                            {
+                                                if let Ok(endpoint) = url.join(uri) {
+                                                    return Some((url, endpoint));
+                                                }
                                             }
                                         }
                                     }
                                 }
                             }
-                        },
-                        None => continue
+                            None => continue,
+                        }
                     }
                 }
-            }
-            // TODO: Replace this function once the MF2 parser is ready
-            // A compliant parser's output format includes rels,
-            // we could just find a Webmention one in there
-            let pattern = easy_scraper::Pattern::new(r#"<link href="{url}" rel="webmention">"#)
-                .expect("Pattern for webmentions couldn't be parsed");
-            let matches = pattern.matches(&body);
-            if matches.is_empty() {
-                return None;
-            }
-            let endpoint = &matches[0]["url"];
-            if let Ok(endpoint) = url.join(endpoint) {
-                Some((url, endpoint))
-            } else {
-                None
-            }
-        })
+                // TODO: Replace this function once the MF2 parser is ready
+                // A compliant parser's output format includes rels,
+                // we could just find a Webmention one in there
+                let pattern = easy_scraper::Pattern::new(r#"<link href="{url}" rel="webmention">"#)
+                    .expect("Pattern for webmentions couldn't be parsed");
+                let matches = pattern.matches(&body);
+                if matches.is_empty() {
+                    return None;
+                }
+                let endpoint = &matches[0]["url"];
+                if let Ok(endpoint) = url.join(endpoint) {
+                    Some((url, endpoint))
+                } else {
+                    None
+                }
+            },
+        )
         .map(|(target, endpoint)| async move {
-            info!("Sending webmention to {} about {}", source, &target.to_string());
+            info!(
+                "Sending webmention to {} about {}",
+                source,
+                &target.to_string()
+            );
             let response = http
                 .post(&endpoint)
                 .content_type("application/x-www-form-urlencoded")
@@ -543,12 +551,14 @@ async fn process_json<S: Storage>(
                 // authorization endpoint is supposed to reject any auth request trying to get this
                 // scope. It is intended for TRUSTED external services that need to modify the
                 // database while ignoring any access controls
-                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                    && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                {
                     return error_json!(
                         403,
                         "forbidden",
                         "You're not allowed to delete someone else's posts."
-                    )
+                    );
                 }
                 if let Err(error) = req.state().storage.delete_post(&url).await {
                     return Ok(error.into());
@@ -563,12 +573,14 @@ async fn process_json<S: Storage>(
                         "You need an `update` scope to update posts."
                     );
                 }
-                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                    && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                {
                     return error_json!(
                         403,
                         "forbidden",
                         "You're not allowed to delete someone else's posts."
-                    )
+                    );
                 }
                 if let Err(error) = req.state().storage.update_post(&url, body.clone()).await {
                     Ok(error.into())
@@ -637,12 +649,15 @@ async fn process_form<S: Storage>(
             }
             match form.iter().find(|(k, _)| k == "url") {
                 Some((_, url)) => {
-                    if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+                    if (url::Url::parse(url)?.origin().ascii_serialization() + "/")
+                        != user.me.as_str()
+                        && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                    {
                         return error_json!(
                             403,
                             "forbidden",
                             "You're not allowed to delete someone else's posts."
-                        )
+                        );
                     }
                     if let Err(error) = req.state().storage.delete_post(&url).await {
                         return error_json!(500, "database_error", error);
@@ -791,11 +806,10 @@ mod tests {
         DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap())
             .expect("Couldn't parse date from rfc3339");
         assert!(
-            post["properties"]["url"]
+            !post["properties"]["url"]
                 .as_array()
                 .expect("post['url'] is undefined")
-                .len()
-                > 0,
+                .is_empty(),
             "Post doesn't have any URLs"
         );
         assert_eq!(