about summary refs log tree commit diff
diff options
context:
space:
mode:
authorVika <vika@fireburn.ru>2021-07-27 15:29:26 +0300
committerVika <vika@fireburn.ru>2021-07-28 18:40:30 +0300
commitc5211ed74d290c6d8f5dd9ea92aceca8b2d20d71 (patch)
treeb061a4a3b03e48e3deb1d2a18059ceef3faaf4f8
parentfce70776e7cb53e25416e4c3b3e18c249611434c (diff)
downloadkittybox-c5211ed74d290c6d8f5dd9ea92aceca8b2d20d71.tar.zst
Respect Link: headers when sending webmentions
-rw-r--r--src/micropub/post.rs36
1 file changed, 32 insertions, 4 deletions
diff --git a/src/micropub/post.rs b/src/micropub/post.rs
index 95b4dd0..8667451 100644
--- a/src/micropub/post.rs
+++ b/src/micropub/post.rs
@@ -6,7 +6,7 @@ use core::iter::Iterator;
 use futures::stream;
 use futures::StreamExt;
 use http_types::Mime;
-use log::{error, warn};
+use log::{error, warn, info};
 use newbase60::num_to_sxg;
 use std::convert::TryInto;
 use std::str::FromStr;
@@ -336,7 +336,7 @@ async fn post_process_new_post<S: Storage>(
     contextually_significant_posts.dedup();
 
     // 1.3. Fetch the posts with their bodies and save them in a new Vec<(surf::Url, String)>
-    let posts_with_bodies: Vec<(surf::Url, String)> =
+    let posts_with_bodies: Vec<(surf::Url, surf::Response, String)> =
         stream::iter(contextually_significant_posts.into_iter())
             .filter_map(|v: surf::Url| async move {
                 if let Ok(res) = http.get(&v).send().await {
@@ -351,7 +351,7 @@ async fn post_process_new_post<S: Storage>(
             })
             .filter_map(|(v, mut res): (surf::Url, surf::Response)| async move {
                 if let Ok(body) = res.body_string().await {
-                    Some((v, body))
+                    Some((v, res, body))
                 } else {
                     None
                 }
@@ -430,7 +430,34 @@ async fn post_process_new_post<S: Storage>(
     //    We'll need the bodies here to get their endpoints
     let source = &uid;
     stream::iter(posts_with_bodies.into_iter())
-        .filter_map(|(url, body): (surf::Url, String)| async move {
+        .filter_map(|(url, response, body): (surf::Url, surf::Response, String)| async move {
+            // Check Link headers first
+            // the first webmention endpoint will be returned
+            if let Some(values) = response.header("Link") {
+                let mut iter = values.iter().flat_map(|i| i.as_str().split(','));
+
+                for link in iter {
+                    let mut split = link.split(";");
+
+                    match split.next() {
+                        Some(uri) => {
+                            if let Some(uri) = uri.strip_prefix('<') {
+                                if let Some(uri) = uri.strip_suffix('>') {
+                                    for prop in split {
+                                        let lowercased = prop.to_ascii_lowercase();
+                                        if &lowercased == "rel=\"webmention\"" || &lowercased == "rel=webmention" {
+                                            if let Ok(endpoint) = url.join(uri) {
+                                                return Some((url, endpoint));
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        },
+                        None => continue
+                    }
+                }
+            }
             // TODO: Replace this function once the MF2 parser is ready
             // A compliant parser's output format includes rels,
             // we could just find a Webmention one in there
@@ -448,6 +475,7 @@ async fn post_process_new_post<S: Storage>(
             }
         })
         .map(|(target, endpoint)| async move {
+            info!("Sending webmention to {} about {}", source, &target.to_string());
             let response = http
                 .post(&endpoint)
                 .content_type("application/x-www-form-urlencoded")