Diffstat (limited to 'src/media/storage/file.rs')
-rw-r--r--  src/media/storage/file.rs  322
1 file changed, 188 insertions, 134 deletions
diff --git a/src/media/storage/file.rs b/src/media/storage/file.rs
index 4cd0ece..5198a4c 100644
--- a/src/media/storage/file.rs
+++ b/src/media/storage/file.rs
@@ -1,12 +1,12 @@
-use super::{Metadata, ErrorKind, MediaStore, MediaStoreError, Result};
-use std::{path::PathBuf, fmt::Debug};
-use tokio::fs::OpenOptions;
-use tokio::io::{BufReader, BufWriter, AsyncWriteExt, AsyncSeekExt};
+use super::{ErrorKind, MediaStore, MediaStoreError, Metadata, Result};
+use futures::FutureExt;
 use futures::{StreamExt, TryStreamExt};
+use sha2::Digest;
 use std::ops::{Bound, Neg};
 use std::pin::Pin;
-use sha2::Digest;
-use futures::FutureExt;
+use std::{fmt::Debug, path::PathBuf};
+use tokio::fs::OpenOptions;
+use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufReader, BufWriter};
 use tracing::{debug, error};
 
 const BUF_CAPACITY: usize = 16 * 1024;
@@ -22,7 +22,7 @@ impl From<tokio::io::Error> for MediaStoreError {
             msg: format!("file I/O error: {}", source),
             kind: match source.kind() {
                 std::io::ErrorKind::NotFound => ErrorKind::NotFound,
-                _ => ErrorKind::Backend
+                _ => ErrorKind::Backend,
             },
             source: Some(Box::new(source)),
         }
@@ -40,7 +40,9 @@ impl FileStore {
 
 impl MediaStore for FileStore {
     async fn new(url: &'_ url::Url) -> Result<Self> {
-        Ok(Self { base: url.path().into() })
+        Ok(Self {
+            base: url.path().into(),
+        })
     }
 
     #[tracing::instrument(skip(self, content))]
@@ -51,10 +53,17 @@ impl MediaStore for FileStore {
         mut content: T,
     ) -> Result<String>
     where
-        T: tokio_stream::Stream<Item = std::result::Result<bytes::Bytes, axum::extract::multipart::MultipartError>> + Unpin + Send + Debug
+        T: tokio_stream::Stream<
+                Item = std::result::Result<bytes::Bytes, axum::extract::multipart::MultipartError>,
+            > + Unpin
+            + Send
+            + Debug,
     {
         let (tempfilepath, mut tempfile) = self.mktemp().await?;
-        debug!("Temporary file opened for storing pending upload: {}", tempfilepath.display());
+        debug!(
+            "Temporary file opened for storing pending upload: {}",
+            tempfilepath.display()
+        );
         let mut hasher = sha2::Sha256::new();
         let mut length: usize = 0;
 
@@ -62,7 +71,7 @@ impl MediaStore for FileStore {
             let chunk = chunk.map_err(|err| MediaStoreError {
                 kind: ErrorKind::Backend,
                 source: Some(Box::new(err)),
-                msg: "Failed to read a data chunk".to_owned()
+                msg: "Failed to read a data chunk".to_owned(),
             })?;
             debug!("Read {} bytes from the stream", chunk.len());
             length += chunk.len();
@@ -70,9 +79,7 @@ impl MediaStore for FileStore {
                 {
                     let chunk = chunk.clone();
                     let tempfile = &mut tempfile;
-                    async move {
-                        tempfile.write_all(&chunk).await
-                    }
+                    async move { tempfile.write_all(&chunk).await }
                 },
                 {
                     let chunk = chunk.clone();
@@ -80,7 +87,8 @@ impl MediaStore for FileStore {
                         hasher.update(&*chunk);
 
                         hasher
-                    }).map(|r| r.unwrap())
+                    })
+                    .map(|r| r.unwrap())
                 }
             );
             if let Err(err) = write_result {
@@ -90,7 +98,9 @@ impl MediaStore for FileStore {
                 // though temporary files might take up space on the hard drive
                 // We'll clean them when maintenance time comes
                 #[allow(unused_must_use)]
-                { tokio::fs::remove_file(tempfilepath).await; }
+                {
+                    tokio::fs::remove_file(tempfilepath).await;
+                }
                 return Err(err.into());
             }
             hasher = _hasher;
@@ -113,10 +123,17 @@ impl MediaStore for FileStore {
         let filepath = self.base.join(domain_str.as_str()).join(&filename);
         let metafilename = filename.clone() + ".json";
         let metapath = self.base.join(domain_str.as_str()).join(&metafilename);
-        let metatemppath = self.base.join(domain_str.as_str()).join(metafilename + ".tmp");
+        let metatemppath = self
+            .base
+            .join(domain_str.as_str())
+            .join(metafilename + ".tmp");
         metadata.length = std::num::NonZeroUsize::new(length);
         metadata.etag = Some(hash);
-        debug!("File path: {}, metadata: {}", filepath.display(), metapath.display());
+        debug!(
+            "File path: {}, metadata: {}",
+            filepath.display(),
+            metapath.display()
+        );
         {
             let parent = filepath.parent().unwrap();
             tokio::fs::create_dir_all(parent).await?;
@@ -126,7 +143,8 @@ impl MediaStore for FileStore {
             .write(true)
             .open(&metatemppath)
             .await?;
-        meta.write_all(&serde_json::to_vec(&metadata).unwrap()).await?;
+        meta.write_all(&serde_json::to_vec(&metadata).unwrap())
+            .await?;
         tokio::fs::rename(tempfilepath, filepath).await?;
         tokio::fs::rename(metatemppath, metapath).await?;
         Ok(filename)
@@ -138,28 +156,31 @@ impl MediaStore for FileStore {
         &self,
         domain: &str,
         filename: &str,
-    ) -> Result<(Metadata, Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>)> {
+    ) -> Result<(
+        Metadata,
+        Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>,
+    )> {
         debug!("Domain: {}, filename: {}", domain, filename);
         let path = self.base.join(domain).join(filename);
         debug!("Path: {}", path.display());
 
-        let file = OpenOptions::new()
-            .read(true)
-            .open(path)
-            .await?;
+        let file = OpenOptions::new().read(true).open(path).await?;
         let meta = self.metadata(domain, filename).await?;
 
-        Ok((meta, Box::pin(
-            tokio_util::io::ReaderStream::new(
-                // TODO: determine if BufReader provides benefit here
-                // From the logs it looks like we're reading 4KiB at a time
-                // Buffering file contents seems to double download speed
-                // How to benchmark this?
-                BufReader::with_capacity(BUF_CAPACITY, file)
-            )
-            // Sprinkle some salt in form of protective log wrapping
-                .inspect_ok(|chunk| debug!("Read {} bytes from file", chunk.len()))
-        )))
+        Ok((
+            meta,
+            Box::pin(
+                tokio_util::io::ReaderStream::new(
+                    // TODO: determine if BufReader provides benefit here
+                    // From the logs it looks like we're reading 4KiB at a time
+                    // Buffering file contents seems to double download speed
+                    // How to benchmark this?
+                    BufReader::with_capacity(BUF_CAPACITY, file),
+                )
+                // Sprinkle some salt in form of protective log wrapping
+                .inspect_ok(|chunk| debug!("Read {} bytes from file", chunk.len())),
+            ),
+        ))
     }
 
     #[tracing::instrument(skip(self))]
@@ -167,12 +188,13 @@ impl MediaStore for FileStore {
         let metapath = self.base.join(domain).join(format!("{}.json", filename));
         debug!("Metadata path: {}", metapath.display());
 
-        let meta = serde_json::from_slice(&tokio::fs::read(metapath).await?)
-            .map_err(|err| MediaStoreError {
+        let meta = serde_json::from_slice(&tokio::fs::read(metapath).await?).map_err(|err| {
+            MediaStoreError {
                 kind: ErrorKind::Json,
                 msg: format!("{}", err),
-                source: Some(Box::new(err))
-            })?;
+                source: Some(Box::new(err)),
+            }
+        })?;
 
         Ok(meta)
     }
@@ -182,16 +204,14 @@ impl MediaStore for FileStore {
         &self,
         domain: &str,
         filename: &str,
-        range: (Bound<u64>, Bound<u64>)
-    ) -> Result<Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>> {
+        range: (Bound<u64>, Bound<u64>),
+    ) -> Result<Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>>
+    {
         let path = self.base.join(format!("{}/{}", domain, filename));
         let metapath = self.base.join(format!("{}/{}.json", domain, filename));
         debug!("Path: {}, metadata: {}", path.display(), metapath.display());
 
-        let mut file = OpenOptions::new()
-            .read(true)
-            .open(path)
-            .await?;
+        let mut file = OpenOptions::new().read(true).open(path).await?;
 
         let start = match range {
             (Bound::Included(bound), _) => {
@@ -202,45 +222,52 @@ impl MediaStore for FileStore {
             (Bound::Unbounded, Bound::Included(bound)) => {
                 // Seek to the end minus the bounded bytes
                 debug!("Seeking {} bytes back from the end...", bound);
-                file.seek(std::io::SeekFrom::End(i64::try_from(bound).unwrap().neg())).await?
-            },
+                file.seek(std::io::SeekFrom::End(i64::try_from(bound).unwrap().neg()))
+                    .await?
+            }
             (Bound::Unbounded, Bound::Unbounded) => 0,
-            (_, Bound::Excluded(_)) => unreachable!()
+            (_, Bound::Excluded(_)) => unreachable!(),
         };
 
-        let stream = Box::pin(tokio_util::io::ReaderStream::new(BufReader::with_capacity(BUF_CAPACITY, file)))
-            .map_ok({
-                let mut bytes_read = 0usize;
-                let len = match range {
-                    (_, Bound::Unbounded) => None,
-                    (Bound::Unbounded, Bound::Included(bound)) => Some(bound),
-                    (_, Bound::Included(bound)) => Some(bound + 1 - start),
-                    (_, Bound::Excluded(_)) => unreachable!()
-                };
-                move |chunk| {
-                    debug!("Read {} bytes from file, {} in this chunk", bytes_read, chunk.len());
-                    bytes_read += chunk.len();
-                    if let Some(len) = len.map(|len| len.try_into().unwrap()) {
-                        if bytes_read > len {
-                            if bytes_read - len > chunk.len() {
-                                return None
-                            }
-                            debug!("Truncating last {} bytes", bytes_read - len);
-                            return Some(chunk.slice(..chunk.len() - (bytes_read - len)))
+        let stream = Box::pin(tokio_util::io::ReaderStream::new(BufReader::with_capacity(
+            BUF_CAPACITY,
+            file,
+        )))
+        .map_ok({
+            let mut bytes_read = 0usize;
+            let len = match range {
+                (_, Bound::Unbounded) => None,
+                (Bound::Unbounded, Bound::Included(bound)) => Some(bound),
+                (_, Bound::Included(bound)) => Some(bound + 1 - start),
+                (_, Bound::Excluded(_)) => unreachable!(),
+            };
+            move |chunk| {
+                debug!(
+                    "Read {} bytes from file, {} in this chunk",
+                    bytes_read,
+                    chunk.len()
+                );
+                bytes_read += chunk.len();
+                if let Some(len) = len.map(|len| len.try_into().unwrap()) {
+                    if bytes_read > len {
+                        if bytes_read - len > chunk.len() {
+                            return None;
                         }
+                        debug!("Truncating last {} bytes", bytes_read - len);
+                        return Some(chunk.slice(..chunk.len() - (bytes_read - len)));
                     }
-
-                    Some(chunk)
                 }
-            })
-            .try_take_while(|x| std::future::ready(Ok(x.is_some())))
-            // Will never panic, because the moment the stream yields
-            // a None, it is considered exhausted.
-            .map_ok(|x| x.unwrap());
 
-        return Ok(Box::pin(stream))
-    }
+                Some(chunk)
+            }
+        })
+        .try_take_while(|x| std::future::ready(Ok(x.is_some())))
+        // Will never panic, because the moment the stream yields
+        // a None, it is considered exhausted.
+        .map_ok(|x| x.unwrap());
 
+        return Ok(Box::pin(stream));
+    }
 
     async fn delete(&self, domain: &str, filename: &str) -> Result<()> {
         let path = self.base.join(format!("{}/{}", domain, filename));
@@ -251,7 +278,7 @@ impl MediaStore for FileStore {
 
 #[cfg(test)]
 mod tests {
-    use super::{Metadata, FileStore, MediaStore};
+    use super::{FileStore, MediaStore, Metadata};
     use std::ops::Bound;
     use tokio::io::AsyncReadExt;
 
@@ -259,10 +286,15 @@ mod tests {
     #[tracing_test::traced_test]
     async fn test_ranges() {
         let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
-        let store = FileStore { base: tempdir.path().to_path_buf() };
+        let store = FileStore {
+            base: tempdir.path().to_path_buf(),
+        };
 
         let file: &[u8] = include_bytes!("./file.rs");
-        let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i))));
+        let stream = tokio_stream::iter(
+            file.chunks(100)
+                .map(|i| Ok(bytes::Bytes::copy_from_slice(i))),
+        );
         let metadata = Metadata {
             filename: Some("file.rs".to_string()),
             content_type: Some("text/plain".to_string()),
@@ -271,28 +303,30 @@ mod tests {
         };
 
         // write through the interface
-        let filename = store.write_streaming(
-            "fireburn.ru",
-            metadata, stream
-        ).await.unwrap();
+        let filename = store
+            .write_streaming("fireburn.ru", metadata, stream)
+            .await
+            .unwrap();
 
         tracing::debug!("Writing complete.");
 
         // Ensure the file is there
-        let content = tokio::fs::read(
-            tempdir.path()
-                .join("fireburn.ru")
-                .join(&filename)
-        ).await.unwrap();
+        let content = tokio::fs::read(tempdir.path().join("fireburn.ru").join(&filename))
+            .await
+            .unwrap();
         assert_eq!(content, file);
 
         tracing::debug!("Reading range from the start...");
         // try to read range
         let range = {
-            let stream = store.stream_range(
-                "fireburn.ru", &filename,
-                (Bound::Included(0), Bound::Included(299))
-            ).await.unwrap();
+            let stream = store
+                .stream_range(
+                    "fireburn.ru",
+                    &filename,
+                    (Bound::Included(0), Bound::Included(299)),
+                )
+                .await
+                .unwrap();
 
             let mut reader = tokio_util::io::StreamReader::new(stream);
 
@@ -308,10 +342,14 @@ mod tests {
         tracing::debug!("Reading range from the middle...");
 
         let range = {
-            let stream = store.stream_range(
-                "fireburn.ru", &filename,
-                (Bound::Included(150), Bound::Included(449))
-            ).await.unwrap();
+            let stream = store
+                .stream_range(
+                    "fireburn.ru",
+                    &filename,
+                    (Bound::Included(150), Bound::Included(449)),
+                )
+                .await
+                .unwrap();
 
             let mut reader = tokio_util::io::StreamReader::new(stream);
 
@@ -326,13 +364,17 @@ mod tests {
 
         tracing::debug!("Reading range from the end...");
         let range = {
-            let stream = store.stream_range(
-                "fireburn.ru", &filename,
-                // Note: the `headers` crate parses bounds in a
-                // non-standard way, where unbounded start actually
-                // means getting things from the end...
-                (Bound::Unbounded, Bound::Included(300))
-            ).await.unwrap();
+            let stream = store
+                .stream_range(
+                    "fireburn.ru",
+                    &filename,
+                    // Note: the `headers` crate parses bounds in a
+                    // non-standard way, where unbounded start actually
+                    // means getting things from the end...
+                    (Bound::Unbounded, Bound::Included(300)),
+                )
+                .await
+                .unwrap();
 
             let mut reader = tokio_util::io::StreamReader::new(stream);
 
@@ -343,15 +385,19 @@ mod tests {
         };
 
         assert_eq!(range.len(), 300);
-        assert_eq!(range.as_slice(), &file[file.len()-300..file.len()]);
+        assert_eq!(range.as_slice(), &file[file.len() - 300..file.len()]);
 
         tracing::debug!("Reading the whole file...");
         // try to read range
         let range = {
-            let stream = store.stream_range(
-                "fireburn.ru", &("/".to_string() + &filename),
-                (Bound::Unbounded, Bound::Unbounded)
-            ).await.unwrap();
+            let stream = store
+                .stream_range(
+                    "fireburn.ru",
+                    &("/".to_string() + &filename),
+                    (Bound::Unbounded, Bound::Unbounded),
+                )
+                .await
+                .unwrap();
 
             let mut reader = tokio_util::io::StreamReader::new(stream);
 
@@ -365,15 +411,19 @@ mod tests {
         assert_eq!(range.as_slice(), file);
     }
 
-
     #[tokio::test]
     #[tracing_test::traced_test]
     async fn test_streaming_read_write() {
         let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
-        let store = FileStore { base: tempdir.path().to_path_buf() };
+        let store = FileStore {
+            base: tempdir.path().to_path_buf(),
+        };
 
         let file: &[u8] = include_bytes!("./file.rs");
-        let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i))));
+        let stream = tokio_stream::iter(
+            file.chunks(100)
+                .map(|i| Ok(bytes::Bytes::copy_from_slice(i))),
+        );
         let metadata = Metadata {
             filename: Some("style.css".to_string()),
             content_type: Some("text/css".to_string()),
@@ -382,27 +432,32 @@ mod tests {
         };
 
         // write through the interface
-        let filename = store.write_streaming(
-            "fireburn.ru",
-            metadata, stream
-        ).await.unwrap();
-        println!("{}, {}", filename, tempdir.path()
-                 .join("fireburn.ru")
-                 .join(&filename)
-                 .display());
-        let content = tokio::fs::read(
-            tempdir.path()
-                .join("fireburn.ru")
-                .join(&filename)
-        ).await.unwrap();
+        let filename = store
+            .write_streaming("fireburn.ru", metadata, stream)
+            .await
+            .unwrap();
+        println!(
+            "{}, {}",
+            filename,
+            tempdir.path().join("fireburn.ru").join(&filename).display()
+        );
+        let content = tokio::fs::read(tempdir.path().join("fireburn.ru").join(&filename))
+            .await
+            .unwrap();
         assert_eq!(content, file);
 
         // check internal metadata format
-        let meta: Metadata = serde_json::from_slice(&tokio::fs::read(
-            tempdir.path()
-                .join("fireburn.ru")
-                .join(filename.clone() + ".json")
-        ).await.unwrap()).unwrap();
+        let meta: Metadata = serde_json::from_slice(
+            &tokio::fs::read(
+                tempdir
+                    .path()
+                    .join("fireburn.ru")
+                    .join(filename.clone() + ".json"),
+            )
+            .await
+            .unwrap(),
+        )
+        .unwrap();
         assert_eq!(meta.content_type.as_deref(), Some("text/css"));
         assert_eq!(meta.filename.as_deref(), Some("style.css"));
         assert_eq!(meta.length.map(|i| i.get()), Some(file.len()));
@@ -410,10 +465,10 @@ mod tests {
 
         // read back the data using the interface
         let (metadata, read_back) = {
-            let (metadata, stream) = store.read_streaming(
-                "fireburn.ru",
-                &filename
-            ).await.unwrap();
+            let (metadata, stream) = store
+                .read_streaming("fireburn.ru", &filename)
+                .await
+                .unwrap();
             let mut reader = tokio_util::io::StreamReader::new(stream);
 
             let mut buf = Vec::default();
@@ -427,6 +482,5 @@ mod tests {
         assert_eq!(meta.filename.as_deref(), Some("style.css"));
         assert_eq!(meta.length.map(|i| i.get()), Some(file.len()));
         assert!(meta.etag.is_some());
-
     }
 }