Diffstat (limited to 'src/database')
-rw-r--r--  src/database/file/mod.rs      | 14
-rw-r--r--  src/database/memory.rs        | 21
-rw-r--r--  src/database/mod.rs           | 10
-rw-r--r--  src/database/postgres/mod.rs  | 30
4 files changed, 30 insertions, 45 deletions
diff --git a/src/database/file/mod.rs b/src/database/file/mod.rs
index f6715e1..ba8201f 100644
--- a/src/database/file/mod.rs
+++ b/src/database/file/mod.rs
@@ -197,15 +197,7 @@ fn modify_post(post: &serde_json::Value, update: MicropubUpdate) -> Result<serde
 /// A backend using a folder with JSON files as a backing store.
 /// Uses symbolic links to represent a many-to-one mapping of URLs to a post.
 pub struct FileStorage {
-    root_dir: PathBuf,
-}
-
-impl FileStorage {
-    /// Create a new storage wrapping a folder specified by root_dir.
-    pub async fn new(root_dir: PathBuf) -> Result<Self> {
-        // TODO check if the dir is writable
-        Ok(Self { root_dir })
-    }
+    pub(super) root_dir: PathBuf,
 }
 
 async fn hydrate_author<S: Storage>(
@@ -255,6 +247,10 @@ async fn hydrate_author<S: Storage>(
 
 #[async_trait]
 impl Storage for FileStorage {
+    async fn new(url: &'_ url::Url) -> Result<Self> {
+        // TODO: sanity check
+        Ok(Self { root_dir: PathBuf::from(url.path()) })
+    }
     #[tracing::instrument(skip(self))]
     async fn categories(&self, url: &str) -> Result<Vec<String>> {
         // This requires an expensive scan through the entire
diff --git a/src/database/memory.rs b/src/database/memory.rs
index 56caeec..be37fed 100644
--- a/src/database/memory.rs
+++ b/src/database/memory.rs
@@ -8,7 +8,7 @@ use tokio::sync::RwLock;
 
 use crate::database::{ErrorKind, MicropubChannel, Result, settings, Storage, StorageError};
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct MemoryStorage {
     pub mapping: Arc<RwLock<HashMap<String, serde_json::Value>>>,
     pub channels: Arc<RwLock<HashMap<url::Url, Vec<String>>>>,
@@ -16,6 +16,10 @@ pub struct MemoryStorage {
 
 #[async_trait]
 impl Storage for MemoryStorage {
+    async fn new(_url: &url::Url) -> Result<Self> {
+        Ok(Self::default())
+    }
+
     async fn categories(&self, _url: &str) -> Result<Vec<String>> {
         unimplemented!()
     }
@@ -231,18 +235,3 @@ impl Storage for MemoryStorage {
 
     }
 }
-
-impl Default for MemoryStorage {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-impl MemoryStorage {
-    pub fn new() -> Self {
-        Self {
-            mapping: Arc::new(RwLock::new(HashMap::new())),
-            channels: Arc::new(RwLock::new(HashMap::new())),
-        }
-    }
-}
diff --git a/src/database/mod.rs b/src/database/mod.rs
index f48b4a9..c256867 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -215,6 +215,8 @@ pub type Result<T> = std::result::Result<T, StorageError>;
 /// or lock the database so that write conflicts or reading half-written data should not occur.
 #[async_trait]
 pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
+    /// Initialize Self from a URL, possibly performing initialization.
+    async fn new(url: &'_ url::Url) -> Result<Self>;
     /// Return the list of categories used in blog posts of a specified blog.
     async fn categories(&self, url: &str) -> Result<Vec<String>>;
 
@@ -759,11 +761,9 @@ mod tests {
             #[tracing_test::traced_test]
             async fn $func_name() {
                 let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
-                let backend = super::super::FileStorage::new(
-                    tempdir.path().to_path_buf()
-                )
-                    .await
-                    .unwrap();
+                let backend = super::super::FileStorage {
+                    root_dir: tempdir.path().to_path_buf()
+                };
                 super::$func_name(backend).await
             }
         };
diff --git a/src/database/postgres/mod.rs b/src/database/postgres/mod.rs
index 7813045..0ebaffb 100644
--- a/src/database/postgres/mod.rs
+++ b/src/database/postgres/mod.rs
@@ -2,7 +2,7 @@ use std::borrow::Cow;
 use std::str::FromStr;
 
 use kittybox_util::{MicropubChannel, MentionType};
-use sqlx::{PgPool, Executor};
+use sqlx::{ConnectOptions, Executor, PgPool};
 use crate::micropub::{MicropubUpdate, MicropubPropertyDeletion};
 
 use super::settings::Setting;
@@ -36,6 +36,17 @@ pub struct PostgresStorage {
 }
 
 impl PostgresStorage {
+    /// Construct a [`PostgresStorage`] from a [`sqlx::PgPool`],
+    /// running appropriate migrations.
+    pub async fn from_pool(db: sqlx::PgPool) -> Result<Self> {
+        db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox")).await?;
+        MIGRATOR.run(&db).await?;
+        Ok(Self { db })
+    }
+}
+
+#[async_trait::async_trait]
+impl Storage for PostgresStorage {
     /// Construct a new [`PostgresStorage`] from an URI string and run
     /// migrations on the database.
     ///
@@ -43,9 +54,9 @@ impl PostgresStorage {
     /// password from the file at the specified path. If, instead,
     /// the `PGPASS` environment variable is present, read the
    /// password from it.
-    pub async fn new(uri: &str) -> Result<Self> {
-        tracing::debug!("Postgres URL: {uri}");
-        let mut options = sqlx::postgres::PgConnectOptions::from_str(uri)?
+    async fn new(url: &'_ url::Url) -> Result<Self> {
+        tracing::debug!("Postgres URL: {url}");
+        let mut options = sqlx::postgres::PgConnectOptions::from_url(url)?
             .options([("search_path", "kittybox")]);
         if let Ok(password_file) = std::env::var("PGPASS_FILE") {
             let password = tokio::fs::read_to_string(password_file).await.unwrap();
@@ -62,17 +73,6 @@ impl PostgresStorage {
 
     }
 
-    /// Construct a [`PostgresStorage`] from a [`sqlx::PgPool`],
-    /// running appropriate migrations.
-    pub async fn from_pool(db: sqlx::PgPool) -> Result<Self> {
-        db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox")).await?;
-        MIGRATOR.run(&db).await?;
-        Ok(Self { db })
-    }
-}
-
-#[async_trait::async_trait]
-impl Storage for PostgresStorage {
     #[tracing::instrument(skip(self))]
     async fn categories(&self, url: &str) -> Result<Vec<String>> {
         sqlx::query_scalar::<_, String>("
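The net effect of this diff is that backend construction moves from per-backend inherent constructors into the Storage trait itself, keyed off a url::Url. Below is a minimal sketch of how a caller might pick and construct a backend from a connection URL under the new trait method; the re-export paths and the open_storage/run helpers are assumptions for illustration, not part of this commit.

// Illustrative only: exact module paths are assumptions.
use crate::database::{FileStorage, Result, Storage};
use crate::database::postgres::PostgresStorage;

/// Pick a storage backend based on the URL scheme and hand it to `run`.
async fn open_storage(uri: &str) -> Result<()> {
    let url: url::Url = uri.parse().expect("invalid backend URI");
    match url.scheme() {
        // e.g. file:///var/lib/kittybox -> folder of JSON files
        "file" => run(FileStorage::new(&url).await?).await,
        // e.g. postgres://user@host/kittybox -> PostgreSQL backend
        "postgres" => run(PostgresStorage::new(&url).await?).await,
        other => panic!("unsupported backend scheme: {other}"),
    }
}

/// Generic over any `Storage` implementation, including `MemoryStorage` in tests.
async fn run<S: Storage>(_backend: S) -> Result<()> {
    // ... serve requests against the backend here ...
    Ok(())
}

With construction behind the trait, file- and Postgres-backed deployments can share one startup path that differs only in the configured URL, which appears to be the motivation for hanging new() off Storage.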