summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author Vika <vika@fireburn.ru> 2024-09-04 21:10:05 +0300
committer Vika <vika@fireburn.ru> 2024-09-04 21:10:05 +0300
commit f16cac2d35487b1772d1c2524ed223c779f45f23 (patch)
tree 1925b1e2dfc766c702b96c6456950a0ad635d7e5 /src
parent 9ca1f9c49e2ed15f226000e5cb46342dfc72f5dd (diff)
download bowl-f16cac2d35487b1772d1c2524ed223c779f45f23.tar.zst
Deal with Clippy warnings and simplify the internal Smart Summary API
Now the API itself is responsible for querying settings. This makes
sense, as it's an internal part of the component.
Diffstat (limited to 'src')
-rw-r--r--src/components/smart_summary.rs46
1 file changed, 20 insertions(+), 26 deletions(-)
diff --git a/src/components/smart_summary.rs b/src/components/smart_summary.rs
index 050a52c..7b2df7d 100644
--- a/src/components/smart_summary.rs
+++ b/src/components/smart_summary.rs
@@ -8,20 +8,20 @@ use relm4::{gtk, prelude::{Component, ComponentParts}, ComponentSender};
 // All of this is incredibly minimalist.
 // This should be expanded later.
 #[derive(Debug, serde::Serialize)]
-struct OllamaRequest {
+pub(crate) struct OllamaRequest {
     model: String,
     prompt: String,
     system: String,
 }
 
 #[derive(Debug, serde::Deserialize)]
-struct OllamaChunk {
+pub(crate) struct OllamaChunk {
     response: String,
     done: bool,
 }
 
 #[derive(Debug, serde::Deserialize)]
-struct OllamaError {
+pub(crate) struct OllamaError {
     error: String
 }
 impl std::error::Error for OllamaError {}
@@ -33,7 +33,7 @@ impl std::fmt::Display for OllamaError {
 
 #[derive(serde::Deserialize)]
 #[serde(untagged)]
-enum OllamaResult {
+pub(crate) enum OllamaResult {
     Ok(OllamaChunk),
     Err(OllamaError),
 }
@@ -57,16 +57,22 @@ pub(crate) struct SmartSummaryButton {
 }
 
 impl SmartSummaryButton {
-    async fn prompt_llm(
+    async fn summarize(
         sender: relm4::Sender<Result<String, Error>>,
         http: soup::Session,
-        endpoint: glib::Uri,
-        model: String,
-        system_prompt: String,
-        prompt_prefix: String,
-        mut prompt_suffix: String,
         text: String,
     ) {
+        let settings = gio::Settings::new(crate::APPLICATION_ID);
+        // We shouldn't let the user record a bad setting anyway.
+        let endpoint = glib::Uri::parse(
+            &settings.string("llm-endpoint"),
+            glib::UriFlags::NONE,
+        ).unwrap();
+        let model = settings.get::<String>("smart-summary-model");
+        let system_prompt = settings.get::<String>("smart-summary-system-prompt");
+        let prompt_prefix = settings.get::<String>("smart-summary-prompt-prefix");
+        let mut prompt_suffix = settings.get::<String>("smart-summary-prompt-suffix");
+
         let endpoint = endpoint.parse_relative("./api/generate", glib::UriFlags::NONE).unwrap();
         log::debug!("endpoint: {}, model: {}", endpoint, model);
         log::debug!("system prompt: {}", system_prompt);
@@ -93,7 +99,7 @@ impl SmartSummaryButton {
             }
         };
         log::debug!("response: {:?} ({})", msg.status(), msg.reason_phrase().unwrap_or_default());
-        let mut buffer = Vec::new();
+        let mut buffer = Vec::with_capacity(2048);
         const DELIM: u8 = b'\n';
         loop {
             let len = match stream.read_until(DELIM, &mut buffer).await {
@@ -136,6 +142,7 @@ pub(crate) enum Error {
     #[error("json error: {0}")]
     Json(#[from] serde_json::Error),
     #[error("ollama error: {0}")]
+    #[allow(private_interfaces)]
     Ollama(#[from] OllamaError),
     #[error("i/o error: {0}")]
     Io(#[from] std::io::Error)
@@ -224,22 +231,9 @@ impl Component for SmartSummaryButton {
                 log::debug!("Would generate summary for the following text:\n{}", text);
 
                 log::debug!("XDG_DATA_DIRS={:?}", std::env::var("XDG_DATA_DIRS"));
-                let settings = gio::Settings::new(crate::APPLICATION_ID);
-                // We shouldn't let the user record a bad setting anyway.
-                let endpoint = glib::Uri::parse(
-                    &settings.get::<String>("llm-endpoint"),
-                    glib::UriFlags::NONE,
-                ).unwrap();
-                let model = settings.get::<String>("smart-summary-model");
-                let system_prompt = settings.get::<String>("smart-summary-system-prompt");
-                let prompt_prefix = settings.get::<String>("smart-summary-prompt-prefix");
-                let prompt_suffix = settings.get::<String>("smart-summary-prompt-suffix");
                 let sender = sender.command_sender().clone();
-                relm4::spawn_local(Self::prompt_llm(
-                    sender, self.http.clone(),
-                    endpoint, model, system_prompt,
-                    prompt_prefix, prompt_suffix,
-                    text
+                relm4::spawn_local(Self::summarize(
+                    sender, self.http.clone(), text
                 ));
             }
         }