summary refs log tree commit diff
path: root/po/bowl.pot
diff options
context:
space:
mode:
Diffstat (limited to 'po/bowl.pot')
-rw-r--r--  po/bowl.pot  51
1 file changed, 50 insertions, 1 deletion
diff --git a/po/bowl.pot b/po/bowl.pot
index 24e87b5..92da214 100644
--- a/po/bowl.pot
+++ b/po/bowl.pot
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: bowl\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2024-09-01 18:02+0300\n"
+"POT-Creation-Date: 2024-09-04 15:59+0300\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -130,3 +130,52 @@ msgstr ""
 #: src/lib.rs:331
 msgid "Micropub access token for {}"
 msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:6
+msgid "LLM API endpoint"
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:7
+msgid "Ollama API endpoint used to query an LLM for Smart Summary."
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:13
+msgid "Smart Summary LLM"
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:25
+msgid "LLM system prompt"
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:26
+msgid ""
+"The system prompt provided to the LLM. For best results, it should instruct "
+"the LLM to provide a one-sentence summary of the document it receives. The "
+"default system prompt is tested for Llama 3.1-8B and should work for posts "
+"written mainly in English. Performance with other languages is untested."
+msgstr ""
+
+#. TRANSLATORS: please keep the link intact
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:15
+msgid ""
+"The model that Ollama will load to produce summaries. Available models can "
+"be seen at <a href=\"https://ollama.com/library\">Ollama library</a>."
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:38
+msgid "Smart Summary prompt prefix"
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:39
+msgid ""
+"What the text is prefixed with when pasted into the LLM prompt. Something "
+"like \"Summarize this text:\" works well."
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:47
+msgid "Smart Summary prompt suffix"
+msgstr ""
+
+#: data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in:48
+msgid "Append this to the prompt after the article text."
+msgstr ""