Diffstat (limited to 'data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in')
-rw-r--r-- | data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in | 48
1 file changed, 48 insertions, 0 deletions
diff --git a/data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in b/data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in
index 81e0b13..4cec9d1 100644
--- a/data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in
+++ b/data/xyz.vikanezrimaya.kittybox.Bowl.gschema.xml.in
@@ -1,5 +1,53 @@
 <?xml version="1.0" encoding="utf-8"?>
 <schemalist>
   <schema path="/xyz/vikanezrimaya/kittybox/Bowl/" id="@app-id@" gettext-domain="@gettext-package@">
+    <key name="llm-endpoint" type="s">
+      <default>"http://localhost:11434/"</default>
+      <summary>LLM API endpoint</summary>
+      <description>
+        Ollama API endpoint used to query an LLM for Smart Summary.
+      </description>
+    </key>
+    <key name="smart-summary-model" type="s">
+      <default>"llama3.1:8b-instruct-q8_0"</default>
+      <summary>Smart Summary LLM</summary>
+      <!-- TRANSLATORS: please keep the link intact -->
+      <description>
+        <![CDATA[
+          The model that Ollama will load to produce
+          summaries. Available models can be seen at
+          <a href="https://ollama.com/library">Ollama library</a>.
+        ]]>
+      </description>
+    </key>
+    <key name="smart-summary-system-prompt" type="s">
+      <default>"You are a helpful AI assistant embedded into a blog authoring tool. You will be provided with a text to summarize. Reply only, strictly with a one-sentence summary of the provided text, and don't write anything else."</default>
+      <summary>LLM system prompt</summary>
+      <description>
+        The system prompt provided to the LLM. For best results, it
+        should instruct the LLM to provide a one-sentence summary of
+        the document it receives.
+
+        The default system prompt is tested for Llama 3.1-8B and
+        should work for posts written mainly in English. Performance
+        with other languages is untested.
+      </description>
+    </key>
+    <key name="smart-summary-prompt-prefix" type="s">
+      <default>"Summarize the following text:"</default>
+      <summary>Smart Summary prompt prefix</summary>
+      <description>
+        What the text is prefixed with when pasted into the LLM prompt.
+
+        Something like "Summarize this text:" works well.
+      </description>
+    </key>
+    <key name="smart-summary-prompt-suffix" type="s">
+      <default>""</default>
+      <summary>Smart Summary prompt suffix</summary>
+      <description>
+        Append this to the prompt after the article text.
+      </description>
+    </key>
   </schema>
 </schemalist>
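
For context, a minimal sketch of how these keys could be consumed: read through GSettings and assembled into an Ollama /api/generate request. This is not Bowl's actual code; it assumes a Rust client, the function name smart_summary and the ureq/serde_json crates are illustrative choices, and the literal schema id "xyz.vikanezrimaya.kittybox.Bowl" stands in for the build-time @app-id@ substitution.

// Hypothetical sketch only; key names match the schema above, everything
// else (crates, function name, schema id) is an assumption for illustration.
use gio::prelude::*;
use serde_json::json;

fn smart_summary(text: &str) -> Result<String, Box<dyn std::error::Error>> {
    // "@app-id@" is substituted at build time; the released id is assumed here.
    let settings = gio::Settings::new("xyz.vikanezrimaya.kittybox.Bowl");

    let endpoint = settings.string("llm-endpoint");      // e.g. http://localhost:11434/
    let model = settings.string("smart-summary-model");  // e.g. llama3.1:8b-instruct-q8_0
    let system = settings.string("smart-summary-system-prompt");
    let prefix = settings.string("smart-summary-prompt-prefix");
    let suffix = settings.string("smart-summary-prompt-suffix");

    // The prefix and suffix wrap the article text to form the user prompt.
    let prompt = format!("{prefix}\n\n{text}\n\n{suffix}");

    // Ollama's generate endpoint accepts model, system and prompt fields.
    let body = json!({
        "model": model.as_str(),
        "system": system.as_str(),
        "prompt": prompt,
        "stream": false,
    });

    let response: serde_json::Value = ureq::post(&format!("{endpoint}api/generate"))
        .send_json(body)?
        .into_json()?;

    Ok(response["response"].as_str().unwrap_or_default().to_owned())
}

Note that the prefix and suffix simply bracket the article text, so the empty default for smart-summary-prompt-suffix leaves the prompt ending with the article itself.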