<?xml version="1.0" encoding="utf-8"?>
<!--
  GSettings schema for the Bowl application
  (path /xyz/vikanezrimaya/kittybox/Bowl/).

  The @app-id@ and @gettext-package@ tokens are build-time
  placeholders substituted before compilation — presumably by the
  build system (e.g. meson configure_file); TODO confirm.

  All string defaults below are GVariant text-format values, hence
  the explicit double quotes inside <default>.
-->
<schemalist>
<schema path="/xyz/vikanezrimaya/kittybox/Bowl/" id="@app-id@" gettext-domain="@gettext-package@">
<!-- Base URL of the Ollama API queried for the Smart Summary feature. -->
<key name="llm-endpoint" type="s">
<default>"http://localhost:11434/"</default>
<summary>LLM API endpoint</summary>
<description>
Ollama API endpoint used to query an LLM for Smart Summary.
</description>
</key>
<!-- Name of the model Ollama loads to generate summaries. -->
<key name="smart-summary-model" type="s">
<default>"llama3.1:8b-instruct-q8_0"</default>
<summary>Smart Summary LLM</summary>
<!-- TRANSLATORS: please keep the link intact -->
<description>
<!-- NOTE(review): the CDATA below embeds an HTML <a> link in the
     description; verify that consumers (e.g. dconf-editor or the
     app's own preferences UI) render or at least tolerate markup
     here, since GSettings descriptions are conventionally plain
     text. -->
<![CDATA[
The model that Ollama will load to produce
summaries. Available models can be seen at
<a href="https://ollama.com/library">Ollama library</a>.
]]>
</description>
</key>
<!-- Whether to warn the user before using LLM-backed features. -->
<key name="smart-summary-show-warning" type="b">
<default>true</default>
<summary>Show warnings on LLM enhancement features</summary>
<description>
If enabled, will show warnings regarding LLM enhancement
features.
</description>
</key>
<!-- System prompt sent to the LLM; the default targets Llama 3.1-8B
     and English-language posts (per the description below). -->
<key name="smart-summary-system-prompt" type="s">
<default>"You are a helpful AI assistant embedded into a blog authoring tool. You will be provided with a text to summarize. Reply only, strictly with a one-sentence summary of the provided text, and don't write anything else."</default>
<summary>LLM system prompt</summary>
<description>
The system prompt provided to the LLM. For best results, it
should instruct the LLM to provide a one-sentence summary of
the document it receives.
The default system prompt is tested for Llama 3.1-8B and
should work for posts written mainly in English. Performance
with other languages is untested.
</description>
</key>
<!-- Text prepended to the article body when building the prompt. -->
<key name="smart-summary-prompt-prefix" type="s">
<default>"Summarize the following text:"</default>
<summary>Smart Summary prompt prefix</summary>
<description>
What the text is prefixed with when pasted into the LLM prompt.
Something like "Summarize this text:" works well.
</description>
</key>
<!-- Text appended after the article body; empty by default. -->
<key name="smart-summary-prompt-suffix" type="s">
<default>""</default>
<summary>Smart Summary prompt suffix</summary>
<description>
Append this to the prompt after the article text.
</description>
</key>
</schema>
</schemalist>