OllamaInterface/i18n/ollama_empty.ts

changeset 19
1797a34618d8
child 20
8cb7bfe07e15
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/i18n/ollama_empty.ts	Thu Aug 29 13:59:50 2024 +0200
@@ -0,0 +1,590 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1">
+  <context>
+    <name>OllamaChatWidget</name>
+    <message>
+      <location filename="../OllamaChatWidget.py" line="41" />
+      <source>&lt;b&gt;{0} - {1}&lt;/b&gt;</source>
+      <comment>title, model name</comment>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaClient</name>
+    <message>
+      <location filename="../OllamaClient.py" line="301" />
+      <source>100% CPU</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="303" />
+      <source>100% GPU</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="305" />
+      <source>unknown</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="309" />
+      <source>{0}% / {1}% CPU / GPU</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="458" />
+      <source>&lt;p&gt;A network error occurred.&lt;/p&gt;&lt;p&gt;Error: {0}&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="587" />
+      <source>&lt;p&gt;Error: The local server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaClient.py" line="591" />
+      <source>&lt;p&gt;Error: The configured server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaDetailedModelsDialog</name>
+    <message>
+      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
+      <source>Available Models</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
+      <source>Name</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
+      <source>ID</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
+      <source>Size</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
+      <source>Modified At</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaHistoryWidget</name>
+    <message>
+      <location filename="../OllamaHistoryWidget.ui" line="0" />
+      <source>Press to start a new chat based on the current history or switch to an already opened chat.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaHistoryWidget.ui" line="0" />
+      <source>Press to edit the chat title.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaHistoryWidget.ui" line="0" />
+      <source>Press to delete this chat history.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaHistoryWidget.py" line="126" />
+      <source>Edit Chat Title</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaHistoryWidget.py" line="127" />
+      <source>Enter the new title:</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaPage</name>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>&lt;b&gt;Configure 'ollama' Interface&lt;/b&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Server URL</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Scheme:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Select the scheme of the 'ollama' server URL.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>http</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>https</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Host:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the host name of the 'ollama' server.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Port:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the port of the 'ollama' server URL.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Local Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the port of the local 'ollama' server.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Model Library</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>URL:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the URL of the 'ollama' model library.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Heartbeat Timer:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the heartbeat timeout value (0 = disable).</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Disabled</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source> s</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Use streaming chat response</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaPullProgressDialog</name>
+    <message>
+      <location filename="../OllamaPullProgressDialog.ui" line="0" />
+      <source>Install Model</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaPullProgressDialog.py" line="138" />
+      <source>&lt;p&gt;Installing model &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaPullProgressDialog.py" line="182" />
+      <source>{0} / {1}</source>
+      <comment>completed / total</comment>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaPullProgressDialog.py" line="201" />
+      <source>Error: {0}</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaRunningModelsDialog</name>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>Running Models</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>Name</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>ID</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>Size</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>Processor</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
+      <source>Expires</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>OllamaWidget</name>
+    <message>
+      <location filename="../OllamaWidget.py" line="191" />
+      <source>&lt;b&gt;ollama Server Version {0}&lt;/b&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="339" />
+      <source>Save Chat History</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="340" />
+      <source>&lt;p&gt;The chat history could not be saved to &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="396" />
+      <location filename="../OllamaWidget.py" line="375" />
+      <source>Load Chat History</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="376" />
+      <source>&lt;p&gt;The chat history could not be loaded from &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="397" />
+      <source>&lt;p&gt;These chats were not loaded because they already existed.&lt;/p&gt;{0}</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="474" />
+      <location filename="../OllamaWidget.py" line="467" />
+      <source>New Chat</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="468" />
+      <source>A model has to be selected first. Aborting...</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="475" />
+      <source>Enter a title for the new chat:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="632" />
+      <source>Chat History</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="633" />
+      <source>Load</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="636" />
+      <source>Clear All</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="639" />
+      <source>Import</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="640" />
+      <source>Export</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="646" />
+      <source>Model Management</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="915" />
+      <location filename="../OllamaWidget.py" line="647" />
+      <source>List Models</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="933" />
+      <location filename="../OllamaWidget.py" line="649" />
+      <source>List Running Models</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="653" />
+      <source>Show Model Library</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="959" />
+      <location filename="../OllamaWidget.py" line="657" />
+      <source>Install Model</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="1032" />
+      <location filename="../OllamaWidget.py" line="1023" />
+      <location filename="../OllamaWidget.py" line="1012" />
+      <location filename="../OllamaWidget.py" line="660" />
+      <source>Remove Model</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="667" />
+      <source>Local Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="669" />
+      <source>Start with Monitoring</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="673" />
+      <source>Start</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="676" />
+      <source>Stop</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="690" />
+      <source>Configure...</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="730" />
+      <source>Clear All Chat Histories</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="731" />
+      <source>&lt;p&gt;Do you really want to delete all chat histories? This is &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="746" />
+      <source>Import Chat History</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="748" />
+      <source>Chat History Files (*.json);;All Files (*)</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="781" />
+      <location filename="../OllamaWidget.py" line="749" />
+      <source>Chat History Files (*.json)</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="779" />
+      <location filename="../OllamaWidget.py" line="769" />
+      <source>Export Chat History</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="770" />
+      <source>Select the chats to be exported:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="868" />
+      <source>Run Local 'ollama' Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="869" />
+      <source>The local 'ollama' server process could not be started.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="916" />
+      <source>There are no models available.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="934" />
+      <source>There are no models running.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="960" />
+      <source>Enter the name of the model to be installed:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="1013" />
+      <source>Select the model to be removed by the 'ollama' server:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="1024" />
+      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; was deleted successfully.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="1033" />
+      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; could not be removed from the 'ollama' server.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="1049" />
+      <source>Network Error</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Press to reload the models list and update the 'ollama' version information.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Select to reload the list of selectable models.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Select the model for the chat.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Press to start a new chat.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Enter the message to be sent to the 'ollama' server.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Enter Message</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.ui" line="0" />
+      <source>Press to send the message of the current chat to the 'ollama' server.</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>PluginOllamaInterface</name>
+    <message>
+      <location filename="../../PluginAiOllama.py" line="181" />
+      <location filename="../../PluginAiOllama.py" line="180" />
+      <location filename="../../PluginAiOllama.py" line="176" />
+      <location filename="../../PluginAiOllama.py" line="82" />
+      <source>ollama AI Interface</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../../PluginAiOllama.py" line="182" />
+      <source>Ctrl+Alt+Shift+O</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../../PluginAiOllama.py" line="188" />
+      <source>Switch the input focus to the ollama AI window.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../../PluginAiOllama.py" line="191" />
+      <source>&lt;b&gt;Activate ollama AI Interface&lt;/b&gt;&lt;p&gt;This switches the input focus to the ollama AI window.&lt;/p&gt;</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+  <context>
+    <name>RunOllamaServerDialog</name>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>ollama Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>Output</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>Press to restart the local ollama server.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>Re-Start Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>Press to stop the running ollama server.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.ui" line="0" />
+      <source>Stop Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.py" line="79" />
+      <source>Run Local 'ollama' Server</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../RunOllamaServerDialog.py" line="80" />
+      <source>The local 'ollama' server process could not be started.</source>
+      <translation type="unfinished" />
+    </message>
+  </context>
+</TS>

eric ide

mercurial