--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/OllamaInterface/i18n/ollama_empty.ts Thu Aug 29 13:59:50 2024 +0200 @@ -0,0 +1,590 @@ +<?xml version="1.0" encoding="utf-8"?> +<!DOCTYPE TS> +<TS version="2.1"> + <context> + <name>OllamaChatWidget</name> + <message> + <location filename="../OllamaChatWidget.py" line="41" /> + <source><b>{0} - {1}</b></source> + <comment>title, model name</comment> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaClient</name> + <message> + <location filename="../OllamaClient.py" line="301" /> + <source>100% CPU</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="303" /> + <source>100% GPU</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="305" /> + <source>unknown</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="309" /> + <source>{0}% / {1}% CPU / GPU</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="458" /> + <source><p>A network error occurred.</p><p>Error: {0}</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="587" /> + <source><p>Error: The local server at <b>{0}</b> is not responding.</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaClient.py" line="591" /> + <source><p>Error: The configured server at <b>{0}</b> is not responding.</p></source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaDetailedModelsDialog</name> + <message> + <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> + <source>Available Models</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> 
+ <source>Name</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> + <source>ID</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> + <source>Size</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> + <source>Modified At</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaHistoryWidget</name> + <message> + <location filename="../OllamaHistoryWidget.ui" line="0" /> + <source>Press to start a new chat based on the current history or switch to an already opened chat.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaHistoryWidget.ui" line="0" /> + <source>Press to edit the chat title.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaHistoryWidget.ui" line="0" /> + <source>Press to delete this chat history.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaHistoryWidget.py" line="126" /> + <source>Edit Chat Title</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaHistoryWidget.py" line="127" /> + <source>Enter the new title:</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaPage</name> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source><b>Configure 'ollama' Interface</b></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Server URL</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + 
<source>Scheme:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Select the scheme of the 'ollama' server URL.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>http</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>https</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Host:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Enter the host name of the 'ollama' server.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Port:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Enter the port of the 'ollama' server URL.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Local Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Enter the port of the local 'ollama' server.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Model Library</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>URL:</source> + <translation 
type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Enter the URL of the 'ollama' model library.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Heartbeat Timer:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Enter the heartbeat timeout value (0 = disable).</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Disabled</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source> s</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> + <source>Use streaming chat response</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaPullProgressDialog</name> + <message> + <location filename="../OllamaPullProgressDialog.ui" line="0" /> + <source>Install Model</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaPullProgressDialog.py" line="138" /> + <source><p>Installing model <b>{0}</b>.</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaPullProgressDialog.py" line="182" /> + <source>{0} / {1}</source> + <comment>completed / total</comment> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaPullProgressDialog.py" line="201" /> + <source>Error: {0}</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaRunningModelsDialog</name> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + 
<source>Running Models</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + <source>Name</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + <source>ID</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + <source>Size</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + <source>Processor</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaRunningModelsDialog.ui" line="0" /> + <source>Expires</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>OllamaWidget</name> + <message> + <location filename="../OllamaWidget.py" line="191" /> + <source><b>ollama Server Version {0}</b></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="339" /> + <source>Save Chat History</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="340" /> + <source><p>The chat history could not be saved to <b>{0}</b>.</p><p>Reason: {1}</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="396" /> + <location filename="../OllamaWidget.py" line="375" /> + <source>Load Chat History</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="376" /> + <source><p>The chat history could not be loaded from <b>{0}</b>.</p><p>Reason: {1}</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="397" /> + <source><p>These chats were not loaded because they already 
existed.</p>{0}</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="474" /> + <location filename="../OllamaWidget.py" line="467" /> + <source>New Chat</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="468" /> + <source>A model has to be selected first. Aborting...</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="475" /> + <source>Enter a title for the new chat:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="632" /> + <source>Chat History</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="633" /> + <source>Load</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="636" /> + <source>Clear All</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="639" /> + <source>Import</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="640" /> + <source>Export</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="646" /> + <source>Model Management</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="915" /> + <location filename="../OllamaWidget.py" line="647" /> + <source>List Models</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="933" /> + <location filename="../OllamaWidget.py" line="649" /> + <source>List Running Models</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="653" /> + <source>Show 
Model Library</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="959" /> + <location filename="../OllamaWidget.py" line="657" /> + <source>Install Model</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="1032" /> + <location filename="../OllamaWidget.py" line="1023" /> + <location filename="../OllamaWidget.py" line="1012" /> + <location filename="../OllamaWidget.py" line="660" /> + <source>Remove Model</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="667" /> + <source>Local Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="669" /> + <source>Start with Monitoring</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="673" /> + <source>Start</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="676" /> + <source>Stop</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="690" /> + <source>Configure...</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="730" /> + <source>Clear All Chat Histories</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="731" /> + <source><p>Do you really want to delete all chat histories? 
This is <b>irreversible</b>.</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="746" /> + <source>Import Chat History</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="748" /> + <source>Chat History Files (*.json);;All Files (*)</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="781" /> + <location filename="../OllamaWidget.py" line="749" /> + <source>Chat History Files (*.json)</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="779" /> + <location filename="../OllamaWidget.py" line="769" /> + <source>Export Chat History</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="770" /> + <source>Select the chats to be exported:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="868" /> + <source>Run Local 'ollama' Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="869" /> + <source>The local 'ollama' server process could not be started.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="916" /> + <source>There are no models available.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="934" /> + <source>There are no models running.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="960" /> + <source>Enter the name of the model to be installed:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="1013" /> + <source>Select the model to be 
removed by the 'ollama' server:</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="1024" /> + <source><p>The model <b>{0}</b> was deleted successfully.</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="1033" /> + <source><p>The model <b>{0}</b> could not be removed from the 'ollama' server.</p></source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.py" line="1049" /> + <source>Network Error</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Press to reload the models list and update the 'ollama' version information.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Select to reload the list of selectable models.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Select the model for the chat.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Press to start a new chat.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Enter the message to be sent to the 'ollama' server.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Enter Message</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../OllamaWidget.ui" line="0" /> + <source>Press to send the message of the current chat to the 'ollama' server.</source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>PluginOllamaInterface</name> + <message> + <location 
filename="../../PluginAiOllama.py" line="181" /> + <location filename="../../PluginAiOllama.py" line="180" /> + <location filename="../../PluginAiOllama.py" line="176" /> + <location filename="../../PluginAiOllama.py" line="82" /> + <source>ollama AI Interface</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../../PluginAiOllama.py" line="182" /> + <source>Ctrl+Alt+Shift+O</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../../PluginAiOllama.py" line="188" /> + <source>Switch the input focus to the ollama AI window.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../../PluginAiOllama.py" line="191" /> + <source><b>Activate ollama AI Interface</b><p>This switches the input focus to the ollama AI window.</p></source> + <translation type="unfinished" /> + </message> + </context> + <context> + <name>RunOllamaServerDialog</name> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>ollama Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>Output</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>Press to restart the local ollama server.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>Re-Start Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>Press to stop the running ollama server.</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.ui" line="0" /> + <source>Stop Server</source> + <translation type="unfinished" /> + </message> + <message> + <location 
filename="../RunOllamaServerDialog.py" line="79" /> + <source>Run Local 'ollama' Server</source> + <translation type="unfinished" /> + </message> + <message> + <location filename="../RunOllamaServerDialog.py" line="80" /> + <source>The local 'ollama' server process could not be started.</source> + <translation type="unfinished" /> + </message> + </context> +</TS>