OllamaInterface/i18n/ollama_ru.ts

changeset 24:f4d5108e90fe
parent    22:eafd0dce0393
child     30:79928998b29e
--- a/OllamaInterface/i18n/ollama_ru.ts	Tue Sep 03 15:51:28 2024 +0200
+++ b/OllamaInterface/i18n/ollama_ru.ts	Tue Sep 03 17:06:05 2024 +0200
@@ -164,18 +164,38 @@
     </message>
     <message>
       <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
-      <source>Model Library</source>
-      <translation>Библиотека моделей</translation>
+      <source>URLs</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Model Library:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the URL of the 'ollama' model library. Leave empty to use the default URL.</source>
+      <translation type="unfinished" />
     </message>
     <message>
       <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
-      <source>URL:</source>
-      <translation>URL-адрес:</translation>
+      <source>Download:</source>
+      <translation type="unfinished" />
     </message>
     <message>
       <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
-      <source>Enter the URL of the 'ollama' model library.</source>
-      <translation>Введите URL-адрес библиотеки моделей 'ollama'.</translation>
+      <source>Enter the URL of the 'ollama' download page. Leave empty to use the default URL.</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Blog:</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
+      <source>Enter the URL of the 'ollama' blog. Leave empty to use the default URL.</source>
+      <translation type="unfinished" />
     </message>
     <message>
       <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
@@ -202,6 +222,18 @@
       <source>Use streaming chat response</source>
       <translation>Использовать ответ потокового чата</translation>
     </message>
+    <message>
+      <source>Model Library</source>
+      <translation type="vanished">Библиотека моделей</translation>
+    </message>
+    <message>
+      <source>URL:</source>
+      <translation type="vanished">URL-адрес:</translation>
+    </message>
+    <message>
+      <source>Enter the URL of the 'ollama' model library.</source>
+      <translation type="vanished">Введите URL-адрес библиотеки моделей 'ollama'.</translation>
+    </message>
   </context>
   <context>
     <name>OllamaPullProgressDialog</name>
@@ -340,13 +372,13 @@
       <translation>Управление моделью</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="958" />
+      <location filename="../OllamaWidget.py" line="979" />
       <location filename="../OllamaWidget.py" line="690" />
       <source>List Models</source>
       <translation>Список моделей</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="976" />
+      <location filename="../OllamaWidget.py" line="997" />
       <location filename="../OllamaWidget.py" line="692" />
       <source>List Running Models</source>
       <translation>Список работающих моделей</translation>
@@ -357,123 +389,143 @@
       <translation>Показать библиотеку моделей</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1002" />
-      <location filename="../OllamaWidget.py" line="700" />
+      <location filename="../OllamaWidget.py" line="1014" />
+      <location filename="../OllamaWidget.py" line="701" />
       <source>Install Model</source>
       <translation>Установить модель</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1075" />
-      <location filename="../OllamaWidget.py" line="1066" />
-      <location filename="../OllamaWidget.py" line="1055" />
-      <location filename="../OllamaWidget.py" line="703" />
+      <location filename="../OllamaWidget.py" line="1087" />
+      <location filename="../OllamaWidget.py" line="1078" />
+      <location filename="../OllamaWidget.py" line="1067" />
+      <location filename="../OllamaWidget.py" line="704" />
       <source>Remove Model</source>
       <translation>Удалить модель</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="710" />
+      <location filename="../OllamaWidget.py" line="711" />
       <source>Local Server</source>
       <translation>Локальный сервер</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="712" />
+      <location filename="../OllamaWidget.py" line="713" />
       <source>Start with Monitoring</source>
       <translation>Старт с мониторингом</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="716" />
+      <location filename="../OllamaWidget.py" line="717" />
       <source>Start</source>
       <translation>Пуск</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="719" />
+      <location filename="../OllamaWidget.py" line="720" />
       <source>Stop</source>
       <translation>Стоп</translation>
     </message>
     <message>
+      <location filename="../OllamaWidget.py" line="727" />
+      <source>ollama URLs</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="729" />
+      <source>Model Library</source>
+      <translation type="unfinished">Библиотека моделей</translation>
+    </message>
+    <message>
       <location filename="../OllamaWidget.py" line="733" />
+      <source>Download</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="737" />
+      <source>Blog</source>
+      <translation type="unfinished" />
+    </message>
+    <message>
+      <location filename="../OllamaWidget.py" line="754" />
       <source>Configure...</source>
       <translation>Конфигурировать...</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="773" />
+      <location filename="../OllamaWidget.py" line="794" />
       <source>Clear All Chat Histories</source>
       <translation>Очистить все истории чатов</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="774" />
+      <location filename="../OllamaWidget.py" line="795" />
       <source>&lt;p&gt;Do you really want to delete all chat histories? This is &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</source>
       <translation>&lt;p&gt;Вы действительно хотите удалить все истории чата? Это действие &lt;b&gt;необратимо&lt;/b&gt;.&lt;/p&gt;</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="789" />
+      <location filename="../OllamaWidget.py" line="810" />
       <source>Import Chat History</source>
       <translation>Импорт истории чата</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="791" />
+      <location filename="../OllamaWidget.py" line="812" />
       <source>Chat History Files (*.json);;All Files (*)</source>
       <translation>Файлы истории чата (*.json);;Все файлы (*)</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="824" />
-      <location filename="../OllamaWidget.py" line="792" />
+      <location filename="../OllamaWidget.py" line="845" />
+      <location filename="../OllamaWidget.py" line="813" />
       <source>Chat History Files (*.json)</source>
       <translation>Файлы истории чата (*.json)</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="822" />
-      <location filename="../OllamaWidget.py" line="812" />
+      <location filename="../OllamaWidget.py" line="843" />
+      <location filename="../OllamaWidget.py" line="833" />
       <source>Export Chat History</source>
       <translation>Экспорт истории чата</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="813" />
+      <location filename="../OllamaWidget.py" line="834" />
       <source>Select the chats to be exported:</source>
       <translation>Выберите чаты для экспорта:</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="911" />
+      <location filename="../OllamaWidget.py" line="932" />
       <source>Run Local 'ollama' Server</source>
       <translation>Запустить локальный сервер 'ollama'</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="912" />
+      <location filename="../OllamaWidget.py" line="933" />
       <source>The loacl 'ollama' server process could not be started.</source>
       <translation>Не удалось запустить локальный процесс сервера 'ollama'.</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="959" />
+      <location filename="../OllamaWidget.py" line="980" />
       <source>There are no models available.</source>
       <translation>Нет доступных моделей.</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="977" />
+      <location filename="../OllamaWidget.py" line="998" />
       <source>There are no models running.</source>
       <translation>Нет запущенных моделей.</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1003" />
+      <location filename="../OllamaWidget.py" line="1015" />
       <source>Enter the name of the model to be installed:</source>
       <translation>Введите имя модели, которую нужно установить:</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1056" />
+      <location filename="../OllamaWidget.py" line="1068" />
       <source>Select the model to be removed by the 'ollama' server:</source>
       <translation>Выберите модель, которая будет удалена сервером 'ollama':</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1067" />
+      <location filename="../OllamaWidget.py" line="1079" />
       <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; was deleted successfully.&lt;/p&gt;</source>
       <translation>&lt;p&gt;Модель &lt;b&gt;{0}&lt;/b&gt; была успешно удалена.&lt;/p&gt;</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1076" />
+      <location filename="../OllamaWidget.py" line="1088" />
       <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; could not be removed from the 'ollama' server.&lt;/p&gt;</source>
       <translation>&lt;p&gt;Модель &lt;b&gt;{0}&lt;/b&gt; не удалось удалить с сервера 'ollama'.&lt;/p&gt;</translation>
     </message>
     <message>
-      <location filename="../OllamaWidget.py" line="1092" />
+      <location filename="../OllamaWidget.py" line="1115" />
       <source>Network Error</source>
       <translation>Ошибка сети</translation>
     </message>
@@ -511,25 +563,25 @@
   <context>
     <name>PluginOllamaInterface</name>
     <message>
-      <location filename="../../PluginAiOllama.py" line="181" />
-      <location filename="../../PluginAiOllama.py" line="180" />
-      <location filename="../../PluginAiOllama.py" line="176" />
-      <location filename="../../PluginAiOllama.py" line="82" />
+      <location filename="../../PluginAiOllama.py" line="184" />
+      <location filename="../../PluginAiOllama.py" line="183" />
+      <location filename="../../PluginAiOllama.py" line="179" />
+      <location filename="../../PluginAiOllama.py" line="83" />
       <source>ollama AI Interface</source>
       <translation>Интерфейс ollama AI</translation>
     </message>
     <message>
-      <location filename="../../PluginAiOllama.py" line="182" />
+      <location filename="../../PluginAiOllama.py" line="185" />
       <source>Ctrl+Alt+Shift+O</source>
       <translation>Ctrl+Alt+Shift+O</translation>
     </message>
     <message>
-      <location filename="../../PluginAiOllama.py" line="188" />
+      <location filename="../../PluginAiOllama.py" line="191" />
       <source>Switch the input focus to the ollama AI window.</source>
       <translation>Переключите фокус ввода на окно ollama AI.</translation>
     </message>
     <message>
-      <location filename="../../PluginAiOllama.py" line="191" />
+      <location filename="../../PluginAiOllama.py" line="194" />
       <source>&lt;b&gt;Activate ollama AI Interface&lt;/b&gt;&lt;p&gt;This switches the input focus to the ollama AI window.&lt;/p&gt;</source>
      <translation>&lt;b&gt;Активируйте интерфейс ollama AI&lt;/b&gt;&lt;p&gt; Это переключит фокус ввода на окно ollama AI.&lt;/p&gt;</translation>
     </message>
