diff -r 22245a10b118 -r eafd0dce0393 OllamaInterface/i18n/ollama_ru.ts --- a/OllamaInterface/i18n/ollama_ru.ts Fri Aug 30 15:22:47 2024 +0200 +++ b/OllamaInterface/i18n/ollama_ru.ts Fri Aug 30 19:47:23 2024 +0200 @@ -1,13 +1,13 @@ <?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> -<TS version="2.1"> +<TS version="2.1" language="ru"> <context> <name>OllamaChatWidget</name> <message> <location filename="../OllamaChatWidget.py" line="41" /> <source><b>{0} - {1}</b></source> <comment>title, model name</comment> - <translation type="unfinished" /> + <translation><b>{0} - {1}</b></translation> </message> </context> <context> @@ -15,37 +15,37 @@ <message> <location filename="../OllamaClient.py" line="301" /> <source>100% CPU</source> - <translation type="unfinished" /> + <translation>100% CPU</translation> </message> <message> <location filename="../OllamaClient.py" line="303" /> <source>100% GPU</source> - <translation type="unfinished" /> + <translation>100% GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="305" /> <source>unknown</source> - <translation type="unfinished" /> + <translation>неизвестно</translation> </message> <message> <location filename="../OllamaClient.py" line="309" /> <source>{0}% / {1}% CPU / GPU</source> - <translation type="unfinished" /> + <translation>{0}% / {1}% CPU / GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="458" /> <source><p>A network error occurred.</p><p>Error: {0}</p></source> - <translation type="unfinished" /> + <translation><p>Произошла сетевая ошибка.</p><p>Ошибка: {0}</p></translation> </message> <message> <location filename="../OllamaClient.py" line="587" /> <source><p>Error: The local server at <b>{0}</b> is not responding.</p></source> - <translation type="unfinished" /> + <translation><p>Ошибка: Локальный сервер по адресу <b>{0}</b> не отвечает.</p></translation> </message> <message> - <location filename="../OllamaClient.py" line="591" /> + <location 
filename="../OllamaClient.py" line="589" /> <source><p>Error: The configured server at <b>{0}</b> is not responding.</p></source> - <translation type="unfinished" /> + <translation><p>Ошибка: Настроенный сервер по адресу <b>{0}</b> не отвечает.</p></translation> </message> </context> <context> @@ -53,27 +53,27 @@ <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Available Models</source> - <translation type="unfinished" /> + <translation>Доступные модели</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Name</source> - <translation type="unfinished" /> + <translation>Имя</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>ID</source> - <translation type="unfinished" /> + <translation>ID</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Size</source> - <translation type="unfinished" /> + <translation>Размер</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Modified At</source> - <translation type="unfinished" /> + <translation>Изменена</translation> </message> </context> <context> @@ -81,32 +81,32 @@ <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to start a new chat based on the current history or switch to an already opened chat.</source> - <translation type="unfinished" /> + <translation>Открыть новый чат на основе текущей истории или переключиться на уже открытый чат.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to edit the chat title.</source> - <translation type="unfinished" /> + <translation>Редактировать заголовок чата.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to view the current chat history in a separate window.</source> - 
<translation type="unfinished" /> + <translation>Просмотреть текущую историю чата в отдельном окне.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to delete this chat history.</source> - <translation type="unfinished" /> + <translation>Удалить историю этого чата.</translation> </message> <message> <location filename="../OllamaHistoryWidget.py" line="138" /> <source>Edit Chat Title</source> - <translation type="unfinished" /> + <translation>Редактировать заголовок чата</translation> </message> <message> <location filename="../OllamaHistoryWidget.py" line="139" /> <source>Enter the new title:</source> - <translation type="unfinished" /> + <translation>Введите новый заголовок:</translation> </message> </context> <context> @@ -114,93 +114,93 @@ <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source><b>Configure 'ollama' Interface</b></source> - <translation type="unfinished" /> + <translation><b>Настройка интерфейса 'ollama'</b></translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Server URL</source> - <translation type="unfinished" /> + <translation>URL-сервер</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Scheme:</source> - <translation type="unfinished" /> + <translation>Схема:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Select the scheme of the 'ollama' server URL.</source> - <translation type="unfinished" /> + <translation>Выберите схему URL-сервера 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> - <translation type="unfinished" /> + <translation>Хост:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the host name of the 'ollama' 
server.</source> - <translation type="unfinished" /> + <translation>Введите имя хоста сервера 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Port:</source> - <translation type="unfinished" /> + <translation>Порт:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the 'ollama' server URL.</source> - <translation type="unfinished" /> + <translation>Введите порт URL-сервера 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Local Server</source> - <translation type="unfinished" /> + <translation>Локальный сервер</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the local 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Введите порт локального сервера 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Model Library</source> - <translation type="unfinished" /> + <translation>Библиотека моделей</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>URL:</source> - <translation type="unfinished" /> + <translation>URL-адрес:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the URL of the 'ollama' model library.</source> - <translation type="unfinished" /> + <translation>Введите URL-адрес библиотеки моделей 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Heartbeat Timer:</source> - <translation type="unfinished" /> + <translation>Таймер пульса:</translation> </message> <message> <location 
filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the heartbeat timeout value (0 = disable).</source> - <translation type="unfinished" /> + <translation>Введите значение таймаута пульса (0 = отключить).</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Disabled</source> - <translation type="unfinished" /> + <translation>отключен</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source> s</source> - <translation type="unfinished" /> + <translation> сек</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Use streaming chat response</source> - <translation type="unfinished" /> + <translation>Использовать потоковый ответ чата</translation> </message> </context> <context> @@ -208,23 +208,23 @@ <message> <location filename="../OllamaPullProgressDialog.ui" line="0" /> <source>Install Model</source> - <translation type="unfinished" /> + <translation>Установить модель</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="138" /> <source><p>Installing model <b>{0}</b>.</p></source> - <translation type="unfinished" /> + <translation><p>Установка модели <b>{0}</b>.</p></translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="182" /> <source>{0} / {1}</source> <comment>completed / total</comment> - <translation type="unfinished" /> + <translation>{0} / {1}</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="201" /> <source>Error: {0}</source> - <translation type="unfinished" /> + <translation>Ошибка: {0}</translation> </message> </context> <context> @@ -232,32 +232,32 @@ <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Running Models</source> - <translation type="unfinished" /> + <translation>Работающие модели</translation> </message> 
<message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Name</source> - <translation type="unfinished" /> + <translation>Имя</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>ID</source> - <translation type="unfinished" /> + <translation>ID</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Size</source> - <translation type="unfinished" /> + <translation>Размер</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Processor</source> - <translation type="unfinished" /> + <translation>Процессор</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Expires</source> - <translation type="unfinished" /> + <translation>Завершается</translation> </message> </context> <context> @@ -265,102 +265,102 @@ <message> <location filename="../OllamaWidget.py" line="193" /> <source><b>ollama Server Version {0}</b></source> - <translation type="unfinished" /> + <translation><b>Версия сервера ollama {0}</b></translation> </message> <message> <location filename="../OllamaWidget.py" line="342" /> <source>Save Chat History</source> - <translation type="unfinished" /> + <translation>Сохранить историю чата</translation> </message> <message> <location filename="../OllamaWidget.py" line="343" /> <source><p>The chat history could not be saved to <b>{0}</b>.</p><p>Reason: {1}</p></source> - <translation type="unfinished" /> + <translation><p>Не удалось сохранить историю чата в <b>{0}</b>.</p><p>Причина: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="399" /> <location filename="../OllamaWidget.py" line="378" /> <source>Load Chat History</source> - <translation type="unfinished" /> + <translation>Загрузить историю чата</translation> </message> <message> <location filename="../OllamaWidget.py" line="379" /> 
<source><p>The chat history could not be loaded from <b>{0}</b>.</p><p>Reason: {1}</p></source> - <translation type="unfinished" /> + <translation><p>Не удалось загрузить историю чата из <b>{0}</b>.</p><p>Причина: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="400" /> <source><p>These chats were not loaded because they already existed.</p>{0}</source> - <translation type="unfinished" /> + <translation><p>Эти чаты не были загружены, потому что они уже существуют.</p>{0}</translation> </message> <message> <location filename="../OllamaWidget.py" line="477" /> <location filename="../OllamaWidget.py" line="470" /> <source>New Chat</source> - <translation type="unfinished" /> + <translation>Новый чат</translation> </message> <message> <location filename="../OllamaWidget.py" line="471" /> <source>A model has to be selected first. Aborting...</source> - <translation type="unfinished" /> + <translation>Сначала необходимо выбрать модель. Прерывание...</translation> </message> <message> <location filename="../OllamaWidget.py" line="478" /> <source>Enter a title for the new chat:</source> - <translation type="unfinished" /> + <translation>Введите заголовок нового чата:</translation> </message> <message> <location filename="../OllamaWidget.py" line="675" /> <source>Chat History</source> - <translation type="unfinished" /> + <translation>История чата</translation> </message> <message> <location filename="../OllamaWidget.py" line="676" /> <source>Load</source> - <translation type="unfinished" /> + <translation>Загрузить</translation> </message> <message> <location filename="../OllamaWidget.py" line="679" /> <source>Clear All</source> - <translation type="unfinished" /> + <translation>Очистить все</translation> </message> <message> <location filename="../OllamaWidget.py" line="682" /> <source>Import</source> - <translation type="unfinished" /> + <translation>Импорт</translation> </message> <message> <location filename="../OllamaWidget.py" 
line="683" /> <source>Export</source> - <translation type="unfinished" /> + <translation>Экспорт</translation> </message> <message> <location filename="../OllamaWidget.py" line="689" /> <source>Model Management</source> - <translation type="unfinished" /> + <translation>Управление моделями</translation> </message> <message> <location filename="../OllamaWidget.py" line="958" /> <location filename="../OllamaWidget.py" line="690" /> <source>List Models</source> - <translation type="unfinished" /> + <translation>Список моделей</translation> </message> <message> <location filename="../OllamaWidget.py" line="976" /> <location filename="../OllamaWidget.py" line="692" /> <source>List Running Models</source> - <translation type="unfinished" /> + <translation>Список работающих моделей</translation> </message> <message> <location filename="../OllamaWidget.py" line="696" /> <source>Show Model Library</source> - <translation type="unfinished" /> + <translation>Показать библиотеку моделей</translation> </message> <message> <location filename="../OllamaWidget.py" line="1002" /> <location filename="../OllamaWidget.py" line="700" /> <source>Install Model</source> - <translation type="unfinished" /> + <translation>Установить модель</translation> </message> <message> <location filename="../OllamaWidget.py" line="1075" /> @@ -368,144 +368,144 @@ <location filename="../OllamaWidget.py" line="1055" /> <location filename="../OllamaWidget.py" line="703" /> <source>Remove Model</source> - <translation type="unfinished" /> + <translation>Удалить модель</translation> </message> <message> <location filename="../OllamaWidget.py" line="710" /> <source>Local Server</source> - <translation type="unfinished" /> + <translation>Локальный сервер</translation> </message> <message> <location filename="../OllamaWidget.py" line="712" /> <source>Start with Monitoring</source> - <translation type="unfinished" /> + <translation>Старт с мониторингом</translation> </message> <message> <location 
filename="../OllamaWidget.py" line="716" /> <source>Start</source> - <translation type="unfinished" /> + <translation>Пуск</translation> </message> <message> <location filename="../OllamaWidget.py" line="719" /> <source>Stop</source> - <translation type="unfinished" /> + <translation>Стоп</translation> </message> <message> <location filename="../OllamaWidget.py" line="733" /> <source>Configure...</source> - <translation type="unfinished" /> + <translation>Конфигурировать...</translation> </message> <message> <location filename="../OllamaWidget.py" line="773" /> <source>Clear All Chat Histories</source> - <translation type="unfinished" /> + <translation>Очистить все истории чатов</translation> </message> <message> <location filename="../OllamaWidget.py" line="774" /> <source><p>Do you really want to delete all chat histories? This is <b>irreversible</b>.</p></source> - <translation type="unfinished" /> + <translation><p>Вы действительно хотите удалить все истории чата? Это действие <b>необратимо</b>.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="789" /> <source>Import Chat History</source> - <translation type="unfinished" /> + <translation>Импорт истории чата</translation> </message> <message> <location filename="../OllamaWidget.py" line="791" /> <source>Chat History Files (*.json);;All Files (*)</source> - <translation type="unfinished" /> + <translation>Файлы истории чата (*.json);;Все файлы (*)</translation> </message> <message> <location filename="../OllamaWidget.py" line="824" /> <location filename="../OllamaWidget.py" line="792" /> <source>Chat History Files (*.json)</source> - <translation type="unfinished" /> + <translation>Файлы истории чата (*.json)</translation> </message> <message> <location filename="../OllamaWidget.py" line="822" /> <location filename="../OllamaWidget.py" line="812" /> <source>Export Chat History</source> - <translation type="unfinished" /> + <translation>Экспорт истории чата</translation> 
</message> <message> <location filename="../OllamaWidget.py" line="813" /> <source>Select the chats to be exported:</source> - <translation type="unfinished" /> + <translation>Выберите чаты для экспорта:</translation> </message> <message> <location filename="../OllamaWidget.py" line="911" /> <source>Run Local 'ollama' Server</source> - <translation type="unfinished" /> + <translation>Запустить локальный сервер 'ollama'</translation> </message> <message> <location filename="../OllamaWidget.py" line="912" /> <source>The loacl 'ollama' server process could not be started.</source> - <translation type="unfinished" /> + <translation>Не удалось запустить локальный процесс сервера 'ollama'.</translation> </message> <message> <location filename="../OllamaWidget.py" line="959" /> <source>There are no models available.</source> - <translation type="unfinished" /> + <translation>Нет доступных моделей.</translation> </message> <message> <location filename="../OllamaWidget.py" line="977" /> <source>There are no models running.</source> - <translation type="unfinished" /> + <translation>Нет запущенных моделей.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1003" /> <source>Enter the name of the model to be installed:</source> - <translation type="unfinished" /> + <translation>Введите имя модели, которую нужно установить:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1056" /> <source>Select the model to be removed by the 'ollama' server:</source> - <translation type="unfinished" /> + <translation>Выберите модель, которая будет удалена сервером 'ollama':</translation> </message> <message> <location filename="../OllamaWidget.py" line="1067" /> <source><p>The model <b>{0}</b> was deleted successfully.</p></source> - <translation type="unfinished" /> + <translation><p>Модель <b>{0}</b> была успешно удалена.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1076" /> <source><p>The 
model <b>{0}</b> could not be removed from the 'ollama' server.</p></source> - <translation type="unfinished" /> + <translation><p>Модель <b>{0}</b> не удалось удалить с сервера 'ollama'.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1092" /> <source>Network Error</source> - <translation type="unfinished" /> + <translation>Ошибка сети</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to reload the models list and update the 'ollama' version information.</source> - <translation type="unfinished" /> + <translation>Перезагрузить список моделей и обновить информацию о версии 'ollama'.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> - <translation type="unfinished" /> + <translation>Выберите модель для чата.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to start a new chat.</source> - <translation type="unfinished" /> + <translation>Начать новый чат.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter the message to be sent to the 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Введите сообщение, которое будет отправлено на сервер 'ollama'.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter Message</source> - <translation type="unfinished" /> + <translation>Введите сообщение</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to send the message of the current chat to the 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Отправить сообщение текущего чата на сервер 'ollama'.</translation> </message> </context> <context> @@ -516,22 +516,22 @@ <location filename="../../PluginAiOllama.py" line="176" /> <location filename="../../PluginAiOllama.py" 
line="82" /> <source>ollama AI Interface</source> - <translation type="unfinished" /> + <translation>Интерфейс ollama AI</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="182" /> <source>Ctrl+Alt+Shift+O</source> - <translation type="unfinished" /> + <translation>Ctrl+Alt+Shift+O</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="188" /> <source>Switch the input focus to the ollama AI window.</source> - <translation type="unfinished" /> + <translation>Переключить фокус ввода на окно ollama AI.</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="191" /> <source><b>Activate ollama AI Interface</b><p>This switches the input focus to the ollama AI window.</p></source> - <translation type="unfinished" /> + <translation><b>Активировать интерфейс ollama AI</b><p>Это переключит фокус ввода на окно ollama AI.</p></translation> </message> </context> <context> @@ -539,42 +539,42 @@ <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>ollama Server</source> - <translation type="unfinished" /> + <translation>Сервер ollama</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Output</source> - <translation type="unfinished" /> + <translation>Вывод</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Press to restart the local ollama server.</source> - <translation type="unfinished" /> + <translation>Перезапустить локальный сервер ollama.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Re-Start Server</source> - <translation type="unfinished" /> + <translation>Перезапустить сервер</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Press to stop the running ollama server.</source> - <translation type="unfinished" /> + <translation>Остановить 
запущенный сервер ollama.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Stop Server</source> - <translation type="unfinished" /> + <translation>Остановить сервер</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="79" /> <source>Run Local 'ollama' Server</source> - <translation type="unfinished" /> + <translation>Запустить локальный сервер 'ollama'</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> <source>The local 'ollama' server process could not be started.</source> - <translation type="unfinished" /> + <translation>Не удалось запустить процесс локального сервера 'ollama'.</translation> </message> </context> </TS>