OllamaInterface/i18n/ollama_ru.ts


<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="ru">
  <context>
    <name>OllamaChatWidget</name>
    <message>
      <location filename="../OllamaChatWidget.py" line="60" />
      <source>&lt;b&gt;{0} - {1}&lt;/b&gt;</source>
      <comment>title, model name</comment>
      <translation>&lt;b&gt;{0} - {1}&lt;/b&gt;</translation>
    </message>
  </context>
  <context>
    <name>OllamaClient</name>
    <message>
      <location filename="../OllamaClient.py" line="301" />
      <source>100% CPU</source>
      <translation>100% CPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="303" />
      <source>100% GPU</source>
      <translation>100% GPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="305" />
      <source>unknown</source>
      <translation>неизвестно</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="309" />
      <source>{0}% / {1}% CPU / GPU</source>
      <translation>{0}% / {1}% CPU / GPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="465" />
      <source>&lt;p&gt;A network error occurred.&lt;/p&gt;&lt;p&gt;Error: {0}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Произошла сетевая ошибка.&lt;/p&gt;&lt;p&gt;Ошибка: {0}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="597" />
      <source>&lt;p&gt;Error: The local server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Ошибка: Локальный сервер по адресу &lt;b&gt;{0}&lt;/b&gt; не отвечает.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="599" />
      <source>&lt;p&gt;Error: The configured server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Ошибка: Настроенный сервер по адресу &lt;b&gt;{0}&lt;/b&gt; не отвечает.&lt;/p&gt;</translation>
    </message>
  </context>
  <context>
    <name>OllamaDetailedModelsDialog</name>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Available Models</source>
      <translation>Доступные модели</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Name</source>
      <translation>Имя</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>ID</source>
      <translation>ID</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Size</source>
      <translation>Размер</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Modified At</source>
      <translation>Изменена</translation>
    </message>
  </context>
  <context>
    <name>OllamaHistoryEditDialog</name>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Edit Chat Parameters</source>
      <translation>Редактирование параметров чата</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Chat Title:</source>
      <translation>Заголовок чата:</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Enter the title of the chat.</source>
      <translation>Введите заголовок чата.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Model:</source>
      <translation>Модель:</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Select the model to be used by the chat.</source>
      <translation>Выберите модель, которая будет использоваться в чате.</translation>
    </message>
  </context>
  <context>
    <name>OllamaHistoryWidget</name>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to start a new chat based on the current history or switch to an already opened chat.</source>
      <translation>Открыть новый чат на основе текущей истории или переключиться на уже открытый чат.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to edit the chat title.</source>
      <translation>Редактировать заголовок чата.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to view the current chat history in a separate window.</source>
      <translation>Просмотреть текущую историю чата в отдельном окне.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to delete this chat history.</source>
      <translation>Удалить историю этого чата.</translation>
    </message>
  </context>
  <context>
    <name>OllamaPage</name>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>&lt;b&gt;Configure 'ollama' Interface&lt;/b&gt;</source>
      <translation>&lt;b&gt;Настройка интерфейса 'ollama'&lt;/b&gt;</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Server URL</source>
      <translation>URL сервера</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Scheme:</source>
      <translation>Схема:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Select the scheme of the 'ollama' server URL.</source>
      <translation>Выберите схему URL сервера 'ollama'.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Host:</source>
      <translation>Хост:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the host name of the 'ollama' server.</source>
      <translation>Введите имя хоста сервера 'ollama'.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Port:</source>
      <translation>Порт:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the port of the 'ollama' server URL.</source>
      <translation>Введите порт URL сервера 'ollama'.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Local Server</source>
      <translation>Локальный сервер</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the port of the local 'ollama' server.</source>
      <translation>Введите порт локального сервера 'ollama'.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>URLs</source>
      <translation>URL-адреса</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Model Library:</source>
      <translation>Библиотека моделей:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' model library. Leave empty to use the default URL.</source>
      <translation>Введите URL-адрес библиотеки моделей 'ollama'. Оставьте поле пустым, чтобы использовать URL-адрес по умолчанию.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Download:</source>
      <translation>Загрузка:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' download page. Leave empty to use the default URL.</source>
      <translation>Введите URL-адрес страницы загрузки 'ollama'. Оставьте поле пустым, чтобы использовать URL-адрес по умолчанию.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Blog:</source>
      <translation>Блог:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' blog. Leave empty to use the default URL.</source>
      <translation>Введите URL-адрес блога 'ollama'. Оставьте поле пустым, чтобы использовать URL-адрес по умолчанию.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Heartbeat Timer:</source>
      <translation>Интервал мониторинга сервера:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the heartbeat timeout value (0 = disable).</source>
      <translation>Задайте интервал мониторинга сервера (0 = отключить).</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Disabled</source>
      <translation>Отключен</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source> s</source>
      <translation> сек</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Use streaming chat response</source>
      <translation>Использовать потоковый ответ чата</translation>
    </message>
  </context>
  <context>
    <name>OllamaPullProgressDialog</name>
    <message>
      <location filename="../OllamaPullProgressDialog.ui" line="0" />
      <source>Install Model</source>
      <translation>Установить модель</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="138" />
      <source>&lt;p&gt;Installing model &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Установка модели &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="182" />
      <source>{0} / {1}</source>
      <comment>completed / total</comment>
      <translation>{0} / {1}</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="201" />
      <source>Error: {0}</source>
      <translation>Ошибка: {0}</translation>
    </message>
  </context>
  <context>
    <name>OllamaRunningModelsDialog</name>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Running Models</source>
      <translation>Работающие модели</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Name</source>
      <translation>Имя</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>ID</source>
      <translation>ID</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Size</source>
      <translation>Размер</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Processor</source>
      <translation>Процессор</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Expires</source>
      <translation>Завершается</translation>
    </message>
  </context>
  <context>
    <name>OllamaWidget</name>
    <message>
      <location filename="../OllamaWidget.py" line="213" />
      <source>&lt;b&gt;ollama Server Version {0}&lt;/b&gt;</source>
      <translation>&lt;b&gt;Версия сервера ollama {0}&lt;/b&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="377" />
      <source>Save Chat History</source>
      <translation>Сохранить историю чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="378" />
      <source>&lt;p&gt;The chat history could not be saved to &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Не удалось сохранить историю чата в &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Причина: {1}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="434" />
      <location filename="../OllamaWidget.py" line="413" />
      <source>Load Chat History</source>
      <translation>Загрузить историю чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="414" />
      <source>&lt;p&gt;The chat history could not be loaded from &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Не удалось загрузить историю чата из &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Причина: {1}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="435" />
      <source>&lt;p&gt;These chats were not loaded because they already existed.&lt;/p&gt;{0}</source>
      <translation>&lt;p&gt;Эти чаты не были загружены, потому что они уже существуют.&lt;/p&gt;{0}</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="512" />
      <location filename="../OllamaWidget.py" line="505" />
      <source>New Chat</source>
      <translation>Новый чат</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="506" />
      <source>A model has to be selected first. Aborting...</source>
      <translation>Сначала необходимо выбрать модель. Прерывание...</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="513" />
      <source>Enter a title for the new chat:</source>
      <translation>Введите заголовок нового чата:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="712" />
      <source>Chat History</source>
      <translation>История чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="713" />
      <source>Load</source>
      <translation>Загрузить</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="716" />
      <source>Clear All</source>
      <translation>Очистить все</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="719" />
      <source>Import</source>
      <translation>Импорт</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="720" />
      <source>Export</source>
      <translation>Экспорт</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="726" />
      <source>Model Management</source>
      <translation>Управление моделями</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1018" />
      <location filename="../OllamaWidget.py" line="727" />
      <source>List Models</source>
      <translation>Список моделей</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1036" />
      <location filename="../OllamaWidget.py" line="729" />
      <source>List Running Models</source>
      <translation>Список работающих моделей</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="733" />
      <source>Show Model Library</source>
      <translation>Показать библиотеку моделей</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1053" />
      <location filename="../OllamaWidget.py" line="738" />
      <source>Install Model</source>
      <translation>Установить модель</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1126" />
      <location filename="../OllamaWidget.py" line="1117" />
      <location filename="../OllamaWidget.py" line="1106" />
      <location filename="../OllamaWidget.py" line="741" />
      <source>Remove Model</source>
      <translation>Удалить модель</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="748" />
      <source>Local Server</source>
      <translation>Локальный сервер</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="750" />
      <source>Start with Monitoring</source>
      <translation>Старт с мониторингом</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="754" />
      <source>Start</source>
      <translation>Старт</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="757" />
      <source>Stop</source>
      <translation>Стоп</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="764" />
      <source>ollama URLs</source>
      <translation>URL-адреса ollama</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="766" />
      <source>Model Library</source>
      <translation>Библиотека моделей</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="770" />
      <source>Download</source>
      <translation>Загрузка</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="774" />
      <source>Blog</source>
      <translation>Блог</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="791" />
      <source>Configure...</source>
      <translation>Настройка...</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="832" />
      <source>Clear All Chat Histories</source>
      <translation>Очистить все истории чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="833" />
      <source>&lt;p&gt;Do you really want to delete all chat histories? This is &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Вы действительно хотите удалить все истории чата? Это действие &lt;b&gt;необратимо&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="848" />
      <source>Import Chat History</source>
      <translation>Импорт истории чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="850" />
      <source>Chat History Files (*.json);;All Files (*)</source>
      <translation>Файлы истории чата (*.json);;Все файлы (*)</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="884" />
      <location filename="../OllamaWidget.py" line="851" />
      <source>Chat History Files (*.json)</source>
      <translation>Файлы истории чата (*.json)</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="882" />
      <location filename="../OllamaWidget.py" line="871" />
      <source>Export Chat History</source>
      <translation>Экспорт истории чата</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="872" />
      <source>Select the chats to be exported:</source>
      <translation>Выберите чаты для экспорта:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="971" />
      <source>Run Local 'ollama' Server</source>
      <translation>Запустить локальный сервер 'ollama'</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="972" />
      <source>The local 'ollama' server process could not be started.</source>
      <translation>Не удалось запустить процесс локального сервера 'ollama'.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1019" />
      <source>There are no models available.</source>
      <translation>Нет доступных моделей.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1037" />
      <source>There are no models running.</source>
      <translation>Нет запущенных моделей.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1054" />
      <source>Enter the name of the model to be installed:</source>
      <translation>Введите имя модели, которую нужно установить:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1107" />
      <source>Select the model to be removed by the 'ollama' server:</source>
      <translation>Выберите модель, которая будет удалена сервером 'ollama':</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1118" />
      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; was deleted successfully.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Модель &lt;b&gt;{0}&lt;/b&gt; была успешно удалена.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1127" />
      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; could not be removed from the 'ollama' server.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Модель &lt;b&gt;{0}&lt;/b&gt; не удалось удалить с сервера 'ollama'.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1154" />
      <source>Network Error</source>
      <translation>Ошибка сети</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to reload the models list and update the 'ollama' version information.</source>
      <translation>Перезагрузить список моделей и обновить информацию о версии 'ollama'.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Select the model for the chat.</source>
      <translation>Выберите модель для чата.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to start a new chat.</source>
      <translation>Начать новый чат.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Enter the message to be sent to the 'ollama' server.</source>
      <translation>Введите сообщение, которое будет отправлено на сервер 'ollama'.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Enter Message and press 'Ctrl-Return' to send it or use the send button.</source>
      <translation type="unfinished" />
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to send the message of the current chat to the 'ollama' server.</source>
      <translation>Отправить сообщение текущего чата на сервер 'ollama'.</translation>
    </message>
    <message>
      <source>Enter Message</source>
      <translation type="vanished">Введите сообщение</translation>
    </message>
  </context>
  <context>
    <name>PluginOllamaInterface</name>
    <message>
      <location filename="../../PluginAiOllama.py" line="184" />
      <location filename="../../PluginAiOllama.py" line="183" />
      <location filename="../../PluginAiOllama.py" line="179" />
      <location filename="../../PluginAiOllama.py" line="83" />
      <source>ollama AI Interface</source>
      <translation>Интерфейс ollama AI</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="185" />
      <source>Ctrl+Alt+Shift+O</source>
      <translation>Ctrl+Alt+Shift+O</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="191" />
      <source>Switch the input focus to the ollama AI window.</source>
      <translation>Переключите фокус ввода на окно ollama AI.</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="194" />
      <source>&lt;b&gt;Activate ollama AI Interface&lt;/b&gt;&lt;p&gt;This switches the input focus to the ollama AI window.&lt;/p&gt;</source>
      <translation>&lt;b&gt;Активировать интерфейс ollama AI&lt;/b&gt;&lt;p&gt;Это переключит фокус ввода на окно ollama AI.&lt;/p&gt;</translation>
    </message>
  </context>
  <context>
    <name>RunOllamaServerDialog</name>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>ollama Server</source>
      <translation>Сервер ollama</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Output</source>
      <translation>Вывод</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Press to restart the local ollama server.</source>
      <translation>Перезапустить локальный сервер ollama.</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Re-Start Server</source>
      <translation>Перезапустить сервер</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Press to stop the running ollama server.</source>
      <translation>Остановить запущенный сервер ollama.</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Stop Server</source>
      <translation>Остановить сервер</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.py" line="79" />
      <source>Run Local 'ollama' Server</source>
      <translation>Запустить локальный сервер 'ollama'</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.py" line="80" />
      <source>The local 'ollama' server process could not be started.</source>
      <translation>Не удалось запустить процесс локального сервера 'ollama'.</translation>
    </message>
  </context>
</TS>
