OllamaInterface/i18n/ollama_es.ts

Sat, 28 Sep 2024 16:39:30 +0200

author
Jaime Seuma <jaims.seuma@gmail.com>
date
Sat, 28 Sep 2024 16:39:30 +0200
changeset 53
07ec4ddecf1e
parent 44
ef9a85b8768a
child 54
05f9c60f7ab6
permissions
-rw-r--r--

Updated Spanish translations Ollama interface plugin, Default branch

<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="es">
<context>
    <name>OllamaChatWidget</name>
    <message>
        <location filename="../OllamaChatWidget.py" line="41"/>
        <source>&lt;b&gt;{0} - {1}&lt;/b&gt;</source>
        <comment>title, model name</comment>
        <translation>&lt;b&gt;{0} - {1}&lt;/b&gt;</translation>
    </message>
</context>
<context>
    <name>OllamaClient</name>
    <message>
        <location filename="../OllamaClient.py" line="301"/>
        <source>100% CPU</source>
        <translation>100% CPU</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="303"/>
        <source>100% GPU</source>
        <translation>100% GPU</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="305"/>
        <source>unknown</source>
        <translation>desconocido</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="309"/>
        <source>{0}% / {1}% CPU / GPU</source>
        <translation>{0}% / {1}% CPU / GPU</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="465"/>
        <source>&lt;p&gt;A network error occurred.&lt;/p&gt;&lt;p&gt;Error: {0}&lt;/p&gt;</source>
        <translation>&lt;p&gt;Ha ocurrido un error de red.&lt;/p&gt;&lt;p&gt;Error: {0}&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="594"/>
        <source>&lt;p&gt;Error: The local server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
        <translation>&lt;p&gt;Error: El servidor local en &lt;b&gt;{0}&lt;/b&gt; no responde.&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaClient.py" line="596"/>
        <source>&lt;p&gt;Error: The configured server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
        <translation>&lt;p&gt;Error: El servidor configurado en &lt;b&gt;{0}&lt;/b&gt; no responde.&lt;/p&gt;</translation>
    </message>
</context>
<context>
    <name>OllamaDetailedModelsDialog</name>
    <message>
        <location filename="../OllamaDetailedModelsDialog.ui" line="0"/>
        <source>Available Models</source>
        <translation>Modelos Disponibles</translation>
    </message>
    <message>
        <location filename="../OllamaDetailedModelsDialog.ui" line="0"/>
        <source>Name</source>
        <translation>Nombre</translation>
    </message>
    <message>
        <location filename="../OllamaDetailedModelsDialog.ui" line="0"/>
        <source>ID</source>
        <translation>ID</translation>
    </message>
    <message>
        <location filename="../OllamaDetailedModelsDialog.ui" line="0"/>
        <source>Size</source>
        <translation>Tamaño</translation>
    </message>
    <message>
        <location filename="../OllamaDetailedModelsDialog.ui" line="0"/>
        <source>Modified At</source>
        <translation>Modificado En</translation>
    </message>
</context>
<context>
    <name>OllamaHistoryEditDialog</name>
    <message>
        <location filename="../OllamaHistoryEditDialog.ui" line="0"/>
        <source>Edit Chat Parameters</source>
        <translation>Editar Parámetros del Chat</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryEditDialog.ui" line="0"/>
        <source>Chat Title:</source>
        <translation>Título del Chat:</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryEditDialog.ui" line="0"/>
        <source>Enter the title of the chat.</source>
        <translation>Introducir el título del chat.</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryEditDialog.ui" line="0"/>
        <source>Model:</source>
        <translation>Modelo:</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryEditDialog.ui" line="0"/>
        <source>Select the model to be used by the chat.</source>
        <translation>Seleccionar el modelo para usar en el chat.</translation>
    </message>
</context>
<context>
    <name>OllamaHistoryWidget</name>
    <message>
        <location filename="../OllamaHistoryWidget.ui" line="0"/>
        <source>Press to start a new chat based on the current history or switch to an already opened chat.</source>
        <translation>Pulsar para iniciar un nuevo chat basado en la historia actual o cambiar a un chat ya abierto.</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryWidget.ui" line="0"/>
        <source>Press to edit the chat title.</source>
        <translation>Pulsar para editar el título del chat.</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryWidget.ui" line="0"/>
        <source>Press to view the current chat history in a separate window.</source>
        <translation>Pulsar para ver la historia del chat actual en una ventana distinta.</translation>
    </message>
    <message>
        <location filename="../OllamaHistoryWidget.ui" line="0"/>
        <source>Press to delete this chat history.</source>
        <translation>Pulsar para borrar la historia de este chat.</translation>
    </message>
    <message>
        <source>Edit Chat Title</source>
        <translation type="vanished">Editar Título del Chat</translation>
    </message>
    <message>
        <source>Enter the new title:</source>
        <translation type="vanished">Introducir el nuevo título:</translation>
    </message>
</context>
<context>
    <name>OllamaPage</name>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>&lt;b&gt;Configure &apos;ollama&apos; Interface&lt;/b&gt;</source>
        <translation>&lt;b&gt;Configurar la Interfaz &apos;ollama&apos;&lt;/b&gt;</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Server URL</source>
        <translation>URL del Servidor</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Scheme:</source>
        <translation>Esquema:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Select the scheme of the &apos;ollama&apos; server URL.</source>
        <translation>Seleccionar el esquema de URL del servidor &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Host:</source>
        <translation>Host:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the host name of the &apos;ollama&apos; server.</source>
        <translation>Introducir el nombre de host del servidor &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Port:</source>
        <translation>Puerto:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the port of the &apos;ollama&apos; server URL.</source>
        <translation>Introducir el puerto de la URL del servidor &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Local Server</source>
        <translation>Servidor Local</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the port of the local &apos;ollama&apos; server.</source>
        <translation>Introducir el puerto del servidor local &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>URLs</source>
        <translation>URLs</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Model Library:</source>
        <translation>Biblioteca de Modelos:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the URL of the &apos;ollama&apos; model library. Leave empty to use the default URL.</source>
        <translation>Introducir la URL de la biblioteca de modelos de &apos;ollama&apos;. Dejar en blanco para utilizar la URL por defecto.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Download:</source>
        <translation>Descarga:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the URL of the &apos;ollama&apos; download page. Leave empty to use the default URL.</source>
        <translation>Introducir la URL de la página de descarga de &apos;ollama&apos;. Dejar en blanco para utilizar la URL por defecto.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Blog:</source>
        <translation>Blog:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the URL of the &apos;ollama&apos; blog. Leave empty to use the default URL.</source>
        <translation>Introducir la URL del blog &apos;ollama&apos;. Dejar en blanco para utilizar la URL por defecto.</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Heartbeat Timer:</source>
        <translation>Temporizador del Pulso:</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Enter the heartbeat timeout value (0 = disable).</source>
        <translation>Introducir el valor para el temporizador del pulso (0 = deshabilitar).</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Disabled</source>
        <translation>Deshabilitado</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source> s</source>
        <translation> s</translation>
    </message>
    <message>
        <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/>
        <source>Use streaming chat response</source>
        <translation>Usar respuesta de chat por streaming</translation>
    </message>
</context>
<context>
    <name>OllamaPullProgressDialog</name>
    <message>
        <location filename="../OllamaPullProgressDialog.ui" line="0"/>
        <source>Install Model</source>
        <translation>Instalar Modelo</translation>
    </message>
    <message>
        <location filename="../OllamaPullProgressDialog.py" line="138"/>
        <source>&lt;p&gt;Installing model &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</source>
        <translation>&lt;p&gt;Instalando modelo &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaPullProgressDialog.py" line="182"/>
        <source>{0} / {1}</source>
        <comment>completed / total</comment>
        <translation>{0} / {1}</translation>
    </message>
    <message>
        <location filename="../OllamaPullProgressDialog.py" line="201"/>
        <source>Error: {0}</source>
        <translation>Error: {0}</translation>
    </message>
</context>
<context>
    <name>OllamaRunningModelsDialog</name>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>Running Models</source>
        <translation>Modelos en Ejecución</translation>
    </message>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>Name</source>
        <translation>Nombre</translation>
    </message>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>ID</source>
        <translation>ID</translation>
    </message>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>Size</source>
        <translation>Tamaño</translation>
    </message>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>Processor</source>
        <translation>Procesador</translation>
    </message>
    <message>
        <location filename="../OllamaRunningModelsDialog.ui" line="0"/>
        <source>Expires</source>
        <translation>Expira</translation>
    </message>
</context>
<context>
    <name>OllamaWidget</name>
    <message>
        <location filename="../OllamaWidget.py" line="199"/>
        <source>&lt;b&gt;ollama Server Version {0}&lt;/b&gt;</source>
        <translation>&lt;b&gt;Versión del servidor ollama {0}&lt;/b&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="363"/>
        <source>Save Chat History</source>
        <translation>Guardar Historia del Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="364"/>
        <source>&lt;p&gt;The chat history could not be saved to &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
        <translation>&lt;p&gt;La historia del chat no se ha podido guardar en &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Razón: {1}&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="420"/>
        <location filename="../OllamaWidget.py" line="399"/>
        <source>Load Chat History</source>
        <translation>Cargar Historia de Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="400"/>
        <source>&lt;p&gt;The chat history could not be loaded from &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
        <translation>&lt;p&gt;La historia de chat no se ha podido cargar desde &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Razón: {1}&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="421"/>
        <source>&lt;p&gt;These chats were not loaded because they already existed.&lt;/p&gt;{0}</source>
        <translation>&lt;p&gt;Estos chats no se han cargado porque ya existen.&lt;/p&gt;{0}</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="498"/>
        <location filename="../OllamaWidget.py" line="491"/>
        <source>New Chat</source>
        <translation>Nuevo Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="492"/>
        <source>A model has to be selected first. Aborting...</source>
        <translation>Se ha de seleccionar un modelo antes. Abortando...</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="499"/>
        <source>Enter a title for the new chat:</source>
        <translation>Introducir título para el nuevo chat:</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="696"/>
        <source>Chat History</source>
        <translation>Historia de Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="697"/>
        <source>Load</source>
        <translation>Cargar</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="700"/>
        <source>Clear All</source>
        <translation>Limpiar Todo</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="703"/>
        <source>Import</source>
        <translation>Importar</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="704"/>
        <source>Export</source>
        <translation>Exportar</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="710"/>
        <source>Model Management</source>
        <translation>Gestión de Modelos</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1001"/>
        <location filename="../OllamaWidget.py" line="711"/>
        <source>List Models</source>
        <translation>Listar Modelos</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1019"/>
        <location filename="../OllamaWidget.py" line="713"/>
        <source>List Running Models</source>
        <translation>Listar Modelos en Ejecución</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="717"/>
        <source>Show Model Library</source>
        <translation>Mostrar Biblioteca de Modelos</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1036"/>
        <location filename="../OllamaWidget.py" line="722"/>
        <source>Install Model</source>
        <translation>Instalar Modelo</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1109"/>
        <location filename="../OllamaWidget.py" line="1100"/>
        <location filename="../OllamaWidget.py" line="1089"/>
        <location filename="../OllamaWidget.py" line="725"/>
        <source>Remove Model</source>
        <translation>Eliminar Modelo</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="732"/>
        <source>Local Server</source>
        <translation>Servidor Local</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="734"/>
        <source>Start with Monitoring</source>
        <translation>Iniciar con Monitorización</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="738"/>
        <source>Start</source>
        <translation>Iniciar</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="741"/>
        <source>Stop</source>
        <translation>Detener</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="748"/>
        <source>ollama URLs</source>
        <translation>URLs ollama</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="750"/>
        <source>Model Library</source>
        <translation>Biblioteca de Modelos</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="754"/>
        <source>Download</source>
        <translation>Descarga</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="758"/>
        <source>Blog</source>
        <translation>Blog</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="775"/>
        <source>Configure...</source>
        <translation>Configurar...</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="816"/>
        <source>Clear All Chat Histories</source>
        <translation>Limpiar Todas las Historias de Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="817"/>
        <source>&lt;p&gt;Do you really want to delete all chat histories? This is &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</source>
        <translation>&lt;p&gt;¿Desea realmente borrar todas las historias de chat? Esto es &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="832"/>
        <source>Import Chat History</source>
        <translation>Importar Historia de Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="834"/>
        <source>Chat History Files (*.json);;All Files (*)</source>
        <translation>Archivos de Historia de Chat (*.json);;Todos los Archivos (*)</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="867"/>
        <location filename="../OllamaWidget.py" line="835"/>
        <source>Chat History Files (*.json)</source>
        <translation>Archivos de Historia de Chat (*.json)</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="865"/>
        <location filename="../OllamaWidget.py" line="855"/>
        <source>Export Chat History</source>
        <translation>Exportar Historia de Chat</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="856"/>
        <source>Select the chats to be exported:</source>
        <translation>Seleccionar chats a exportar:</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="954"/>
        <source>Run Local &apos;ollama&apos; Server</source>
        <translation>Ejecutar Servidor &apos;ollama&apos; Local</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="955"/>
        <source>The loacl &apos;ollama&apos; server process could not be started.</source>
        <translation>El proceso del servidor local &apos;ollama&apos; no se ha podido iniciar.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1002"/>
        <source>There are no models available.</source>
        <translation>No hay modelos disponibles.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1020"/>
        <source>There are no models running.</source>
        <translation>No hay modelos en ejecución.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1037"/>
        <source>Enter the name of the model to be installed:</source>
        <translation>Introducir el nombre del modelo a instalar:</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1090"/>
        <source>Select the model to be removed by the &apos;ollama&apos; server:</source>
        <translation>Seleccionar el modelo a eliminar por el servidor &apos;ollama&apos;:</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1101"/>
        <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; was deleted successfully.&lt;/p&gt;</source>
        <translation>&lt;p&gt;El modelo &lt;b&gt;{0}&lt;/b&gt; se ha borrado con éxito.&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1110"/>
        <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; could not be removed from the &apos;ollama&apos; server.&lt;/p&gt;</source>
        <translation>&lt;p&gt;El modelo &lt;b&gt;{0}&lt;/b&gt; no se ha podido eliminar del servidor &apos;ollama&apos;.&lt;/p&gt;</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.py" line="1137"/>
        <source>Network Error</source>
        <translation>Error de Red</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Press to reload the models list and update the &apos;ollama&apos; version information.</source>
        <translation>Pulsar para recargar la lista de modelos y actualizar la información de versión de &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Select the model for the chat.</source>
        <translation>Seleccionar el modelo para el chat.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Press to start a new chat.</source>
        <translation>Pulsar para iniciar un nuevo chat.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Enter the message to be sent to the &apos;ollama&apos; server.</source>
        <translation>Introducir el mensaje a enviar al servidor &apos;ollama&apos;.</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Enter Message</source>
        <translation>Introducir Mensaje</translation>
    </message>
    <message>
        <location filename="../OllamaWidget.ui" line="0"/>
        <source>Press to send the message of the current chat to the &apos;ollama&apos; server.</source>
        <translation>Pulsar para enviar el mensaje del chat actual al servidor &apos;ollama&apos;.</translation>
    </message>
</context>
<context>
    <name>PluginOllamaInterface</name>
    <message>
        <location filename="../../PluginAiOllama.py" line="184"/>
        <location filename="../../PluginAiOllama.py" line="183"/>
        <location filename="../../PluginAiOllama.py" line="179"/>
        <location filename="../../PluginAiOllama.py" line="83"/>
        <source>ollama AI Interface</source>
        <translation>Interfaz ollama AI</translation>
    </message>
    <message>
        <location filename="../../PluginAiOllama.py" line="185"/>
        <source>Ctrl+Alt+Shift+O</source>
        <translation>Ctrl+Alt+Shift+O</translation>
    </message>
    <message>
        <location filename="../../PluginAiOllama.py" line="191"/>
        <source>Switch the input focus to the ollama AI window.</source>
        <translation>Cambiar el foco de entrada a la ventana de ollama AI.</translation>
    </message>
    <message>
        <location filename="../../PluginAiOllama.py" line="194"/>
        <source>&lt;b&gt;Activate ollama AI Interface&lt;/b&gt;&lt;p&gt;This switches the input focus to the ollama AI window.&lt;/p&gt;</source>
        <translation>&lt;b&gt;Activar la Interfaz de ollama AI&lt;/b&gt;&lt;p&gt;Esto cambia el foco de entrada a la ventana de ollama AI.&lt;/p&gt;</translation>
    </message>
</context>
<context>
    <name>RunOllamaServerDialog</name>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>ollama Server</source>
        <translation>Servidor ollama</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>Output</source>
        <translation>Salida</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>Press to restart the local ollama server.</source>
        <translation>Pulsar para reiniciar el servidor local ollama.</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>Re-Start Server</source>
        <translation>Reiniciar Servidor</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>Press to stop the running ollama server.</source>
        <translation>Pulsar para detener el servidor ollama en ejecución.</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.ui" line="0"/>
        <source>Stop Server</source>
        <translation>Detener Servidor</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.py" line="79"/>
        <source>Run Local &apos;ollama&apos; Server</source>
        <translation>Ejecutar Servidor &apos;ollama&apos; Local</translation>
    </message>
    <message>
        <location filename="../RunOllamaServerDialog.py" line="80"/>
        <source>The local &apos;ollama&apos; server process could not be started.</source>
        <translation>El proceso del servidor local &apos;ollama&apos; no se ha podido iniciar.</translation>
    </message>
</context>
</TS>

eric ide

mercurial