Sat, 28 Sep 2024 16:39:30 +0200
Updated Spanish translations of the Ollama interface plugin (default branch)
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="es"> <context> <name>OllamaChatWidget</name> <message> <location filename="../OllamaChatWidget.py" line="41"/> <source><b>{0} - {1}</b></source> <comment>title, model name</comment> <translation><b>{0} - {1}</b></translation> </message> </context> <context> <name>OllamaClient</name> <message> <location filename="../OllamaClient.py" line="301"/> <source>100% CPU</source> <translation>100% CPU</translation> </message> <message> <location filename="../OllamaClient.py" line="303"/> <source>100% GPU</source> <translation>100% GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="305"/> <source>unknown</source> <translation>desconocido</translation> </message> <message> <location filename="../OllamaClient.py" line="309"/> <source>{0}% / {1}% CPU / GPU</source> <translation>{0}% / {1}% CPU / GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="465"/> <source><p>A network error occurred.</p><p>Error: {0}</p></source> <translation><p>Ha ocurrido un error de red.</p><p>Error: {0}</p></translation> </message> <message> <location filename="../OllamaClient.py" line="594"/> <source><p>Error: The local server at <b>{0}</b> is not responding.</p></source> <translation><p>Error: El servidor local en <b>{0}</b> no responde.</p></translation> </message> <message> <location filename="../OllamaClient.py" line="596"/> <source><p>Error: The configured server at <b>{0}</b> is not responding.</p></source> <translation><p>Error: El servidor configurado en <b>{0}</b> no responde.</p></translation> </message> </context> <context> <name>OllamaDetailedModelsDialog</name> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0"/> <source>Available Models</source> <translation>Modelos Disponibles</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0"/> <source>Name</source> <translation>Nombre</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0"/> <source>ID</source> <translation>ID</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0"/> <source>Size</source> <translation>Tamaño</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0"/> <source>Modified At</source> <translation>Modificado En</translation> </message> </context> <context> <name>OllamaHistoryEditDialog</name> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0"/> <source>Edit Chat Parameters</source> <translation>Editar Parámetros del Chat</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0"/> <source>Chat Title:</source> <translation>Título del Chat:</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0"/> <source>Enter the title of the chat.</source> <translation>Introducir el título del chat.</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0"/> <source>Model:</source> <translation>Modelo:</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0"/> <source>Select the model to be used by the chat.</source> <translation>Seleccionar el modelo para usar en el chat.</translation> </message> </context> <context> <name>OllamaHistoryWidget</name> <message> <location filename="../OllamaHistoryWidget.ui" line="0"/> <source>Press to start a new chat based on the 
current history or switch to an already opened chat.</source> <translation>Pulsar para iniciar un nuevo chat basado en la historia actual o cambiar a un chat ya abierto.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0"/> <source>Press to edit the chat title.</source> <translation>Pulsar para editar el título del chat.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0"/> <source>Press to view the current chat history in a separate window.</source> <translation>Pulsar para ver la historia del chat actual en una ventana distinta.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0"/> <source>Press to delete this chat history.</source> <translation>Pulsar para borrar la historia de este chat.</translation> </message> <message> <source>Edit Chat Title</source> <translation type="vanished">Editar Título del Chat</translation> </message> <message> <source>Enter the new title:</source> <translation type="vanished">Introducir el nuevo título:</translation> </message> </context> <context> <name>OllamaPage</name> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source><b>Configure 'ollama' Interface</b></source> <translation><b>Configurar la Interfaz 'ollama'</b></translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Server URL</source> <translation>URL del Servidor</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Scheme:</source> <translation>Esquema:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Select the scheme of the 'ollama' server URL.</source> <translation>Seleccionar el esquema de URL del servidor 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Host:</source> <translation>Host:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the host name of the 'ollama' server.</source> <translation>Introducir el nombre de host del servidor 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Port:</source> <translation>Puerto:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the port of the 'ollama' server URL.</source> <translation>Introducir el puerto de la URL del servidor 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Local Server</source> <translation>Servidor Local</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the port of the local 'ollama' server.</source> <translation>Introducir el puerto del servidor local 'ollama'.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>URLs</source> <translation>URLs</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Model Library:</source> <translation>Biblioteca de Modelos:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the URL of the 'ollama' model library. 
Leave empty to use the default URL.</source> <translation>Introducir la URL de la biblioteca de modelos de 'ollama'. Dejar en blanco para utilizar la URL por defecto.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Download:</source> <translation>Descarga:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the URL of the 'ollama' download page. Leave empty to use the default URL.</source> <translation>Introducir la URL de la página de descarga de 'ollama'. Dejar en blanco para utilizar la URL por defecto.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Blog:</source> <translation>Blog:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the URL of the 'ollama' blog. Leave empty to use the default URL.</source> <translation>Introducir la URL del blog 'ollama'. Dejar en blanco para utilizar la URL por defecto.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Heartbeat Timer:</source> <translation>Temporizador del Pulso:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Enter the heartbeat timeout value (0 = disable).</source> <translation>Introducir el valor para el temporizador del pulso (0 = deshabilitar).</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Disabled</source> <translation>Deshabilitado</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source> s</source> <translation> s</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0"/> <source>Use streaming chat response</source> <translation>Usar respuesta de chat por streaming</translation> </message> </context> <context> <name>OllamaPullProgressDialog</name> <message> <location filename="../OllamaPullProgressDialog.ui" line="0"/> <source>Install Model</source> <translation>Instalar Modelo</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="138"/> <source><p>Installing model <b>{0}</b>.</p></source> <translation><p>Instalando modelo <b>{0}</b>.</p></translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="182"/> <source>{0} / {1}</source> <comment>completed / total</comment> <translation>{0} / {1}</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="201"/> <source>Error: {0}</source> <translation>Error: {0}</translation> </message> </context> <context> <name>OllamaRunningModelsDialog</name> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>Running Models</source> <translation>Modelos en Ejecución</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>Name</source> <translation>Nombre</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>ID</source> <translation>ID</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>Size</source> <translation>Tamaño</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>Processor</source> <translation>Procesador</translation> 
</message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0"/> <source>Expires</source> <translation>Expira</translation> </message> </context> <context> <name>OllamaWidget</name> <message> <location filename="../OllamaWidget.py" line="199"/> <source><b>ollama Server Version {0}</b></source> <translation><b>Versión del servidor ollama {0}</b></translation> </message> <message> <location filename="../OllamaWidget.py" line="363"/> <source>Save Chat History</source> <translation>Guardar Historia del Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="364"/> <source><p>The chat history could not be saved to <b>{0}</b>.</p><p>Reason: {1}</p></source> <translation><p>La historia del chat no se ha podido salvar en <b>{0}</b>.</p><p>Razón: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="420"/> <location filename="../OllamaWidget.py" line="399"/> <source>Load Chat History</source> <translation>Cargar Historia de Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="400"/> <source><p>The chat history could not be loaded from <b>{0}</b>.</p><p>Reason: {1}</p></source> <translation><p>La historia de chat no se ha podido cargar desde <b>{0}</b>.</p><p>Razón: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="421"/> <source><p>These chats were not loaded because they already existed.</p>{0}</source> <translation><p>Estos chats no se han cargado porque ya existen.</p>{0}</translation> </message> <message> <location filename="../OllamaWidget.py" line="498"/> <location filename="../OllamaWidget.py" line="491"/> <source>New Chat</source> <translation>Nuevo Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="492"/> <source>A model has to be selected first. Aborting...</source> <translation>Se ha de seleccionar un modelo antes. 
Abortando...</translation> </message> <message> <location filename="../OllamaWidget.py" line="499"/> <source>Enter a title for the new chat:</source> <translation>Introducir título para el nuevo chat:</translation> </message> <message> <location filename="../OllamaWidget.py" line="696"/> <source>Chat History</source> <translation>Historia de Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="697"/> <source>Load</source> <translation>Cargar</translation> </message> <message> <location filename="../OllamaWidget.py" line="700"/> <source>Clear All</source> <translation>Limpiar Todo</translation> </message> <message> <location filename="../OllamaWidget.py" line="703"/> <source>Import</source> <translation>Importar</translation> </message> <message> <location filename="../OllamaWidget.py" line="704"/> <source>Export</source> <translation>Exportar</translation> </message> <message> <location filename="../OllamaWidget.py" line="710"/> <source>Model Management</source> <translation>Gestión de Modelos</translation> </message> <message> <location filename="../OllamaWidget.py" line="1001"/> <location filename="../OllamaWidget.py" line="711"/> <source>List Models</source> <translation>Listar Modelos</translation> </message> <message> <location filename="../OllamaWidget.py" line="1019"/> <location filename="../OllamaWidget.py" line="713"/> <source>List Running Models</source> <translation>Listar Modelos en Ejecución</translation> </message> <message> <location filename="../OllamaWidget.py" line="717"/> <source>Show Model Library</source> <translation>Mostrar Biblioteca de Modelos</translation> </message> <message> <location filename="../OllamaWidget.py" line="1036"/> <location filename="../OllamaWidget.py" line="722"/> <source>Install Model</source> <translation>Instalar Modelo</translation> </message> <message> <location filename="../OllamaWidget.py" line="1109"/> <location filename="../OllamaWidget.py" line="1100"/> <location filename="../OllamaWidget.py" line="1089"/> <location filename="../OllamaWidget.py" line="725"/> <source>Remove Model</source> <translation>Eliminar Modelo</translation> </message> <message> <location filename="../OllamaWidget.py" line="732"/> <source>Local Server</source> <translation>Servidor Local</translation> </message> <message> <location filename="../OllamaWidget.py" line="734"/> <source>Start with Monitoring</source> <translation>Iniciar con Monitorización</translation> </message> <message> <location filename="../OllamaWidget.py" line="738"/> <source>Start</source> <translation>Iniciar</translation> </message> <message> <location filename="../OllamaWidget.py" line="741"/> <source>Stop</source> <translation>Detener</translation> </message> <message> <location filename="../OllamaWidget.py" line="748"/> <source>ollama URLs</source> <translation>URLs ollama</translation> </message> <message> <location filename="../OllamaWidget.py" line="750"/> <source>Model Library</source> <translation>Biblioteca de Modelos</translation> </message> <message> <location filename="../OllamaWidget.py" line="754"/> <source>Download</source> <translation>Descarga</translation> </message> <message> <location filename="../OllamaWidget.py" line="758"/> <source>Blog</source> <translation>Blog</translation> </message> <message> <location filename="../OllamaWidget.py" line="775"/> <source>Configure...</source> <translation>Configurar...</translation> </message> <message> <location filename="../OllamaWidget.py" line="816"/> <source>Clear All Chat Histories</source> 
<translation>Limpiar Todas las Historias de Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="817"/> <source><p>Do you really want to delete all chat histories? This is <b>irreversible</b>.</p></source> <translation><p>¿Desea realmente borrar todas las historias de chat? Esto es <b>irreversible</b>.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="832"/> <source>Import Chat History</source> <translation>Importar Historia de Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="834"/> <source>Chat History Files (*.json);;All Files (*)</source> <translation>Archivos de Historia de Chat (*.json);;Todos los Archivos (*)</translation> </message> <message> <location filename="../OllamaWidget.py" line="867"/> <location filename="../OllamaWidget.py" line="835"/> <source>Chat History Files (*.json)</source> <translation>Archivos de Historia de Chat (*.json)</translation> </message> <message> <location filename="../OllamaWidget.py" line="865"/> <location filename="../OllamaWidget.py" line="855"/> <source>Export Chat History</source> <translation>Exportar Historia de Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="856"/> <source>Select the chats to be exported:</source> <translation>Seleccionar chats a exportar:</translation> </message> <message> <location filename="../OllamaWidget.py" line="954"/> <source>Run Local 'ollama' Server</source> <translation>Ejecutar Servidor 'ollama' Local</translation> </message> <message> <location filename="../OllamaWidget.py" line="955"/> <source>The loacl 'ollama' server process could not be started.</source> <translation>El proceso del servidor local 'ollama' no se ha podido iniciar.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1002"/> <source>There are no models available.</source> <translation>No hay modelos disponibles.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1020"/> <source>There are no models running.</source> <translation>No hay modelos en ejecución.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1037"/> <source>Enter the name of the model to be installed:</source> <translation>Introducir el nombre del modelo a instalar:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1090"/> <source>Select the model to be removed by the 'ollama' server:</source> <translation>Seleccionar el modelo a eliminar por el servidor 'ollama':</translation> </message> <message> <location filename="../OllamaWidget.py" line="1101"/> <source><p>The model <b>{0}</b> was deleted successfully.</p></source> <translation><p>El modelo <b>{0}</b> se ha borrado con éxito.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1110"/> <source><p>The model <b>{0}</b> could not be removed from the 'ollama' server.</p></source> <translation><p>El modelo <b>{0}</b> no se ha podido eliminar del servidor 'ollama'.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1137"/> <source>Network Error</source> <translation>Error de Red</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0"/> <source>Press to reload the models list and update the 'ollama' version information.</source> <translation>Pulsar para recargar la lista de modelos y actualizar la información de versión de 'ollama'.</translation> </message> <message> <location 
filename="../OllamaWidget.ui" line="0"/> <source>Select the model for the chat.</source> <translation>Seleccionar el modelo para el chat.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0"/> <source>Press to start a new chat.</source> <translation>Pulsar para iniciar un nuevo chat.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0"/> <source>Enter the message to be sent to the 'ollama' server.</source> <translation>Introducir el mensaje a enviar al servidor 'ollama'.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0"/> <source>Enter Message</source> <translation>Introducir Mensaje</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0"/> <source>Press to send the message of the current chat to the 'ollama' server.</source> <translation>Pulsar para enviar el mensaje del chat actual al servidor 'ollama'.</translation> </message> </context> <context> <name>PluginOllamaInterface</name> <message> <location filename="../../PluginAiOllama.py" line="184"/> <location filename="../../PluginAiOllama.py" line="183"/> <location filename="../../PluginAiOllama.py" line="179"/> <location filename="../../PluginAiOllama.py" line="83"/> <source>ollama AI Interface</source> <translation>Interfaz ollama AI</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="185"/> <source>Ctrl+Alt+Shift+O</source> <translation>Ctrl+Alt+Shift+O</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="191"/> <source>Switch the input focus to the ollama AI window.</source> <translation>Cambiar el foco de entrada a la ventana de ollama AI.</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="194"/> <source><b>Activate ollama AI Interface</b><p>This switches the input focus to the ollama AI window.</p></source> <translation><b>Activar la Interfaz de ollama AI</b><p>Esto cambia el foco de entrada a la ventana de ollama AI.</p></translation> </message> </context> <context> <name>RunOllamaServerDialog</name> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>ollama Server</source> <translation>Servidor ollama</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>Output</source> <translation>Salida</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>Press to restart the local ollama server.</source> <translation>Pulsar para reiniciar el servidor local ollama.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>Re-Start Server</source> <translation>Reiniciar Servidor</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>Press to stop the running ollama server.</source> <translation>Pulsar para detener el servidor ollama en ejecución.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0"/> <source>Stop Server</source> <translation>Detener Servidor</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="79"/> <source>Run Local 'ollama' Server</source> <translation>Ejecutar Servidor 'ollama' Local</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="80"/> <source>The local 'ollama' server process could not be started.</source> <translation>El proceso del servidor local 'ollama' no se ha 
podido iniciar.</translation> </message> </context> </TS>
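
The file above is a Qt Linguist translation source (.ts). Before the plugin can use it, the catalog is compiled into a binary .qm file (typically with Qt's lrelease tool) and installed into the running application. The following is a minimal sketch of that loading step using PyQt6's QTranslator; it is not the plugin's own loading code, and the base name "ollama", the "_" prefix, and the "i18n" directory are assumptions made for the example.

import sys

from PyQt6.QtCore import QCoreApplication, QLocale, QTranslator
from PyQt6.QtWidgets import QApplication, QLabel

app = QApplication(sys.argv)

# Assumption: the .ts catalog was compiled (e.g. "lrelease ollama_es.ts")
# into i18n/ollama_es.qm; load() returns False and the UI stays in English
# if no matching catalog is found.
translator = QTranslator()
if translator.load(QLocale(), "ollama", "_", "i18n"):
    QCoreApplication.installTranslator(translator)

# Lookups use the context (<name>) and source text (<source>) from the catalog;
# with a Spanish locale active, "Running Models" resolves to "Modelos en Ejecución".
label = QLabel(QCoreApplication.translate("OllamaRunningModelsDialog", "Running Models"))
label.show()

sys.exit(app.exec())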