OllamaInterface/i18n/ollama_de.ts

Mon, 16 Sep 2024 19:05:50 +0200

author
Detlev Offenbach <detlev@die-offenbachs.de>
date
Mon, 16 Sep 2024 19:05:50 +0200
changeset 44
ef9a85b8768a
parent 24
f4d5108e90fe
child 68
ca2e671f894e
permissions
-rw-r--r--

Added the capability to change the model of a chat.

<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="de_DE">
  <context>
    <name>OllamaChatWidget</name>
    <message>
      <location filename="../OllamaChatWidget.py" line="41" />
      <source>&lt;b&gt;{0} - {1}&lt;/b&gt;</source>
      <comment>title, model name</comment>
      <translation>&lt;b&gt;{0} - {1}&lt;/b&gt;</translation>
    </message>
  </context>
  <context>
    <name>OllamaClient</name>
    <message>
      <location filename="../OllamaClient.py" line="301" />
      <source>100% CPU</source>
      <translation>100% CPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="303" />
      <source>100% GPU</source>
      <translation>100% GPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="305" />
      <source>unknown</source>
      <translation>unbekannt</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="309" />
      <source>{0}% / {1}% CPU / GPU</source>
      <translation>{0}% / {1}% CPU / GPU</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="465" />
      <source>&lt;p&gt;A network error occurred.&lt;/p&gt;&lt;p&gt;Error: {0}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Es gab einen Netzwerkfehler.&lt;/p&gt;&lt;p&gt;Fehler: {0}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="594" />
      <source>&lt;p&gt;Error: The local server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Fehler: Der lokale Server auf &lt;b&gt;{0}&lt;/b&gt; antwortet nicht.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaClient.py" line="596" />
      <source>&lt;p&gt;Error: The configured server at &lt;b&gt;{0}&lt;/b&gt; is not responding.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Fehler: Der konfigurierte Server auf &lt;b&gt;{0}&lt;/b&gt; antwortet nicht.&lt;/p&gt;</translation>
    </message>
  </context>
  <context>
    <name>OllamaDetailedModelsDialog</name>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Available Models</source>
      <translation>Verfügbare Modelle</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Name</source>
      <translation>Name</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>ID</source>
      <translation>ID</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Size</source>
      <translation>Größe</translation>
    </message>
    <message>
      <location filename="../OllamaDetailedModelsDialog.ui" line="0" />
      <source>Modified At</source>
      <translation>Geändert</translation>
    </message>
  </context>
  <context>
    <name>OllamaHistoryEditDialog</name>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Edit Chat Parameters</source>
      <translation>Unterhaltungsparameter bearbeiten</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Chat Title:</source>
      <translation>Unterhaltungstitel:</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Enter the title of the chat.</source>
      <translation>Gib den Titel der Unterhaltung ein.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Model:</source>
      <translation>Modell:</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryEditDialog.ui" line="0" />
      <source>Select the model to be used by the chat.</source>
      <translation>Wähle das für die Unterhaltung zu verwendende Modell.</translation>
    </message>
  </context>
  <context>
    <name>OllamaHistoryWidget</name>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to start a new chat based on the current history or switch to an already opened chat.</source>
      <translation>Drücken, um eine neue Unterhaltung basierend auf der aktuellen Chronik zu starten oder zu einer bereits offenen Unterhaltung zu schalten.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to edit the chat title.</source>
      <translation>Drücken, um den Titel der Unterhaltung zu bearbeiten.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to view the current chat history in a separate window.</source>
      <translation>Drücken, um die Chronik der aktuellen Unterhaltung in einem separaten Fenster anzuzeigen.</translation>
    </message>
    <message>
      <location filename="../OllamaHistoryWidget.ui" line="0" />
      <source>Press to delete this chat history.</source>
      <translation>Drücken, um die Chronik dieser Unterhaltung zu löschen.</translation>
    </message>
  </context>
  <context>
    <name>OllamaPage</name>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>&lt;b&gt;Configure 'ollama' Interface&lt;/b&gt;</source>
      <translation>&lt;b&gt;'ollama'-Schnittstelle einstellen&lt;/b&gt;</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Server URL</source>
      <translation>Server URL</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Scheme:</source>
      <translation>Schema:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Select the scheme of the 'ollama' server URL.</source>
      <translation>Wähle das Schema der 'ollama' Server URL.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Host:</source>
      <translation>Rechner:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the host name of the 'ollama' server.</source>
      <translation>Gib den Rechnernamen des 'ollama' Servers ein.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Port:</source>
      <translation>Port:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the port of the 'ollama' server URL.</source>
      <translation>Gib den Port der 'ollama' Server URL ein.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Local Server</source>
      <translation>Lokaler Server</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the port of the local 'ollama' server.</source>
      <translation>Gib den Port des lokalen 'ollama' Servers ein.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>URLs</source>
      <translation>URLs</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Model Library:</source>
      <translation>Modellbibliothek:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' model library. Leave empty to use the default URL.</source>
      <translation>Gib die URL der 'ollama' Modellbibliothek ein. Leer lassen zur Verwendung der Standard-URL.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Download:</source>
      <translation>Download:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' download page. Leave empty to use the default URL.</source>
      <translation>Gib die URL der 'ollama' Downloadseite ein. Leer lassen zur Verwendung der Standard-URL.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Blog:</source>
      <translation>Blog:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the URL of the 'ollama' blog. Leave empty to use the default URL.</source>
      <translation>Gib die URL des 'ollama' Blogs ein. Leer lassen zur Verwendung der Standard-URL.</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Heartbeat Timer:</source>
      <translation>Heartbeat Timer:</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Enter the heartbeat timeout value (0 = disable).</source>
      <translation>Gib das Intervall für den Heartbeat Timer ein (0 = deaktiviert).</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Disabled</source>
      <translation>Deaktiviert</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source> s</source>
      <translation> s</translation>
    </message>
    <message>
      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
      <source>Use streaming chat response</source>
      <translation>Streaming-Chat-Antwort verwenden</translation>
    </message>
  </context>
  <context>
    <name>OllamaPullProgressDialog</name>
    <message>
      <location filename="../OllamaPullProgressDialog.ui" line="0" />
      <source>Install Model</source>
      <translation>Modell installieren</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="138" />
      <source>&lt;p&gt;Installing model &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Installiere Modell &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="182" />
      <source>{0} / {1}</source>
      <comment>completed / total</comment>
      <translation>{0} / {1}</translation>
    </message>
    <message>
      <location filename="../OllamaPullProgressDialog.py" line="201" />
      <source>Error: {0}</source>
      <translation>Fehler: {0}</translation>
    </message>
  </context>
  <context>
    <name>OllamaRunningModelsDialog</name>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Running Models</source>
      <translation>Ausgeführte Modelle</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Name</source>
      <translation>Name</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>ID</source>
      <translation>ID</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Size</source>
      <translation>Größe</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Processor</source>
      <translation>Prozessor</translation>
    </message>
    <message>
      <location filename="../OllamaRunningModelsDialog.ui" line="0" />
      <source>Expires</source>
      <translation>Läuft ab</translation>
    </message>
  </context>
  <context>
    <name>OllamaWidget</name>
    <message>
      <location filename="../OllamaWidget.py" line="199" />
      <source>&lt;b&gt;ollama Server Version {0}&lt;/b&gt;</source>
      <translation>&lt;b&gt;ollama Server Version {0}&lt;/b&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="363" />
      <source>Save Chat History</source>
      <translation>Chat Verlauf speichern</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="364" />
      <source>&lt;p&gt;The chat history could not be saved to &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Der Chat Verlauf konnte nicht nach &lt;b&gt;{0}&lt;/b&gt; gespeichert werden.&lt;/p&gt;&lt;p&gt;Ursache: {1}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="420" />
      <location filename="../OllamaWidget.py" line="399" />
      <source>Load Chat History</source>
      <translation>Chat Verlauf laden</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="400" />
      <source>&lt;p&gt;The chat history could not be loaded from &lt;b&gt;{0}&lt;/b&gt;.&lt;/p&gt;&lt;p&gt;Reason: {1}&lt;/p&gt;</source>
      <translation>&lt;p&gt;Der Chat Verlauf konnte nicht aus &lt;b&gt;{0}&lt;/b&gt; geladen werden.&lt;/p&gt;&lt;p&gt;Ursache: {1}&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="421" />
      <source>&lt;p&gt;These chats were not loaded because they already existed.&lt;/p&gt;{0}</source>
      <translation>&lt;p&gt;Diese Chats wurden nicht geladen, da sie bereits existieren.&lt;/p&gt;{0}</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="498" />
      <location filename="../OllamaWidget.py" line="491" />
      <source>New Chat</source>
      <translation>Neuer Chat</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="492" />
      <source>A model has to be selected first. Aborting...</source>
      <translation>Es muss zuerst ein Modell ausgewählt werden. Abbruch...</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="499" />
      <source>Enter a title for the new chat:</source>
      <translation>Gib einen Titel für den neuen Chat ein:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="696" />
      <source>Chat History</source>
      <translation>Chat Verlauf</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="697" />
      <source>Load</source>
      <translation>Laden</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="700" />
      <source>Clear All</source>
      <translation>Alle löschen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="703" />
      <source>Import</source>
      <translation>Importieren</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="704" />
      <source>Export</source>
      <translation>Exportieren</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="710" />
      <source>Model Management</source>
      <translation>Modellverwaltung</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1001" />
      <location filename="../OllamaWidget.py" line="711" />
      <source>List Models</source>
      <translation>Modelle auflisten</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1019" />
      <location filename="../OllamaWidget.py" line="713" />
      <source>List Running Models</source>
      <translation>Laufende Modelle auflisten</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="717" />
      <source>Show Model Library</source>
      <translation>Modellbibliothek anzeigen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1036" />
      <location filename="../OllamaWidget.py" line="722" />
      <source>Install Model</source>
      <translation>Modell installieren</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1109" />
      <location filename="../OllamaWidget.py" line="1100" />
      <location filename="../OllamaWidget.py" line="1089" />
      <location filename="../OllamaWidget.py" line="725" />
      <source>Remove Model</source>
      <translation>Modell löschen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="732" />
      <source>Local Server</source>
      <translation>Lokaler Server</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="734" />
      <source>Start with Monitoring</source>
      <translation>Mit Monitoring starten</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="738" />
      <source>Start</source>
      <translation>Starten</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="741" />
      <source>Stop</source>
      <translation>Stoppen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="748" />
      <source>ollama URLs</source>
      <translation>ollama URLs</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="750" />
      <source>Model Library</source>
      <translation>Modellbibliothek</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="754" />
      <source>Download</source>
      <translation>Download</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="758" />
      <source>Blog</source>
      <translation>Blog</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="775" />
      <source>Configure...</source>
      <translation>Einstellungen...</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="816" />
      <source>Clear All Chat Histories</source>
      <translation>Alle Chat Verläufe löschen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="817" />
      <source>&lt;p&gt;Do you really want to delete all chat histories? This is &lt;b&gt;irreversible&lt;/b&gt;.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Sollen wirklich alle Chat Verläufe gelöscht werden? Dies ist &lt;b&gt;unumkehrbar&lt;/b&gt;.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="832" />
      <source>Import Chat History</source>
      <translation>Chat Verlauf importieren</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="834" />
      <source>Chat History Files (*.json);;All Files (*)</source>
      <translation>Chat Verlauf Dateien (*.json);;Alle Dateien (*)</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="867" />
      <location filename="../OllamaWidget.py" line="835" />
      <source>Chat History Files (*.json)</source>
      <translation>Chat Verlauf Dateien (*.json)</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="865" />
      <location filename="../OllamaWidget.py" line="855" />
      <source>Export Chat History</source>
      <translation>Chat Verlauf exportieren</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="856" />
      <source>Select the chats to be exported:</source>
      <translation>Wähle die zu exportierenden Chats:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="954" />
      <source>Run Local 'ollama' Server</source>
      <translation>Lokalen 'ollama' Server ausführen</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="955" />
      <source>The local 'ollama' server process could not be started.</source>
      <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1002" />
      <source>There are no models available.</source>
      <translation>Es sind keine Modelle verfügbar.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1020" />
      <source>There are no models running.</source>
      <translation>Es werden keine Modelle ausgeführt.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1037" />
      <source>Enter the name of the model to be installed:</source>
      <translation>Gib den Namen des zu installierenden Modells ein:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1090" />
      <source>Select the model to be removed by the 'ollama' server:</source>
      <translation>Wähle das vom 'ollama' Server zu entfernende Modell aus:</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1101" />
      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; was deleted successfully.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Das Modell &lt;b&gt;{0}&lt;/b&gt; wurde erfolgreich entfernt.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1110" />
      <source>&lt;p&gt;The model &lt;b&gt;{0}&lt;/b&gt; could not be removed from the 'ollama' server.&lt;/p&gt;</source>
      <translation>&lt;p&gt;Das Modell &lt;b&gt;{0}&lt;/b&gt; konnte nicht vom 'ollama' Server entfernt werden.&lt;/p&gt;</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.py" line="1137" />
      <source>Network Error</source>
      <translation>Netzwerkfehler</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to reload the models list and update the 'ollama' version information.</source>
      <translation>Drücken, um die Modelliste neu zu laden und die 'ollama' Versionsinformation zu aktualisieren.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Select the model for the chat.</source>
      <translation>Wähle das Modell für den Chat.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to start a new chat.</source>
      <translation>Drücken, um einen neuen Chat zu starten.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Enter the message to be sent to the 'ollama' server.</source>
      <translation>Gib die an den 'ollama' Server zu sendende Nachricht ein.</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Enter Message</source>
      <translation>Nachricht eingeben</translation>
    </message>
    <message>
      <location filename="../OllamaWidget.ui" line="0" />
      <source>Press to send the message of the current chat to the 'ollama' server.</source>
      <translation>Drücken, um die Nachricht des aktuellen Chats an den 'ollama' Server zu schicken.</translation>
    </message>
  </context>
  <context>
    <name>PluginOllamaInterface</name>
    <message>
      <location filename="../../PluginAiOllama.py" line="184" />
      <location filename="../../PluginAiOllama.py" line="183" />
      <location filename="../../PluginAiOllama.py" line="179" />
      <location filename="../../PluginAiOllama.py" line="83" />
      <source>ollama AI Interface</source>
      <translation>ollama KI Schnittstelle</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="185" />
      <source>Ctrl+Alt+Shift+O</source>
      <translation>Ctrl+Alt+Shift+O</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="191" />
      <source>Switch the input focus to the ollama AI window.</source>
      <translation>Schaltet den Eingabefokus auf das ollama KI Fenster um.</translation>
    </message>
    <message>
      <location filename="../../PluginAiOllama.py" line="194" />
      <source>&lt;b&gt;Activate ollama AI Interface&lt;/b&gt;&lt;p&gt;This switches the input focus to the ollama AI window.&lt;/p&gt;</source>
      <translation>&lt;b&gt;Aktiviere ollama KI Schnittstelle&lt;/b&gt;&lt;p&gt;Dies schaltet den Eingabefokus auf das ollama KI Fenster um.&lt;/p&gt;</translation>
    </message>
  </context>
  <context>
    <name>RunOllamaServerDialog</name>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>ollama Server</source>
      <translation>ollama Server</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Output</source>
      <translation>Ausgabe</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Press to restart the local ollama server.</source>
      <translation>Drücken, um den lokalen ollama Server neu zu starten.</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Re-Start Server</source>
      <translation>Server neu starten</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Press to stop the running ollama server.</source>
      <translation>Drücken, um den laufenden ollama Server anzuhalten.</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.ui" line="0" />
      <source>Stop Server</source>
      <translation>Server stoppen</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.py" line="79" />
      <source>Run Local 'ollama' Server</source>
      <translation>Lokalen 'ollama' Server ausführen</translation>
    </message>
    <message>
      <location filename="../RunOllamaServerDialog.py" line="80" />
      <source>The local 'ollama' server process could not be started.</source>
      <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation>
    </message>
  </context>
</TS>

eric ide

mercurial