<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="de_DE"> <context> <name>OllamaChatWidget</name> <message> <location filename="../OllamaChatWidget.py" line="60" /> <source><b>{0} - {1}</b></source> <comment>title, model name</comment> <translation><b>{0} - {1}</b></translation> </message> </context> <context> <name>OllamaClient</name> <message> <location filename="../OllamaClient.py" line="301" /> <source>100% CPU</source> <translation>100% CPU</translation> </message> <message> <location filename="../OllamaClient.py" line="303" /> <source>100% GPU</source> <translation>100% GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="305" /> <source>unknown</source> <translation>unbekannt</translation> </message> <message> <location filename="../OllamaClient.py" line="309" /> <source>{0}% / {1}% CPU / GPU</source> <translation>{0}% / {1}% CPU / GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="465" /> <source><p>A network error occurred.</p><p>Error: {0}</p></source> <translation><p>Es gab einen Netzwerkfehler.</p><p>Fehler: {0}</p></translation> </message> <message> <location filename="../OllamaClient.py" line="597" /> <source><p>Error: The local server at <b>{0}</b> is not responding.</p></source> <translation><p>Fehler: Der lokale Server auf <b>{0}</b> antwortet nicht.</p></translation> </message> <message> <location filename="../OllamaClient.py" line="599" /> <source><p>Error: The configured server at <b>{0}</b> is not responding.</p></source> <translation><p>Fehler: Der konfigurierte Server auf <b>{0}</b> antwortet nicht.</p></translation> </message> </context> <context> <name>OllamaDetailedModelsDialog</name> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Available Models</source> <translation>Verfügbare Modelle</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Name</source> 
<translation>Name</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>ID</source> <translation>ID</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Size</source> <translation>Größe</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Modified At</source> <translation>Geändert</translation> </message> </context> <context> <name>OllamaHistoryEditDialog</name> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0" /> <source>Edit Chat Parameters</source> <translation>Unterhaltungsparameter bearbeiten</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0" /> <source>Chat Title:</source> <translation>Unterhaltungstitel:</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0" /> <source>Enter the title of the chat.</source> <translation>Gib den Titel der Unterhaltung ein.</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0" /> <source>Model:</source> <translation>Modell:</translation> </message> <message> <location filename="../OllamaHistoryEditDialog.ui" line="0" /> <source>Select the model to be used by the chat.</source> <translation>Wähle das für die Unterhaltung zu verwendende Modell.</translation> </message> </context> <context> <name>OllamaHistoryWidget</name> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to start a new chat based on the current history or switch to an already opened chat.</source> <translation>Drücken, um eine neue Unterhaltung basierend auf der aktuellen Chronik zu starten oder zu einer bereits offenen Unterhaltung zu schalten.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to edit the chat title.</source> <translation>Drücken, um den Titel der 
Unterhaltung zu bearbeiten.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to view the current chat history in a separate window.</source> <translation>Drücken, um den Chat Verlauf in einem separaten Fenster anzuzeigen.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to delete this chat history.</source> <translation>Drücken, um die Chronik dieser Unterhaltung zu löschen.</translation> </message> </context> <context> <name>OllamaPage</name> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source><b>Configure 'ollama' Interface</b></source> <translation><b>'ollama'-Schnittstelle einstellen</b></translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Server URL</source> <translation>Server URL</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Scheme:</source> <translation>Schema:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Select the scheme of the 'ollama' server URL.</source> <translation>Wähle das Schema der 'ollama' Server URL.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> <translation>Rechner:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the host name of the 'ollama' server.</source> <translation>Gib den Rechnernamen des 'ollama' Servers ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Port:</source> <translation>Port:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the 
'ollama' server URL.</source> <translation>Gib den Port der 'ollama' Server URL ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Local Server</source> <translation>Lokaler Server</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the local 'ollama' server.</source> <translation>Gib den Port des lokalen 'ollama' Servers ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>URLs</source> <translation>URLs</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Model Library:</source> <translation>Modellbibliothek:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the URL of the 'ollama' model library. Leave empty to use the default URL.</source> <translation>Gib die URL der 'ollama' Modellbibliothek ein. Leer lassen zur Verwendung der Standard-URL.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Download:</source> <translation>Download:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the URL of the 'ollama' download page. Leave empty to use the default URL.</source> <translation>Gib die URL der 'ollama' Downloadseite ein. Leer lassen zur Verwendung der Standard-URL.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Blog:</source> <translation>Blog:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the URL of the 'ollama' blog. Leave empty to use the default URL.</source> <translation>Gib die URL des 'ollama' Blogs ein. 
Leer lassen zur Verwendung der Standard-URL.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Heartbeat Timer:</source> <translation>Heartbeat Timer:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the heartbeat timeout value (0 = disable).</source> <translation>Gib das Intervall für den Heartbeat Timer ein (0 = deaktiviert).</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Disabled</source> <translation>Deaktiviert</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source> s</source> <translation> s</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Use streaming chat response</source> <translation>Streaming-Chat-Antwort verwenden</translation> </message> </context> <context> <name>OllamaPullProgressDialog</name> <message> <location filename="../OllamaPullProgressDialog.ui" line="0" /> <source>Install Model</source> <translation>Modell installieren</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="138" /> <source><p>Installing model <b>{0}</b>.</p></source> <translation><p>Installiere Modell <b>{0}</b>.</p></translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="182" /> <source>{0} / {1}</source> <comment>completed / total</comment> <translation>{0} / {1}</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="201" /> <source>Error: {0}</source> <translation>Fehler: {0}</translation> </message> </context> <context> <name>OllamaRunningModelsDialog</name> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Running Models</source> <translation>Ausgeführte Modelle</translation> </message> <message> <location 
filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Name</source> <translation>Name</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>ID</source> <translation>ID</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Size</source> <translation>Größe</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Processor</source> <translation>Prozessor</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Expires</source> <translation>Läuft ab</translation> </message> </context> <context> <name>OllamaWidget</name> <message> <location filename="../OllamaWidget.py" line="213" /> <source><b>ollama Server Version {0}</b></source> <translation><b>ollama Server Version {0}</b></translation> </message> <message> <location filename="../OllamaWidget.py" line="377" /> <source>Save Chat History</source> <translation>Chat Verlauf speichern</translation> </message> <message> <location filename="../OllamaWidget.py" line="378" /> <source><p>The chat history could not be saved to <b>{0}</b>.</p><p>Reason: {1}</p></source> <translation><p>Der Chat Verlauf konnte nicht nach <b>{0}</b> gespeichert werden.</p><p>Ursache: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="434" /> <location filename="../OllamaWidget.py" line="413" /> <source>Load Chat History</source> <translation>Chat Verlauf laden</translation> </message> <message> <location filename="../OllamaWidget.py" line="414" /> <source><p>The chat history could not be loaded from <b>{0}</b>.</p><p>Reason: {1}</p></source> <translation><p>Der Chat Verlauf konnte nicht aus <b>{0}</b> geladen werden.</p><p>Ursache: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="435" /> <source><p>These chats were not loaded because they already 
existed.</p>{0}</source> <translation><p>Diese Chats wurden nicht geladen, da sie bereits existieren.</p>{0}</translation> </message> <message> <location filename="../OllamaWidget.py" line="512" /> <location filename="../OllamaWidget.py" line="505" /> <source>New Chat</source> <translation>Neuer Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="506" /> <source>A model has to be selected first. Aborting...</source> <translation>Es muss zuerst ein Modell ausgewählt werden. Abbruch...</translation> </message> <message> <location filename="../OllamaWidget.py" line="513" /> <source>Enter a title for the new chat:</source> <translation>Gib einen Titel für den neuen Chat ein:</translation> </message> <message> <location filename="../OllamaWidget.py" line="712" /> <source>Chat History</source> <translation>Chat Verlauf</translation> </message> <message> <location filename="../OllamaWidget.py" line="713" /> <source>Load</source> <translation>Laden</translation> </message> <message> <location filename="../OllamaWidget.py" line="716" /> <source>Clear All</source> <translation>Alle löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="719" /> <source>Import</source> <translation>Importieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="720" /> <source>Export</source> <translation>Exportieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="726" /> <source>Model Management</source> <translation>Modellverwaltung</translation> </message> <message> <location filename="../OllamaWidget.py" line="1018" /> <location filename="../OllamaWidget.py" line="727" /> <source>List Models</source> <translation>Modelle auflisten</translation> </message> <message> <location filename="../OllamaWidget.py" line="1036" /> <location filename="../OllamaWidget.py" line="729" /> <source>List Running Models</source> <translation>Laufende Modelle auflisten</translation> 
</message> <message> <location filename="../OllamaWidget.py" line="733" /> <source>Show Model Library</source> <translation>Modell Bibliothek anzeigen</translation> </message> <message> <location filename="../OllamaWidget.py" line="1053" /> <location filename="../OllamaWidget.py" line="738" /> <source>Install Model</source> <translation>Modell installieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="1126" /> <location filename="../OllamaWidget.py" line="1117" /> <location filename="../OllamaWidget.py" line="1106" /> <location filename="../OllamaWidget.py" line="741" /> <source>Remove Model</source> <translation>Modell löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="748" /> <source>Local Server</source> <translation>Lokaler Server</translation> </message> <message> <location filename="../OllamaWidget.py" line="750" /> <source>Start with Monitoring</source> <translation>Mit Monitoring starten</translation> </message> <message> <location filename="../OllamaWidget.py" line="754" /> <source>Start</source> <translation>Starten</translation> </message> <message> <location filename="../OllamaWidget.py" line="757" /> <source>Stop</source> <translation>Stoppen</translation> </message> <message> <location filename="../OllamaWidget.py" line="764" /> <source>ollama URLs</source> <translation>ollama URLs</translation> </message> <message> <location filename="../OllamaWidget.py" line="766" /> <source>Model Library</source> <translation>Modellbibliothek</translation> </message> <message> <location filename="../OllamaWidget.py" line="770" /> <source>Download</source> <translation>Download</translation> </message> <message> <location filename="../OllamaWidget.py" line="774" /> <source>Blog</source> <translation>Blog</translation> </message> <message> <location filename="../OllamaWidget.py" line="791" /> <source>Configure...</source> <translation>Einstellungen...</translation> </message> <message> <location 
filename="../OllamaWidget.py" line="832" /> <source>Clear All Chat Histories</source> <translation>Alle Chat Verläufe löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="833" /> <source><p>Do you really want to delete all chat histories? This is <b>irreversible</b>.</p></source> <translation><p>Sollen wirklich alle Chat Verläufe gelöscht werden? Dies ist <b>unumkehrbar</b>.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="848" /> <source>Import Chat History</source> <translation>Chat Verlauf importieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="850" /> <source>Chat History Files (*.json);;All Files (*)</source> <translation>Chat Verlauf Dateien (*.json);;Alle Dateien (*)</translation> </message> <message> <location filename="../OllamaWidget.py" line="884" /> <location filename="../OllamaWidget.py" line="851" /> <source>Chat History Files (*.json)</source> <translation>Chat Verlauf Dateien (*.json)</translation> </message> <message> <location filename="../OllamaWidget.py" line="882" /> <location filename="../OllamaWidget.py" line="871" /> <source>Export Chat History</source> <translation>Chat Verlauf exportieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="872" /> <source>Select the chats to be exported:</source> <translation>Wähle die zu exportierenden Chats:</translation> </message> <message> <location filename="../OllamaWidget.py" line="971" /> <source>Run Local 'ollama' Server</source> <translation>Lokalen 'ollama' Server ausführen</translation> </message> <message> <location filename="../OllamaWidget.py" line="972" /> <source>The loacl 'ollama' server process could not be started.</source> <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1019" /> <source>There are no models available.</source> 
<translation>Es sind keine Modelle verfügbar.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1037" /> <source>There are no models running.</source> <translation>Es werden keine Modelle ausgeführt.</translation> </message> <message> <location filename="../OllamaWidget.py" line="1054" /> <source>Enter the name of the model to be installed:</source> <translation>Gib den Namen des zu installierenden Modells ein:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1107" /> <source>Select the model to be removed by the 'ollama' server:</source> <translation>Wähle das vom 'ollama' Server zu entfernende Modell aus:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1118" /> <source><p>The model <b>{0}</b> was deleted successfully.</p></source> <translation><p>Das Modell <b>{0}</b> wurde erfolgreich entfernt.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1127" /> <source><p>The model <b>{0}</b> could not be removed from the 'ollama' server.</p></source> <translation><p>Das Modell <b>{0}</b> konnte nicht vom 'ollama' Server entfernt werden.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1154" /> <source>Network Error</source> <translation>Netzwerkfehler</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to reload the models list and update the 'ollama' version information.</source> <translation>Drücken, um die Modelliste neu zu laden und die 'ollama' Versionsinformation zu aktualisieren.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> <translation>Wähle das Modell für den Chat.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to start a new chat.</source> <translation>Drücken, um einen neuen Chat zu starten.</translation> 
</message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter the message to be sent to the 'ollama' server.</source> <translation>Gib die an den 'ollama' Server zu sendende Nachricht ein.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter Message and press 'Ctrl-Return' to send it or use the send button.</source> <translation>Nachricht eingeben und mit 'Strg-Return' senden oder den Senden Knopf verwenden.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to send the message of the current chat to the 'ollama' server.</source> <translation>Drücken, um die Nachricht des aktuellen Chats an den 'ollama' Server zu schicken.</translation> </message> </context> <context> <name>PluginOllamaInterface</name> <message> <location filename="../../PluginAiOllama.py" line="184" /> <location filename="../../PluginAiOllama.py" line="183" /> <location filename="../../PluginAiOllama.py" line="179" /> <location filename="../../PluginAiOllama.py" line="83" /> <source>ollama AI Interface</source> <translation>ollama KI Schnittstelle</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="185" /> <source>Ctrl+Alt+Shift+O</source> <translation>Ctrl+Alt+Shift+O</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="191" /> <source>Switch the input focus to the ollama AI window.</source> <translation>Schaltet den Eingabefokus auf das ollama KI Fenster um.</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="194" /> <source><b>Activate ollama AI Interface</b><p>This switches the input focus to the ollama AI window.</p></source> <translation><b>Aktiviere ollama KI Schnittstelle</b><p>Dies schaltet den Eingabefokus auf das ollama KI Fenster um.</p></translation> </message> </context> <context> <name>RunOllamaServerDialog</name> <message> <location 
filename="../RunOllamaServerDialog.ui" line="0" /> <source>ollama Server</source> <translation>ollama Server</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Output</source> <translation>Ausgabe</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Press to restart the local ollama server.</source> <translation>Drücken, um den lokalen ollama Server neu zu starten.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Re-Start Server</source> <translation>Server neu starten</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Press to stop the running ollama server.</source> <translation>Drücken, um den laufenden ollama Server anzuhalten.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Stop Server</source> <translation>Server stoppen</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="79" /> <source>Run Local 'ollama' Server</source> <translation>Lokalen 'ollama' Server ausführen</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> <source>The local 'ollama' server process could not be started.</source> <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation> </message> </context> </TS>