Fri, 30 Aug 2024 12:04:28 +0200
Added the German translations and corrected some typos (e.g. 'loacl' -> 'local') and translation markup issues.
--- a/OllamaInterface/ConfigurationPage/OllamaPage.ui Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/ConfigurationPage/OllamaPage.ui Fri Aug 30 12:04:28 2024 +0200 @@ -51,12 +51,12 @@ </property> <item> <property name="text"> - <string>http</string> + <string notr="true">http</string> </property> </item> <item> <property name="text"> - <string>https</string> + <string notr="true">https</string> </property> </item> </widget>
--- a/OllamaInterface/ConfigurationPage/Ui_OllamaPage.py Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/ConfigurationPage/Ui_OllamaPage.py Fri Aug 30 12:04:28 2024 +0200 @@ -34,7 +34,9 @@ self.serverSchemeComboBox = QtWidgets.QComboBox(parent=self.groupBox) self.serverSchemeComboBox.setObjectName("serverSchemeComboBox") self.serverSchemeComboBox.addItem("") + self.serverSchemeComboBox.setItemText(0, "http") self.serverSchemeComboBox.addItem("") + self.serverSchemeComboBox.setItemText(1, "https") self.gridLayout.addWidget(self.serverSchemeComboBox, 0, 1, 1, 1) spacerItem = QtWidgets.QSpacerItem(387, 20, QtWidgets.QSizePolicy.Policy.Expanding, QtWidgets.QSizePolicy.Policy.Minimum) self.gridLayout.addItem(spacerItem, 0, 2, 1, 1) @@ -119,8 +121,6 @@ self.groupBox.setTitle(_translate("OllamaPage", "Server URL")) self.label.setText(_translate("OllamaPage", "Scheme:")) self.serverSchemeComboBox.setToolTip(_translate("OllamaPage", "Select the scheme of the \'ollama\' server URL.")) - self.serverSchemeComboBox.setItemText(0, _translate("OllamaPage", "http")) - self.serverSchemeComboBox.setItemText(1, _translate("OllamaPage", "https")) self.label_2.setText(_translate("OllamaPage", "Host:")) self.serverHostEdit.setToolTip(_translate("OllamaPage", "Enter the host name of the \'ollama\' server.")) self.label_3.setText(_translate("OllamaPage", "Port:"))
--- a/OllamaInterface/OllamaWidget.ui Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/OllamaWidget.ui Fri Aug 30 12:04:28 2024 +0200 @@ -58,9 +58,6 @@ <property name="toolTip"> <string>Press to reload the models list and update the 'ollama' version information.</string> </property> - <property name="statusTip"> - <string>Select to reload the list of selectable models.</string> - </property> </widget> </item> <item> @@ -71,8 +68,11 @@ <verstretch>0</verstretch> </sizepolicy> </property> + <property name="toolTip"> + <string>Select the model for the chat.</string> + </property> <property name="statusTip"> - <string>Select the model for the chat.</string> + <string/> </property> </widget> </item> @@ -122,7 +122,7 @@ </sizepolicy> </property> </widget> - <widget class="QWidget" name=""> + <widget class="QWidget" name="layoutWidget"> <layout class="QGridLayout" name="gridLayout"> <item row="0" column="0" rowspan="2"> <widget class="QPlainTextEdit" name="messageEdit">
--- a/OllamaInterface/RunOllamaServerDialog.py Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/RunOllamaServerDialog.py Fri Aug 30 12:04:28 2024 +0200 @@ -77,7 +77,7 @@ EricMessageBox.critical( None, self.tr("Run Local 'ollama' Server"), - self.tr("""The loacl 'ollama' server process could not be started."""), + self.tr("""The local 'ollama' server process could not be started."""), ) else: self.buttonBox.button(QDialogButtonBox.StandardButton.Close).setEnabled(
--- a/OllamaInterface/RunOllamaServerDialog.ui Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/RunOllamaServerDialog.ui Fri Aug 30 12:04:28 2024 +0200 @@ -50,7 +50,7 @@ <bool>false</bool> </property> <property name="toolTip"> - <string>Press to restart the loacl ollama server.</string> + <string>Press to restart the local ollama server.</string> </property> <property name="text"> <string>Re-Start Server</string>
--- a/OllamaInterface/Ui_OllamaWidget.py Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/Ui_OllamaWidget.py Fri Aug 30 12:04:28 2024 +0200 @@ -40,6 +40,7 @@ sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.modelComboBox.sizePolicy().hasHeightForWidth()) self.modelComboBox.setSizePolicy(sizePolicy) + self.modelComboBox.setStatusTip("") self.modelComboBox.setObjectName("modelComboBox") self.horizontalLayout.addWidget(self.modelComboBox) self.newChatButton = QtWidgets.QToolButton(parent=OllamaWidget) @@ -69,18 +70,18 @@ sizePolicy.setHeightForWidth(self.chatStackWidget.sizePolicy().hasHeightForWidth()) self.chatStackWidget.setSizePolicy(sizePolicy) self.chatStackWidget.setObjectName("chatStackWidget") - self.widget = QtWidgets.QWidget(parent=self.mainSplitter) - self.widget.setObjectName("widget") - self.gridLayout = QtWidgets.QGridLayout(self.widget) + self.layoutWidget = QtWidgets.QWidget(parent=self.mainSplitter) + self.layoutWidget.setObjectName("layoutWidget") + self.gridLayout = QtWidgets.QGridLayout(self.layoutWidget) self.gridLayout.setContentsMargins(0, 0, 0, 0) self.gridLayout.setObjectName("gridLayout") - self.messageEdit = QtWidgets.QPlainTextEdit(parent=self.widget) + self.messageEdit = QtWidgets.QPlainTextEdit(parent=self.layoutWidget) self.messageEdit.setTabChangesFocus(True) self.messageEdit.setObjectName("messageEdit") self.gridLayout.addWidget(self.messageEdit, 0, 0, 2, 1) spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Policy.Minimum, QtWidgets.QSizePolicy.Policy.Expanding) self.gridLayout.addItem(spacerItem2, 1, 1, 1, 1) - self.sendButton = QtWidgets.QToolButton(parent=self.widget) + self.sendButton = QtWidgets.QToolButton(parent=self.layoutWidget) self.sendButton.setEnabled(False) self.sendButton.setObjectName("sendButton") self.gridLayout.addWidget(self.sendButton, 0, 1, 1, 1) @@ -96,8 +97,7 @@ def retranslateUi(self, OllamaWidget): _translate = QtCore.QCoreApplication.translate 
self.reloadModelsButton.setToolTip(_translate("OllamaWidget", "Press to reload the models list and update the \'ollama\' version information.")) - self.reloadModelsButton.setStatusTip(_translate("OllamaWidget", "Select to reload the list of selectable models.")) - self.modelComboBox.setStatusTip(_translate("OllamaWidget", "Select the model for the chat.")) + self.modelComboBox.setToolTip(_translate("OllamaWidget", "Select the model for the chat.")) self.newChatButton.setToolTip(_translate("OllamaWidget", "Press to start a new chat.")) self.messageEdit.setToolTip(_translate("OllamaWidget", "Enter the message to be sent to the \'ollama\' server.")) self.messageEdit.setPlaceholderText(_translate("OllamaWidget", "Enter Message"))
--- a/OllamaInterface/Ui_RunOllamaServerDialog.py Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/Ui_RunOllamaServerDialog.py Fri Aug 30 12:04:28 2024 +0200 @@ -60,7 +60,7 @@ _translate = QtCore.QCoreApplication.translate RunOllamaServerDialog.setWindowTitle(_translate("RunOllamaServerDialog", "ollama Server")) self.groupBox.setTitle(_translate("RunOllamaServerDialog", "Output")) - self.restartServerButton.setToolTip(_translate("RunOllamaServerDialog", "Press to restart the loacl ollama server.")) + self.restartServerButton.setToolTip(_translate("RunOllamaServerDialog", "Press to restart the local ollama server.")) self.restartServerButton.setText(_translate("RunOllamaServerDialog", "Re-Start Server")) self.stopServerButton.setToolTip(_translate("RunOllamaServerDialog", "Press to stop the running ollama server.")) self.stopServerButton.setText(_translate("RunOllamaServerDialog", "Stop Server"))
--- a/OllamaInterface/i18n/ollama_de.ts Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/i18n/ollama_de.ts Fri Aug 30 12:04:28 2024 +0200 @@ -1,13 +1,13 @@ <?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> -<TS version="2.1"> +<TS version="2.1" language="de_DE"> <context> <name>OllamaChatWidget</name> <message> <location filename="../OllamaChatWidget.py" line="41" /> <source><b>{0} - {1}</b></source> <comment>title, model name</comment> - <translation type="unfinished" /> + <translation><b>{0} - {1}</b></translation> </message> </context> <context> @@ -15,37 +15,37 @@ <message> <location filename="../OllamaClient.py" line="301" /> <source>100% CPU</source> - <translation type="unfinished" /> + <translation>100% CPU</translation> </message> <message> <location filename="../OllamaClient.py" line="303" /> <source>100% GPU</source> - <translation type="unfinished" /> + <translation>100% GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="305" /> <source>unknown</source> - <translation type="unfinished" /> + <translation>unbekannt</translation> </message> <message> <location filename="../OllamaClient.py" line="309" /> <source>{0}% / {1}% CPU / GPU</source> - <translation type="unfinished" /> + <translation>{0}% / {1}% CPU / GPU</translation> </message> <message> <location filename="../OllamaClient.py" line="458" /> <source><p>A network error occurred.</p><p>Error: {0}</p></source> - <translation type="unfinished" /> + <translation><p>Es gab einen Netzwerkfehler.</p><p>Fehler: {0}</p></translation> </message> <message> <location filename="../OllamaClient.py" line="587" /> <source><p>Error: The local server at <b>{0}</b> is not responding.</p></source> - <translation type="unfinished" /> + <translation><p>Fehler: Der lokale Server auf <b>{0}</b> antwortet nicht.</p></translation> </message> <message> <location filename="../OllamaClient.py" line="591" /> <source><p>Error: The configured server at <b>{0}</b> is not 
responding.</p></source> - <translation type="unfinished" /> + <translation><p>Fehler: Der konfigurierte Server auf <b>{0}</b> antwortet nicht.</p></translation> </message> </context> <context> @@ -53,27 +53,27 @@ <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Available Models</source> - <translation type="unfinished" /> + <translation>Verfügbare Modelle</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Name</source> - <translation type="unfinished" /> + <translation>Name</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>ID</source> - <translation type="unfinished" /> + <translation>ID</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Size</source> - <translation type="unfinished" /> + <translation>Größe</translation> </message> <message> <location filename="../OllamaDetailedModelsDialog.ui" line="0" /> <source>Modified At</source> - <translation type="unfinished" /> + <translation>Geändert</translation> </message> </context> <context> @@ -81,27 +81,27 @@ <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to start a new chat based on the current history or switch to an already opened chat.</source> - <translation type="unfinished" /> + <translation>Drücken, um eine neue Unterhaltung basierend auf der aktuellen Chronik zu starten oder zu einer bereits offenen Unterhaltung zu schalten.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to edit the chat title.</source> - <translation type="unfinished" /> + <translation>Drücken, um den Titel der Unterhaltung zu bearbeiten.</translation> </message> <message> <location filename="../OllamaHistoryWidget.ui" line="0" /> <source>Press to delete this chat history.</source> - <translation type="unfinished" /> + 
<translation>Drücken, um die Chronik dieser Unterhaltung zu löschen.</translation> </message> <message> <location filename="../OllamaHistoryWidget.py" line="126" /> <source>Edit Chat Title</source> - <translation type="unfinished" /> + <translation>Titel der Unterhaltung bearbeiten</translation> </message> <message> <location filename="../OllamaHistoryWidget.py" line="127" /> <source>Enter the new title:</source> - <translation type="unfinished" /> + <translation>Gib den neuen Titel ein:</translation> </message> </context> <context> @@ -109,103 +109,93 @@ <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source><b>Configure 'ollama' Interface</b></source> - <translation type="unfinished" /> + <translation><b>'ollama'-Schnittstelle einstellen</b></translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Server URL</source> - <translation type="unfinished" /> + <translation>Server URL</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Scheme:</source> - <translation type="unfinished" /> + <translation>Schema:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Select the scheme of the 'ollama' server URL.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>http</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>https</source> - <translation type="unfinished" /> + <translation>Wähle das Schema der 'ollama' Server URL.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> - <translation type="unfinished" /> + <translation>Rechner:</translation> </message> <message> <location 
filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the host name of the 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Gib den Rechnernamen des 'ollama' Servers ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Port:</source> - <translation type="unfinished" /> + <translation>Port:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the 'ollama' server URL.</source> - <translation type="unfinished" /> + <translation>Gib den Port der 'ollama' Server URL ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Local Server</source> - <translation type="unfinished" /> + <translation>Lokaler Server</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the port of the local 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Gib den Port des lokalen 'ollama' Servers ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Model Library</source> - <translation type="unfinished" /> + <translation>Modellbibliothek</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>URL:</source> - <translation type="unfinished" /> + <translation>URL:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the URL of the 'ollama' model library.</source> - <translation type="unfinished" /> + <translation>Gib die URL der 'ollama' Modellbibliothek ein.</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Heartbeat Timer:</source> - <translation 
type="unfinished" /> + <translation>Heartbeat Timer:</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Enter the heartbeat timeout value (0 = disable).</source> - <translation type="unfinished" /> + <translation>Gib das Intervall für den Heartbeat Timer ein (0 = deaktiviert).</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Disabled</source> - <translation type="unfinished" /> + <translation>Deaktiviert</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source> s</source> - <translation type="unfinished" /> + <translation> s</translation> </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Use streaming chat response</source> - <translation type="unfinished" /> + <translation>Streaming-Chat-Antwort verwenden</translation> </message> </context> <context> @@ -213,23 +203,23 @@ <message> <location filename="../OllamaPullProgressDialog.ui" line="0" /> <source>Install Model</source> - <translation type="unfinished" /> + <translation>Modell installieren</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="138" /> <source><p>Installing model <b>{0}</b>.</p></source> - <translation type="unfinished" /> + <translation><p>Installiere Modell <b>{0}</b>.</p></translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="182" /> <source>{0} / {1}</source> <comment>completed / total</comment> - <translation type="unfinished" /> + <translation>{0} / {1}</translation> </message> <message> <location filename="../OllamaPullProgressDialog.py" line="201" /> <source>Error: {0}</source> - <translation type="unfinished" /> + <translation>Fehler: {0}</translation> </message> </context> <context> @@ -237,32 +227,32 @@ <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> 
<source>Running Models</source> - <translation type="unfinished" /> + <translation>Ausgeführte Modelle</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Name</source> - <translation type="unfinished" /> + <translation>Name</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>ID</source> - <translation type="unfinished" /> + <translation>ID</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Size</source> - <translation type="unfinished" /> + <translation>Größe</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Processor</source> - <translation type="unfinished" /> + <translation>Prozessor</translation> </message> <message> <location filename="../OllamaRunningModelsDialog.ui" line="0" /> <source>Expires</source> - <translation type="unfinished" /> + <translation>Läuft ab</translation> </message> </context> <context> @@ -270,102 +260,102 @@ <message> <location filename="../OllamaWidget.py" line="191" /> <source><b>ollama Server Version {0}</b></source> - <translation type="unfinished" /> + <translation><b>ollama Server Version {0}</b></translation> </message> <message> <location filename="../OllamaWidget.py" line="339" /> <source>Save Chat History</source> - <translation type="unfinished" /> + <translation>Chat Verlauf speichern</translation> </message> <message> <location filename="../OllamaWidget.py" line="340" /> <source><p>The chat history could not be saved to <b>{0}</b>.</p><p>Reason: {1}</p></source> - <translation type="unfinished" /> + <translation><p>Der Chat Verlauf konnte nicht nach <b>{0}</b> gespeichert werden.</p><p>Ursache: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="396" /> <location filename="../OllamaWidget.py" line="375" /> <source>Load Chat History</source> - <translation 
type="unfinished" /> + <translation>Chat Verlauf laden</translation> </message> <message> <location filename="../OllamaWidget.py" line="376" /> <source><p>The chat history could not be loaded from <b>{0}</b>.</p><p>Reason: {1}</p></source> - <translation type="unfinished" /> + <translation><p>Der Chat Verlauf konnte nicht aus <b>{0}</b> geladen werden.</p><p>Ursache: {1}</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="397" /> <source><p>These chats were not loaded because they already existed.</p>{0}</source> - <translation type="unfinished" /> + <translation><p>Diese Chats wurden nicht geladen, da sie bereits existieren.</p>{0}</translation> </message> <message> <location filename="../OllamaWidget.py" line="474" /> <location filename="../OllamaWidget.py" line="467" /> <source>New Chat</source> - <translation type="unfinished" /> + <translation>Neuer Chat</translation> </message> <message> <location filename="../OllamaWidget.py" line="468" /> <source>A model has to be selected first. Aborting...</source> - <translation type="unfinished" /> + <translation>Es muss zuerst ein Modell ausgewählt werden. 
Abbruch...</translation> </message> <message> <location filename="../OllamaWidget.py" line="475" /> <source>Enter a title for the new chat:</source> - <translation type="unfinished" /> + <translation>Gib einen Titel für den neuen Chat ein:</translation> </message> <message> <location filename="../OllamaWidget.py" line="632" /> <source>Chat History</source> - <translation type="unfinished" /> + <translation>Chat Verlauf</translation> </message> <message> <location filename="../OllamaWidget.py" line="633" /> <source>Load</source> - <translation type="unfinished" /> + <translation>Laden</translation> </message> <message> <location filename="../OllamaWidget.py" line="636" /> <source>Clear All</source> - <translation type="unfinished" /> + <translation>Alle löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="639" /> <source>Import</source> - <translation type="unfinished" /> + <translation>Importieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="640" /> <source>Export</source> - <translation type="unfinished" /> + <translation>Exportieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="646" /> <source>Model Management</source> - <translation type="unfinished" /> + <translation>Modellverwaltung</translation> </message> <message> <location filename="../OllamaWidget.py" line="915" /> <location filename="../OllamaWidget.py" line="647" /> <source>List Models</source> - <translation type="unfinished" /> + <translation>Modelle auflisten</translation> </message> <message> <location filename="../OllamaWidget.py" line="933" /> <location filename="../OllamaWidget.py" line="649" /> <source>List Running Models</source> - <translation type="unfinished" /> + <translation>Laufende Modelle auflisten</translation> </message> <message> <location filename="../OllamaWidget.py" line="653" /> <source>Show Model Library</source> - <translation type="unfinished" /> + <translation>Modell 
Bibliothek anzeigen</translation> </message> <message> <location filename="../OllamaWidget.py" line="959" /> <location filename="../OllamaWidget.py" line="657" /> <source>Install Model</source> - <translation type="unfinished" /> + <translation>Modell installieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="1032" /> @@ -373,149 +363,144 @@ <location filename="../OllamaWidget.py" line="1012" /> <location filename="../OllamaWidget.py" line="660" /> <source>Remove Model</source> - <translation type="unfinished" /> + <translation>Modell löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="667" /> <source>Local Server</source> - <translation type="unfinished" /> + <translation>Lokaler Server</translation> </message> <message> <location filename="../OllamaWidget.py" line="669" /> <source>Start with Monitoring</source> - <translation type="unfinished" /> + <translation>Mit Monitoring starten</translation> </message> <message> <location filename="../OllamaWidget.py" line="673" /> <source>Start</source> - <translation type="unfinished" /> + <translation>Starten</translation> </message> <message> <location filename="../OllamaWidget.py" line="676" /> <source>Stop</source> - <translation type="unfinished" /> + <translation>Stoppen</translation> </message> <message> <location filename="../OllamaWidget.py" line="690" /> <source>Configure...</source> - <translation type="unfinished" /> + <translation>Einstellungen...</translation> </message> <message> <location filename="../OllamaWidget.py" line="730" /> <source>Clear All Chat Histories</source> - <translation type="unfinished" /> + <translation>Alle Chat Verläufe löschen</translation> </message> <message> <location filename="../OllamaWidget.py" line="731" /> <source><p>Do you really want to delete all chat histories? 
This is <b>irreversible</b>.</p></source> - <translation type="unfinished" /> + <translation><p>Sollen wirklich alle Chat Verläufe gelöscht werden? Dies ist <b>unumkehrbar</b>.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="746" /> <source>Import Chat History</source> - <translation type="unfinished" /> + <translation>Chat Verlauf importieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="748" /> <source>Chat History Files (*.json);;All Files (*)</source> - <translation type="unfinished" /> + <translation>Chat Verlauf Dateien (*.json);;Alle Dateien (*)</translation> </message> <message> <location filename="../OllamaWidget.py" line="781" /> <location filename="../OllamaWidget.py" line="749" /> <source>Chat History Files (*.json)</source> - <translation type="unfinished" /> + <translation>Chat Verlauf Dateien (*.json)</translation> </message> <message> <location filename="../OllamaWidget.py" line="779" /> <location filename="../OllamaWidget.py" line="769" /> <source>Export Chat History</source> - <translation type="unfinished" /> + <translation>Chat Verlauf exportieren</translation> </message> <message> <location filename="../OllamaWidget.py" line="770" /> <source>Select the chats to be exported:</source> - <translation type="unfinished" /> + <translation>Wähle die zu exportierenden Chats:</translation> </message> <message> <location filename="../OllamaWidget.py" line="868" /> <source>Run Local 'ollama' Server</source> - <translation type="unfinished" /> + <translation>Lokalen 'ollama' Server ausführen</translation> </message> <message> <location filename="../OllamaWidget.py" line="869" /> <source>The loacl 'ollama' server process could not be started.</source> - <translation type="unfinished" /> + <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation> </message> <message> <location filename="../OllamaWidget.py" line="916" /> <source>There are no models 
available.</source> - <translation type="unfinished" /> + <translation>Es sind keine Modelle verfügbar.</translation> </message> <message> <location filename="../OllamaWidget.py" line="934" /> <source>There are no models running.</source> - <translation type="unfinished" /> + <translation>Es werden keine Modelle ausgeführt.</translation> </message> <message> <location filename="../OllamaWidget.py" line="960" /> <source>Enter the name of the model to be installed:</source> - <translation type="unfinished" /> + <translation>Gib den Namen des zu installierenden Modells ein:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1013" /> <source>Select the model to be removed by the 'ollama' server:</source> - <translation type="unfinished" /> + <translation>Wähle das vom 'ollama' Server zu entfernende Modell aus:</translation> </message> <message> <location filename="../OllamaWidget.py" line="1024" /> <source><p>The model <b>{0}</b> was deleted successfully.</p></source> - <translation type="unfinished" /> + <translation><p>Das Modell <b>{0}</b> wurde erfolgreich entfernt.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1033" /> <source><p>The model <b>{0}</b> could not be removed from the 'ollama' server.</p></source> - <translation type="unfinished" /> + <translation><p>Das Modell <b>{0}</b> konnte nicht vom 'ollama' Server entfernt werden.</p></translation> </message> <message> <location filename="../OllamaWidget.py" line="1049" /> <source>Network Error</source> - <translation type="unfinished" /> + <translation>Netzwerkfehler</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to reload the models list and update the 'ollama' version information.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../OllamaWidget.ui" line="0" /> - <source>Select to reload the list of selectable models.</source> - <translation 
type="unfinished" /> + <translation>Drücken, um die Modelliste neu zu laden und die 'ollama' Versionsinformation zu aktualisieren.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> - <translation type="unfinished" /> + <translation>Wähle das Modell für den Chat.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to start a new chat.</source> - <translation type="unfinished" /> + <translation>Drücken, um einen neuen Chat zu starten.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter the message to be sent to the 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Gib die an den 'ollama' Server zu sendende Nachricht ein.</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Enter Message</source> - <translation type="unfinished" /> + <translation>Nachricht eingeben</translation> </message> <message> <location filename="../OllamaWidget.ui" line="0" /> <source>Press to send the message of the current chat to the 'ollama' server.</source> - <translation type="unfinished" /> + <translation>Drücken, um die Nachricht des aktuellen Chats an den 'ollama' Server zu schicken.</translation> </message> </context> <context> @@ -526,22 +511,22 @@ <location filename="../../PluginAiOllama.py" line="176" /> <location filename="../../PluginAiOllama.py" line="82" /> <source>ollama AI Interface</source> - <translation type="unfinished" /> + <translation>ollama KI Schnittstelle</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="182" /> <source>Ctrl+Alt+Shift+O</source> - <translation type="unfinished" /> + <translation>Ctrl+Alt+Shift+O</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="188" /> <source>Switch the input focus to the ollama AI window.</source> - <translation 
type="unfinished" /> + <translation>Schaltet den Eingabefokus auf das ollama KI Fenster um.</translation> </message> <message> <location filename="../../PluginAiOllama.py" line="191" /> <source><b>Activate ollama AI Interface</b><p>This switches the input focus to the ollama AI window.</p></source> - <translation type="unfinished" /> + <translation><b>Aktiviere ollama KI Schnittstelle</b><p>Dies schaltet den Eingabefokus auf das ollama KI Fenster um.</p></translation> </message> </context> <context> @@ -549,42 +534,42 @@ <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>ollama Server</source> - <translation type="unfinished" /> + <translation>ollama Server</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Output</source> - <translation type="unfinished" /> + <translation>Ausgabe</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> - <source>Press to restart the loacl ollama server.</source> - <translation type="unfinished" /> + <source>Press to restart the local ollama server.</source> + <translation>Drücken, um den lokalen ollama Server neu zu starten.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Re-Start Server</source> - <translation type="unfinished" /> + <translation>Server neu starten</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Press to stop the running ollama server.</source> - <translation type="unfinished" /> + <translation>Drücken, um den laufenden ollama Server anzuhalten.</translation> </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> <source>Stop Server</source> - <translation type="unfinished" /> + <translation>Server stoppen</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="79" /> <source>Run Local 'ollama' Server</source> - <translation 
type="unfinished" /> + <translation>Lokalen 'ollama' Server ausführen</translation> </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> - <source>The loacl 'ollama' server process could not be started.</source> - <translation type="unfinished" /> + <source>The local 'ollama' server process could not be started.</source> + <translation>Der lokale 'ollama' Serverprozess konnte nicht gestartet werden.</translation> </message> </context> </TS>
--- a/OllamaInterface/i18n/ollama_empty.ts Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/i18n/ollama_empty.ts Fri Aug 30 12:04:28 2024 +0200 @@ -128,16 +128,6 @@ </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>http</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>https</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> <translation type="unfinished" /> </message> @@ -489,11 +479,6 @@ </message> <message> <location filename="../OllamaWidget.ui" line="0" /> - <source>Select to reload the list of selectable models.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> <translation type="unfinished" /> </message> @@ -558,7 +543,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> - <source>Press to restart the loacl ollama server.</source> + <source>Press to restart the local ollama server.</source> <translation type="unfinished" /> </message> <message> @@ -583,7 +568,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> - <source>The loacl 'ollama' server process could not be started.</source> + <source>The local 'ollama' server process could not be started.</source> <translation type="unfinished" /> </message> </context>
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/OllamaInterface/i18n/ollama_en.qm Fri Aug 30 12:04:28 2024 +0200 @@ -0,0 +1,1 @@ +<¸dÊÍ!¿`¡½Ý \ No newline at end of file
--- a/OllamaInterface/i18n/ollama_en.ts Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/i18n/ollama_en.ts Fri Aug 30 12:04:28 2024 +0200 @@ -128,16 +128,6 @@ </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>http</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>https</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> <translation type="unfinished" /> </message> @@ -489,11 +479,6 @@ </message> <message> <location filename="../OllamaWidget.ui" line="0" /> - <source>Select to reload the list of selectable models.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> <translation type="unfinished" /> </message> @@ -558,7 +543,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> - <source>Press to restart the loacl ollama server.</source> + <source>Press to restart the local ollama server.</source> <translation type="unfinished" /> </message> <message> @@ -583,7 +568,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> - <source>The loacl 'ollama' server process could not be started.</source> + <source>The local 'ollama' server process could not be started.</source> <translation type="unfinished" /> </message> </context>
--- a/OllamaInterface/i18n/ollama_es.ts Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/i18n/ollama_es.ts Fri Aug 30 12:04:28 2024 +0200 @@ -128,16 +128,6 @@ </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>http</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>https</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> <translation type="unfinished" /> </message> @@ -489,11 +479,6 @@ </message> <message> <location filename="../OllamaWidget.ui" line="0" /> - <source>Select to reload the list of selectable models.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> <translation type="unfinished" /> </message> @@ -558,7 +543,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> - <source>Press to restart the loacl ollama server.</source> + <source>Press to restart the local ollama server.</source> <translation type="unfinished" /> </message> <message> @@ -583,7 +568,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> - <source>The loacl 'ollama' server process could not be started.</source> + <source>The local 'ollama' server process could not be started.</source> <translation type="unfinished" /> </message> </context>
--- a/OllamaInterface/i18n/ollama_ru.ts Thu Aug 29 13:59:50 2024 +0200 +++ b/OllamaInterface/i18n/ollama_ru.ts Fri Aug 30 12:04:28 2024 +0200 @@ -128,16 +128,6 @@ </message> <message> <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>http</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> - <source>https</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../ConfigurationPage/OllamaPage.ui" line="0" /> <source>Host:</source> <translation type="unfinished" /> </message> @@ -489,11 +479,6 @@ </message> <message> <location filename="../OllamaWidget.ui" line="0" /> - <source>Select to reload the list of selectable models.</source> - <translation type="unfinished" /> - </message> - <message> - <location filename="../OllamaWidget.ui" line="0" /> <source>Select the model for the chat.</source> <translation type="unfinished" /> </message> @@ -558,7 +543,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.ui" line="0" /> - <source>Press to restart the loacl ollama server.</source> + <source>Press to restart the local ollama server.</source> <translation type="unfinished" /> </message> <message> @@ -583,7 +568,7 @@ </message> <message> <location filename="../RunOllamaServerDialog.py" line="80" /> - <source>The loacl 'ollama' server process could not be started.</source> + <source>The local 'ollama' server process could not be started.</source> <translation type="unfinished" /> </message> </context>
--- a/PluginAiOllama.epj Thu Aug 29 13:59:50 2024 +0200 +++ b/PluginAiOllama.epj Fri Aug 30 12:04:28 2024 +0200 @@ -328,8 +328,10 @@ "TRANSLATIONEXCEPTIONS": [], "TRANSLATIONPATTERN": "OllamaInterface/i18n/ollama_%language%.ts", "TRANSLATIONS": [ + "OllamaInterface/i18n/ollama_de.qm", "OllamaInterface/i18n/ollama_de.ts", "OllamaInterface/i18n/ollama_empty.ts", + "OllamaInterface/i18n/ollama_en.qm", "OllamaInterface/i18n/ollama_en.ts", "OllamaInterface/i18n/ollama_es.ts", "OllamaInterface/i18n/ollama_ru.ts"