Wed, 28 Aug 2024 18:20:33 +0200
Changed the message input widget to a QPlainTextEdit to allow entering longer, multi-line texts.
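A minimal, self-contained sketch (not part of the plug-in; the MessageInputDemo class and its wiring are illustrative assumptions) of what this change implies: with a QPlainTextEdit the text is read via toPlainText() instead of QLineEdit.text(), the textChanged signal carries no string argument, and there is no returnPressed signal, so sending is bound to a button. The widget below mirrors the pattern used by OllamaWidget further down (sendButton enabled only while the editor has text).

# -*- coding: utf-8 -*-
# Illustrative demo only; assumes PyQt6 is installed.

import sys

from PyQt6.QtWidgets import (
    QApplication,
    QPlainTextEdit,
    QPushButton,
    QVBoxLayout,
    QWidget,
)


class MessageInputDemo(QWidget):
    """Demo widget with a multi-line message editor and a send button."""

    def __init__(self, parent=None):
        super().__init__(parent)

        self.messageEdit = QPlainTextEdit(self)
        self.sendButton = QPushButton("Send", self)
        self.sendButton.setEnabled(False)

        layout = QVBoxLayout(self)
        layout.addWidget(self.messageEdit)
        layout.addWidget(self.sendButton)

        # QPlainTextEdit.textChanged has no argument (unlike QLineEdit),
        # so the current text is queried inside the slot.
        self.messageEdit.textChanged.connect(
            lambda: self.sendButton.setEnabled(bool(self.messageEdit.toPlainText()))
        )
        self.sendButton.clicked.connect(self.__send)

    def __send(self):
        # Read the (possibly multi-line) message and clear the editor.
        print(self.messageEdit.toPlainText())
        self.messageEdit.clear()


if __name__ == "__main__":
    app = QApplication(sys.argv)
    w = MessageInputDemo()
    w.show()
    sys.exit(app.exec())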
# -*- coding: utf-8 -*-

# Copyright (c) 2024 Detlev Offenbach <detlev@die-offenbachs.de>
#

"""
Module implementing the main ollama interface widget.
"""

import json
import os

from PyQt6.QtCore import QProcess, QProcessEnvironment, Qt, QTimer, QUrl, pyqtSlot
from PyQt6.QtGui import QDesktopServices
from PyQt6.QtWidgets import (
    QDialog,
    QInputDialog,
    QLineEdit,
    QMenu,
    QVBoxLayout,
    QWidget,
)

from eric7 import Globals
from eric7.EricGui import EricPixmapCache
from eric7.EricWidgets import EricFileDialog, EricMessageBox
from eric7.EricWidgets.EricApplication import ericApp
from eric7.EricWidgets.EricListSelectionDialog import EricListSelectionDialog

from .OllamaChatWidget import OllamaChatWidget
from .OllamaClient import OllamaClient
from .OllamaHistoryWidget import OllamaHistoryWidget
from .Ui_OllamaWidget import Ui_OllamaWidget


class OllamaWidget(QWidget, Ui_OllamaWidget):
    """
    Class implementing the main ollama interface widget.
    """

    OllamaHistoryFile = "ollama_history.json"

    def __init__(self, plugin, fromEric=True, parent=None):
        """
        Constructor

        @param plugin reference to the plug-in object
        @type PluginOllamaInterface
        @param fromEric flag indicating the eric-ide mode (defaults to True)
            (True = eric-ide mode, False = application mode)
        @type bool (optional)
        @param parent reference to the parent widget (defaults to None)
        @type QWidget (optional)
        """
        super().__init__(parent)
        self.setupUi(self)

        self.__plugin = plugin
        self.__client = OllamaClient(plugin, self)

        if fromEric:
            self.layout().setContentsMargins(0, 3, 0, 0)
        else:
            self.layout().setContentsMargins(0, 0, 0, 0)

        iconSuffix = "-dark" if ericApp().usesDarkPalette() else "-light"

        self.ollamaMenuButton.setIcon(EricPixmapCache.getIcon("superMenu"))
        self.reloadModelsButton.setIcon(EricPixmapCache.getIcon("reload"))
        self.newChatButton.setIcon(EricPixmapCache.getIcon("plus"))
        self.sendButton.setIcon(
            EricPixmapCache.getIcon(
                os.path.join("OllamaInterface", "icons", "send{0}".format(iconSuffix))
            )
        )

        self.ollamaMenuButton.setAutoRaise(True)
        self.ollamaMenuButton.setShowMenuInside(True)

        self.__chatHistoryLayout = QVBoxLayout()
        self.historyScrollWidget.setLayout(self.__chatHistoryLayout)
        self.__chatHistoryLayout.addStretch(1)

        self.mainSplitter.setSizes([200, 2000, 100])

        self.newChatButton.setEnabled(False)
        self.__handleServerStateChanged(False)

        self.__pullProgressDialog = None
        self.__pulling = False

        self.__localServerDialog = None
        self.__localServerProcess = None

        self.__availableModels = []

        self.__connectClient()

        self.__initOllamaMenu()

        self.sendButton.clicked.connect(self.__sendMessage)

        self.__loadHistory()

        self.__updateMessageEditState()

    def __connectClient(self):
        """
        Private method to connect the client signals.
        """
        self.__client.serverStateChanged.connect(self.__handleServerStateChanged)
        self.__client.serverVersion.connect(self.__setHeaderLabel)
        self.__client.modelsList.connect(self.__populateModelSelector)
        self.__client.modelsList.connect(self.__checkHistoryModels)
        self.__client.replyReceived.connect(self.__handleServerMessage)
        self.__client.pullStatus.connect(self.__handlePullStatus)
        self.__client.pullError.connect(self.__handlePullError)
        self.__client.errorOccurred.connect(self.__handleClientError)
        self.__client.finished.connect(self.__handleClientFinished)

    @pyqtSlot(bool)
    def __handleServerStateChanged(self, ok):
        """
        Private slot handling a change in the 'ollama' server responsiveness.

        @param ok flag indicating a responsive 'ollama' server
        @type bool
        """
        if ok:
            self.__finishSetup()
        else:
            self.ollamaVersionLabel.setText(
                self.tr("<b>Error: The configured server is not responding.</b>")
            )

        self.setEnabled(ok)

    @pyqtSlot()
    def __finishSetup(self):
        """
        Private slot to finish the UI setup.
        """
        self.__client.version()
        self.__client.list()

    @pyqtSlot()
    def on_reloadModelsButton_clicked(self):
        """
        Private slot to reload the list of available models.
        """
        self.__finishSetup()

    @pyqtSlot(str)
    def on_modelComboBox_currentTextChanged(self, model):
        """
        Private slot handling the selection of a model.

        @param model name of the selected model
        @type str
        """
        self.newChatButton.setEnabled(bool(model))

    ############################################################################
    ## Methods handling signals from the 'ollama' client.
    ############################################################################

    @pyqtSlot(str)
    def __setHeaderLabel(self, version):
        """
        Private slot to receive the 'ollama' server version and set the header.

        @param version 'ollama' server version
        @type str
        """
        self.ollamaVersionLabel.setText(
            self.tr("<b>ollama Server Version {0}</b>").format(version)
        )

    @pyqtSlot(list)
    def __populateModelSelector(self, modelNames):
        """
        Private slot to receive the list of available model names and populate
        the model selector with them.

        @param modelNames list of model names
        @type list[str]
        """
        self.__availableModels = modelNames[:]

        self.modelComboBox.clear()
        self.modelComboBox.addItem("")
        self.modelComboBox.addItems(
            sorted(n.replace(":latest", "") for n in modelNames)
        )

    @pyqtSlot(list)
    def __checkHistoryModels(self, modelNames):
        """
        Private slot to set the chat history entry states according to
        available models.

        @param modelNames list of model names
        @type list[str]
        """
        names = [n.replace(":latest", "") for n in modelNames]
        for index in range(self.__chatHistoryLayout.count() - 1):
            self.__chatHistoryLayout.itemAt(index).widget().checkModelAvailable(names)

    ############################################################################
    ## Methods handling signals from the chat history widgets.
    ############################################################################

    def __createHistoryWidget(self, title, model, jsonStr=None):
        """
        Private method to create a chat history widget and insert it into the
        respective layout.

        @param title title of the chat
        @type str
        @param model name of the model
        @type str
        @param jsonStr string containing JSON serialized chat history data
            (defaults to None)
        @type str (optional)
        @return reference to the created history widget
        @rtype OllamaHistoryWidget
        """
        history = OllamaHistoryWidget(title=title, model=model, jsonStr=jsonStr)
        self.__chatHistoryLayout.insertWidget(
            self.__chatHistoryLayout.count() - 1, history
        )
        history.deleteChatHistory.connect(self.__deleteHistory)
        history.dataChanged.connect(self.__saveHistory)
        history.newChatWithHistory.connect(self.__newChatWithHistory)
        self.__saveHistory()

        QTimer.singleShot(0, self.__scrollHistoryToBottom)

        return history

    @pyqtSlot()
    def __scrollHistoryToBottom(self):
        """
        Private slot to scroll the history widget to the bottom.
        """
        scrollbar = self.historyScrollArea.verticalScrollBar()
        scrollbar.setMaximum(self.historyScrollWidget.height())
        scrollbar.setValue(scrollbar.maximum())

    def __findHistoryWidget(self, hid):
        """
        Private method to find the widget of a given chat history ID.

        @param hid ID of the chat history
        @type str
        @return reference to the chat history widget
        @rtype OllamaHistoryWidget
        """
        for index in range(self.__chatHistoryLayout.count() - 1):
            widget = self.__chatHistoryLayout.itemAt(index).widget()
            if widget.getId() == hid:
                return widget

        return None

    def __getHistoryIds(self):
        """
        Private method to get a list of all history IDs.

        @return list of history IDs
        @rtype list[str]
        """
        hids = []
        for index in range(self.__chatHistoryLayout.count() - 1):
            widget = self.__chatHistoryLayout.itemAt(index).widget()
            hids.append(widget.getId())

        return hids

    def __historyFilePath(self):
        """
        Private method to get the path name of the chat history file.

        @return file path of the chat history file
        @rtype str
        """
        return os.path.join(Globals.getConfigDir(), OllamaWidget.OllamaHistoryFile)

    @pyqtSlot()
    def __saveHistory(self):
        """
        Private method to save the current chat history to the history file.
        """
        # step 1: collect all history entries
        entries = {}
        for index in range(self.__chatHistoryLayout.count() - 1):
            widget = self.__chatHistoryLayout.itemAt(index).widget()
            hid = widget.getId()
            entries[hid] = widget.saveToJson()

        # step 2: save the collected chat histories
        filePath = self.__historyFilePath()
        self.__saveChatHistoryFile(filePath, entries)

    def __saveChatHistoryFile(self, filePath, entries):
        """
        Private method to save the chat history entries to a file.

        @param filePath file name to save to
        @type str
        @param entries dictionary containing the chat history entries as a JSON
            serialized string indexed by their ID
        @type dict[str, str]
        """
        try:
            with open(filePath, "w") as f:
                json.dump(entries, f)
        except OSError as err:
            EricMessageBox.critical(
                self,
                self.tr("Save Chat History"),
                self.tr(
                    "<p>The chat history could not be saved to <b>{0}</b>.</p>"
                    "<p>Reason: {1}</p>"
                ).format(filePath, str(err)),
            )

    def __loadHistory(self):
        """
        Private method to load a previously saved history file.
        """
        # step 1: load the history file, if it exists
        filePath = self.__historyFilePath()
        self.__loadChatHistoriesFile(filePath)

    def __loadChatHistoriesFile(self, filePath, reportDuplicates=False):
        """
        Private method to load chat history entries from a given file.

        @param filePath path of the chat history file
        @type str
        @param reportDuplicates flag indicating to report skipped chat history
            entries (defaults to False)
        @type bool (optional)
        @return flag indicating success
        @rtype bool
        """
        if not os.path.exists(filePath):
            return False

        try:
            with open(filePath, "r") as f:
                entries = json.load(f)
        except OSError as err:
            EricMessageBox.critical(
                self,
                self.tr("Load Chat History"),
                self.tr(
                    "<p>The chat history could not be loaded from <b>{0}</b>.</p>"
                    "<p>Reason: {1}</p>"
                ).format(filePath, str(err)),
            )
            return False

        # step 2: create history widgets
        existingIDs = self.__getHistoryIds()
        skipped = []
        for hid in entries:
            if hid in existingIDs:
                data = json.loads(entries[hid])
                skipped.append(data["title"])
            else:
                self.__createHistoryWidget("", "", jsonStr=entries[hid])

        if skipped and reportDuplicates:
            EricMessageBox.warning(
                self,
                self.tr("Load Chat History"),
                self.tr(
                    "<p>These chats were not loaded because they already existed.</p>"
                    "{0}"
                ).format("<ul><li>{0}</li></ul>".format("</li><li>".join(skipped))),
            )

        return True

    def clearHistory(self):
        """
        Public method to clear the history entries and close all chats.
""" while self.__chatHistoryLayout.count() > 1: # do not delete the spacer at the end of the list item = self.__chatHistoryLayout.takeAt(0) if item is not None: hid = item.widget().getId() self.__removeChatWidget(hid) item.widget().deleteLater() self.__saveHistory() @pyqtSlot(str) def __deleteHistory(self, hid): """ Private slot to delete the history with the given ID. @param hid ID of the history to be deleted @type str """ widget = self.__findHistoryWidget(hid) if widget is not None: widgetIndex = self.__chatHistoryLayout.indexOf(widget) item = self.__chatHistoryLayout.takeAt(widgetIndex) if item is not None: item.widget().deleteLater() self.__saveHistory() self.__removeChatWidget(hid) ####################################################################### ## Chat related methods below ####################################################################### def __findChatWidget(self, hid): """ Private method to find a chat widget given a chat history ID. @param hid chat history ID @type str @return reference to the chat widget related to the given ID @rtype OllamaChatWidget """ for index in range(self.chatStackWidget.count()): widget = self.chatStackWidget.widget(index) if widget.getHistoryId() == hid: return widget return None @pyqtSlot() def on_newChatButton_clicked(self): """ Private slot to start a new chat with the 'ollama' server. """ model = self.modelComboBox.currentText() if not model: EricMessageBox.critical( self, self.tr("New Chat"), self.tr("""A model has to be selected first. Aborting..."""), ) return title, ok = QInputDialog.getText( self, self.tr("New Chat"), self.tr("Enter a title for the new chat:"), QLineEdit.EchoMode.Normal, ) if ok and title: historyWidget = self.__createHistoryWidget(title, model) hid = historyWidget.getId() chatWidget = OllamaChatWidget(hid=hid, title=title, model=model) index = self.chatStackWidget.addWidget(chatWidget) self.chatStackWidget.setCurrentIndex(index) self.__updateMessageEditState() self.messageEdit.setFocus(Qt.FocusReason.OtherFocusReason) @pyqtSlot(str) def __newChatWithHistory(self, hid): """ Private slot to start a new chat using a previously saved history. @param hid ID of the history to be used @type str """ chatWidget = self.__findChatWidget(hid) if chatWidget is None: historyWidget = self.__findHistoryWidget(hid) if historyWidget is None: # Oops, treat it as a new chat. self.on_newChatButton_clicked() return chatWidget = OllamaChatWidget( hid=hid, title=historyWidget.getTitle(), model=historyWidget.getModel() ) index = self.chatStackWidget.addWidget(chatWidget) self.chatStackWidget.setCurrentIndex(index) for message in historyWidget.getMessages(): chatWidget.addMessage(role=message["role"], message=message["content"]) else: # simply switch to the already existing chatWidget self.chatStackWidget.setCurrentWidget(chatWidget) self.__updateMessageEditState() self.messageEdit.setFocus(Qt.FocusReason.OtherFocusReason) def __removeChatWidget(self, hid): """ Private method to remove a chat widget given its chat history ID. @param hid chat history ID @type str """ widget = self.__findChatWidget(hid) if widget is not None: self.chatStackWidget.removeWidget(widget) @pyqtSlot() def __updateMessageEditState(self): """ Private slot to set the enabled state of the message line edit and the send button. 
""" chatActive = bool(self.chatStackWidget.count()) hasText = bool(self.messageEdit.toPlainText()) self.messageEdit.setEnabled(chatActive) self.sendButton.setEnabled(chatActive and hasText) @pyqtSlot() def on_messageEdit_textChanged(self): """ Private slot to handle a change of the entered message. """ self.sendButton.setEnabled(bool(self.messageEdit.toPlainText())) @pyqtSlot() def __sendMessage(self): """ Private method to send the given message of the current chat to the 'ollama' server. This sends the message with context (i.e. the history of the current chat). """ msg = self.messageEdit.toPlainText() if not msg: # empty message => ignore return if not bool(self.chatStackWidget.count()): # no current stack => ignore return # 1. determine hid of the current chat via chat stack widget chatWidget = self.chatStackWidget.currentWidget() hid = chatWidget.getHistoryId() # 2. get chat history widget via hid from chat history widget historyWidget = self.__findHistoryWidget(hid) if historyWidget is not None: # 3. append the message to the history historyWidget.addToMessages("user", msg) # 4. get the complete messages list from the history messages = historyWidget.getMessages() # 5. add the message to the current chat and an empty one # for the response chatWidget.addMessage("user", msg) chatWidget.addMessage("assistant", "") # 6. send the request via the client (non-streaming (?)) model = historyWidget.getModel() self.__client.chat( model=model, messages=messages, streaming=self.__plugin.getPreferences("StreamingChatResponse"), ) # 7. clear the message editor and give input focus back self.messageEdit.clear() self.messageEdit.setFocus(Qt.FocusReason.OtherFocusReason) @pyqtSlot(str, str, bool) def __handleServerMessage(self, content, role, done): """ Private slot handling an 'ollama' server chat response. @param content message sent by the server @type str @param role role name @type str @param done flag indicating the last chat response @type bool """ if not bool(self.chatStackWidget.count()): # no current stack => ignore return chatWidget = self.chatStackWidget.currentWidget() chatWidget.appendMessage(content) if done: hid = chatWidget.getHistoryId() historyWidget = self.__findHistoryWidget(hid) if historyWidget is not None: historyWidget.addToMessages(role, chatWidget.getRecentMessage()) ####################################################################### ## Menu related methods below ####################################################################### def __initOllamaMenu(self): """ Private method to create the super menu and attach it to the super menu button. 
""" ################################################################### ## Menu with Chat History related actions ################################################################### self.__chatHistoryMenu = QMenu(self.tr("Chat History")) self.__chatHistoryMenu.addAction(self.tr("Load"), self.__loadHistory) self.__chatHistoryMenu.addSeparator() self.__clearHistoriesAct = self.__chatHistoryMenu.addAction( self.tr("Clear All"), self.__menuClearAllHistories ) self.__chatHistoryMenu.addSeparator() self.__chatHistoryMenu.addAction(self.tr("Import"), self.__menuImportHistories) self.__chatHistoryMenu.addAction(self.tr("Export"), self.__menuExportHistories) ################################################################### ## Menu with Model related actions ################################################################### self.__modelMenu = QMenu(self.tr("Model Management")) self.__modelMenu.addAction(self.tr("List Models"), self.__showModels) self.__modelMenu.addAction( self.tr("List Running Models"), self.__showRunningModels ) self.__modelMenu.addSeparator() self.__modelMenu.addAction( self.tr("Show Model Library"), self.__showModelLibrary ) self.__modelMenu.addSeparator() self.__pullModelAct = self.__modelMenu.addAction( self.tr("Install Model"), self.__pullModel ) self.__removeModelAct = self.__modelMenu.addAction( self.tr("Remove Model"), self.__removeModel ) ################################################################### ## Menu with Local Server related actions ################################################################### self.__localServerMenu = QMenu(self.tr("Local Server")) self.__localServerStartMonitorAct = self.__localServerMenu.addAction( self.tr("Start with Monitoring"), self.__startLocalServerMonitoring ) self.__localServerMenu.addSeparator() self.__startLocalServerAct = self.__localServerMenu.addAction( self.tr("Start"), self.__startLocalServer ) self.__stopLocalServerAct = self.__localServerMenu.addAction( self.tr("Stop"), self.__stopLocalServer ) ################################################################### ## Main menu ################################################################### self.__ollamaMenu = QMenu() self.__ollamaMenu.addMenu(self.__chatHistoryMenu) self.__ollamaMenu.addSeparator() self.__ollamaMenu.addMenu(self.__modelMenu) self.__ollamaMenu.addSeparator() self.__ollamaMenu.addMenu(self.__localServerMenu) self.__ollamaMenu.addSeparator() self.__ollamaMenu.addAction(self.tr("Configure..."), self.__ollamaConfigure) self.__ollamaMenu.aboutToShow.connect(self.__aboutToShowOllamaMenu) self.ollamaMenuButton.setMenu(self.__ollamaMenu) @pyqtSlot() def __aboutToShowOllamaMenu(self): """ Private slot to set the action enabled status. """ self.__clearHistoriesAct.setEnabled(self.__chatHistoryLayout.count() > 1) self.__localServerStartMonitorAct.setEnabled( self.__localServerProcess is None and self.__localServerDialog is None ) self.__startLocalServerAct.setEnabled( self.__localServerProcess is None and self.__localServerDialog is None ) self.__stopLocalServerAct.setEnabled( self.__localServerProcess is not None and self.__localServerDialog is None ) self.__pullModelAct.setEnabled(not self.__pulling) self.__removeModelAct.setEnabled(bool(self.__availableModels)) @pyqtSlot() def __ollamaConfigure(self): """ Private slot to show the ollama configuration page. """ ericApp().getObject("UserInterface").showPreferences("ollamaPage") @pyqtSlot() def __menuClearAllHistories(self): """ Private slot to clear all chat history entries. 
""" yes = EricMessageBox.yesNo( self, self.tr("Clear All Chat Histories"), self.tr( "<p>Do you really want to delete all chat histories? This is" " <b>irreversible</b>.</p>" ), ) if yes: self.clearHistory() @pyqtSlot() def __menuImportHistories(self): """ Private slot to import chat history entries from a file. """ historyFile = EricFileDialog.getOpenFileName( self, self.tr("Import Chat History"), "", self.tr("Chat History Files (*.json);;All Files (*)"), self.tr("Chat History Files (*.json)"), ) if historyFile: self.__loadChatHistoriesFile(historyFile, reportDuplicates=True) @pyqtSlot() def __menuExportHistories(self): """ Private slot to export chat history entries to a file. """ entries = [] for index in range(self.__chatHistoryLayout.count() - 1): item = self.__chatHistoryLayout.itemAt(index) widget = item.widget() hid = widget.getId() title = widget.getTitle() entries.append((title, hid)) dlg = EricListSelectionDialog( entries, title=self.tr("Export Chat History"), message=self.tr("Select the chats to be exported:"), checkBoxSelection=True, showSelectAll=True, ) if dlg.exec() == QDialog.DialogCode.Accepted: selectedChats = dlg.getSelection() fileName = EricFileDialog.getSaveFileName( self, self.tr("Export Chat History"), "", self.tr("Chat History Files (*.json)"), None, EricFileDialog.DontConfirmOverwrite, ) if fileName: if not fileName.endswith(".json"): fileName += ".json" entries = {} for _, hid in selectedChats: historyWidget = self.__findHistoryWidget(hid) if historyWidget is not None: entries[hid] = historyWidget.saveToJson() self.__saveChatHistoryFile(fileName, entries) def prepareServerRuntimeEnvironment(self): """ Public method to prepare a QProcessEnvironment object. @return prepared environment object to be used with QProcess @rtype QProcessEnvironment """ env = QProcessEnvironment.systemEnvironment() env.insert( "OLLAMA_HOST", "127.0.0.1:{0}".format(self.__plugin.getPreferences("OllamaLocalPort")), ) return env @pyqtSlot() def __startLocalServerMonitoring(self): """ Private slot to open a dialog for running a local 'ollama' server instance and monitor its output. """ from .RunOllamaServerDialog import RunOllamaServerDialog self.__localServerDialog = RunOllamaServerDialog( self.__client, self.__plugin, self ) self.__localServerDialog.serverStarted.connect(self.__serverStarted) self.__localServerDialog.serverStopped.connect(self.__serverStopped) self.__localServerDialog.finished.connect(self.__serverDialogClosed) self.__localServerDialog.show() self.__localServerDialog.startServer() @pyqtSlot() def __serverStarted(self): """ Private slot to handle the start of a local server. """ self.__client.setMode(True) self.on_reloadModelsButton_clicked() @pyqtSlot() def __serverStopped(self): """ Private slot to handle the stopping of a local server. """ self.__client.setMode(False) self.on_reloadModelsButton_clicked() @pyqtSlot() def __serverDialogClosed(self): """ Private slot handling the closing of the local server dialog. """ self.__localServerDialog.deleteLater() self.__localServerDialog = None @pyqtSlot() def __startLocalServer(self): """ Private slot to start a local 'ollama' server instance in the background. 
""" env = self.prepareServerRuntimeEnvironment() self.__localServerProcess = QProcess() self.__localServerProcess.setProcessEnvironment(env) self.__localServerProcess.finished.connect(self.__localServerProcessFinished) command = "ollama" args = ["serve"] self.__localServerProcess.start(command, args) ok = self.__localServerProcess.waitForStarted(10000) if not ok: EricMessageBox.critical( None, self.tr("Run Local 'ollama' Server"), self.tr("""The loacl 'ollama' server process could not be started."""), ) self.__localServerProcess = None else: self.__serverStarted() @pyqtSlot() def __stopLocalServer(self): """ Private slot to stop a running local 'ollama' server instance. """ if self.__localServerProcess is not None: self.__localServerProcess.terminate() @pyqtSlot() def __localServerProcessFinished(self): """ Private slot handling the finishing of the local 'ollama' server process. """ if ( self.__localServerProcess is not None and self.__localServerProcess.state() != QProcess.ProcessState.NotRunning ): self.__localServerProcess.terminate() QTimer.singleShot(2000, self.__localServerProcess.kill) self.__localServerProcess.waitForFinished(3000) self.__localServerProcess = None self.__serverStopped() @pyqtSlot() def __showModels(self): """ Private slot to ask the 'ollama' server for a list of available models with some details. """ from .OllamaDetailedModelsDialog import OllamaDetailedModelsDialog models = self.__client.listDetails() if models: dlg = OllamaDetailedModelsDialog(models, self) dlg.exec() else: EricMessageBox.information( self, self.tr("List Models"), self.tr("There are no models available."), ) @pyqtSlot() def __showRunningModels(self): """ Private slot to show a dialog with data of the running models. """ from .OllamaRunningModelsDialog import OllamaRunningModelsDialog models = self.__client.listRunning() if models: dlg = OllamaRunningModelsDialog(models, self) dlg.exec() else: EricMessageBox.information( self, self.tr("List Running Models"), self.tr("There are no models running."), ) @pyqtSlot() def __showModelLibrary(self): """ Private slot to open the 'ollama' model librayr web site. """ urlStr = self.__plugin.getPreferences("OllamaModelLibraryUrl") url = QUrl.fromUserInput(urlStr) QDesktopServices.openUrl(url) @pyqtSlot() def __pullModel(self): """ Private slot to download a model from the 'ollama' model library. """ from .OllamaPullProgressDialog import OllamaPullProgressDialog if self.__pulling: # only one pull operation supported return model, ok = QInputDialog.getText( self, self.tr("Install Model"), self.tr("Enter the name of the model to be installed:"), QLineEdit.EchoMode.Normal, ) if ok and model: self.__pulling = True if self.__pullProgressDialog is None: self.__pullProgressDialog = OllamaPullProgressDialog(self) self.__pullProgressDialog.abortPull.connect(self.__client.abortPull) self.__pullProgressDialog.setModel(model) self.__pullProgressDialog.clear() self.__pullProgressDialog.show() self.__client.pull(model) @pyqtSlot(str, str, "unsigned long int", "unsigned long int") def __handlePullStatus(self, status, idStr, total, completed): """ Private slot to handle a pull status update. 

        @param status status message reported by the 'ollama' server
        @type str
        @param idStr ID of the file being pulled or empty
        @type str
        @param total size of the file being pulled or 0 in case of an empty ID
        @type int
        @param completed downloaded bytes or 0 in case of an empty ID
        @type int
        """
        if self.__pullProgressDialog is not None:
            self.__pullProgressDialog.setStatus(status, idStr, total, completed)

    @pyqtSlot(str)
    def __handlePullError(self, errMsg):
        """
        Private slot to handle an error during a pull operation.

        @param errMsg error message
        @type str
        """
        if self.__pullProgressDialog is not None:
            self.__pullProgressDialog.showError(errMsg)

    @pyqtSlot()
    def __removeModel(self):
        """
        Private slot to remove a model from the 'ollama' server.
        """
        if self.__availableModels:
            modelName, ok = QInputDialog.getItem(
                self,
                self.tr("Remove Model"),
                self.tr("Select the model to be removed from the 'ollama' server:"),
                [""] + sorted(self.__availableModels),
                0,
                False,
            )
            if ok and modelName:
                deleted = self.__client.remove(modelName)
                if deleted:
                    EricMessageBox.information(
                        self,
                        self.tr("Remove Model"),
                        self.tr(
                            "<p>The model <b>{0}</b> was deleted successfully.</p>"
                        ).format(modelName),
                    )
                    self.__client.list()  # reload the list of models
                else:
                    EricMessageBox.warning(
                        self,
                        self.tr("Remove Model"),
                        self.tr(
                            "<p>The model <b>{0}</b> could not be removed from the"
                            " 'ollama' server.</p>"
                        ).format(modelName),
                    )

    @pyqtSlot(str)
    def __handleClientError(self, errMsg):
        """
        Private slot to handle an error message sent by the server.

        @param errMsg error message
        @type str
        """
        EricMessageBox.warning(
            self,
            self.tr("Network Error"),
            errMsg,
        )

    @pyqtSlot()
    def __handleClientFinished(self):
        """
        Private slot to handle the end of a client server interaction.
        """
        if self.__pullProgressDialog is not None and self.__pulling:
            self.__pullProgressDialog.setFinished(True)
            self.__pulling = False

        self.__client.list()