OllamaInterface/OllamaWidget.py

changeset 8
3118d16e526e
parent 7
eb1dec15b2f0
child 9
c471738b75b3
diff -r eb1dec15b2f0 -r 3118d16e526e OllamaInterface/OllamaWidget.py
--- a/OllamaInterface/OllamaWidget.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/OllamaWidget.py	Sun Aug 25 19:44:24 2024 +0200
@@ -9,7 +9,8 @@
 import json
 import os
 
-from PyQt6.QtCore import QProcessEnvironment, Qt, QTimer, pyqtSlot
+from PyQt6.QtCore import QProcess, QProcessEnvironment, Qt, QTimer, pyqtSlot, QUrl
+from PyQt6.QtGui import QDesktopServices
 from PyQt6.QtWidgets import (
     QDialog,
     QInputDialog,
@@ -136,7 +137,7 @@
         """
         Private slot to reload the list of available models.
         """
-        self.__client.list()
+        self.__finishSetup()
 
     @pyqtSlot(str)
     def on_modelComboBox_currentTextChanged(self, model):
@@ -599,14 +600,8 @@
         menu button.
         """
         # TODO: implement the menu and menu methods
-        #       * Show Model Details
-        #       * Show Model Processes
         #       * Pull Model
-        #       * Show Model Shop (via a web browser)
         #       * Remove Model
-        #       * Local Server
-        #           * Start
-        #           * Stop
         ###################################################################
         ## Menu with Chat History related actions
         ###################################################################
@@ -622,6 +617,23 @@
         self.__chatHistoryMenu.addAction(self.tr("Export"), self.__menuExportHistories)
 
         ###################################################################
+        ## Menu with Model related actions
+        ###################################################################
+
+        self.__modelMenu = QMenu(self.tr("Model Management"))
+        self.__modelMenu.addAction(self.tr("List Models"), self.__showModels)
+        self.__modelMenu.addAction(
+            self.tr("List Running Models"), self.__showRunningModels
+        )
+        self.__modelMenu.addSeparator()
+        self.__modelMenu.addAction(
+            self.tr("Show Model Library"), self.__showModelLibrary
+        )
+        self.__modelMenu.addSeparator()
+        self.__modelMenu.addAction(self.tr("Download Model"), self.__pullModel)
+        self.__modelMenu.addAction(self.tr("Remove Model"), self.__removeModel)
+
+        ###################################################################
         ## Menu with Local Server related actions
         ###################################################################
 
@@ -644,6 +656,8 @@
         self.__ollamaMenu = QMenu()
         self.__ollamaMenu.addMenu(self.__chatHistoryMenu)
         self.__ollamaMenu.addSeparator()
+        self.__ollamaMenu.addMenu(self.__modelMenu)
+        self.__ollamaMenu.addSeparator()
         self.__ollamaMenu.addMenu(self.__localServerMenu)
         self.__ollamaMenu.addSeparator()
         self.__ollamaMenu.addAction(self.tr("Configure..."), self.__ollamaConfigure)
@@ -770,7 +784,6 @@
         Private slot to open a dialog for running a local 'ollama' server instance
         and monitor its output.
         """
-        # TODO: not implemented yet
         from .RunOllamaServerDialog import RunOllamaServerDialog
 
         self.__localServerDialog = RunOllamaServerDialog(
@@ -783,22 +796,6 @@
         self.__localServerDialog.startServer()
 
     @pyqtSlot()
-    def __startLocalServer(self):
-        """
-        Private slot to start a local 'ollama' server instance in the background.
-        """
-        # TODO: not implemented yet
-        pass
-
-    @pyqtSlot()
-    def __stopLocalServer(self):
-        """
-        Private slot to stop a running local 'ollama' server instance.
-        """
-        # TODO: not implemented yet
-        pass
-
-    @pyqtSlot()
     def __serverStarted(self):
         """
         Private slot to handle the start of a local server.
@@ -821,3 +818,115 @@
         """
         self.__localServerDialog.deleteLater()
         self.__localServerDialog = None
+
+    @pyqtSlot()
+    def __startLocalServer(self):
+        """
+        Private slot to start a local 'ollama' server instance in the background.
+        """
+        env = self.prepareServerRuntimeEnvironment()
+        self.__localServerProcess = QProcess()
+        self.__localServerProcess.setProcessEnvironment(env)
+        self.__localServerProcess.finished.connect(self.__localServerProcessFinished)
+
+        command = "ollama"
+        args = ["serve"]
+
+        self.__localServerProcess.start(command, args)
+        ok = self.__localServerProcess.waitForStarted(10000)
+        if not ok:
+            EricMessageBox.critical(
+                None,
+                self.tr("Run Local 'ollama' Server"),
+                self.tr("""The local 'ollama' server process could not be started."""),
+            )
+            self.__localServerProcess = None
+        else:
+            self.__serverStarted()
+
+    @pyqtSlot()
+    def __stopLocalServer(self):
+        """
+        Private slot to stop a running local 'ollama' server instance.
+        """
+        if self.__localServerProcess is not None:
+            self.__localServerProcess.terminate()
+
+    @pyqtSlot()
+    def __localServerProcessFinished(self):
+        """
+        Private slot handling the finishing of the local 'ollama' server process.
+        """
+        if (
+            self.__localServerProcess is not None
+            and self.__localServerProcess.state() != QProcess.ProcessState.NotRunning
+        ):
+            self.__localServerProcess.terminate()
+            QTimer.singleShot(2000, self.__localServerProcess.kill)
+            self.__localServerProcess.waitForFinished(3000)
+
+        self.__localServerProcess = None
+
+        self.__serverStopped()
+
+    @pyqtSlot()
+    def __showModels(self):
+        """
+        Private slot to ask the 'ollama' server for a list of available models with
+        some details.
+        """
+        from .OllamaDetailedModelsDialog import OllamaDetailedModelsDialog
+
+        models = self.__client.listDetails()
+        if models:
+            dlg = OllamaDetailedModelsDialog(models, self)
+            dlg.exec()
+        else:
+            EricMessageBox.information(
+                self,
+                self.tr("List Models"),
+                self.tr("There are no models available."),
+            )
+
+    @pyqtSlot()
+    def __showRunningModels(self):
+        """
+        Private slot to show a dialog with data of the running models.
+        """
+        from .OllamaRunningModelsDialog import OllamaRunningModelsDialog
+
+        models = self.__client.listRunning()
+        if models:
+            dlg = OllamaRunningModelsDialog(models, self)
+            dlg.exec()
+        else:
+            EricMessageBox.information(
+                self,
+                self.tr("List Running Models"),
+                self.tr("There are no models running."),
+            )
+
+    @pyqtSlot()
+    def __showModelLibrary(self):
+        """
+        Private slot to open the 'ollama' model library web site.
+        """
+        urlStr = self.__plugin.getPreferences("OllamaModelLibraryUrl")
+        url = QUrl.fromUserInput(urlStr)
+        QDesktopServices.openUrl(url)
+
+    @pyqtSlot()
+    def __pullModel(self):
+        """
+        Private slot to download a model from the 'ollama' model library.
+        """
+        # TODO: not implemented yet
+        pass
+
+    @pyqtSlot()
+    def __removeModel(self):
+        """
+        Private slot to remove a model from the 'ollama' server.
+        """
+        # TODO: not implemented yet
+        pass

eric ide

mercurial