Implemented some more menu actions.

Sun, 25 Aug 2024 19:44:24 +0200

author
Detlev Offenbach <detlev@die-offenbachs.de>
date
Sun, 25 Aug 2024 19:44:24 +0200
changeset 8
3118d16e526e
parent 7
eb1dec15b2f0
child 9
c471738b75b3

Implemented some more menu actions.
- Start and Stop a local 'ollama' server in the background.
- List available models.
- List running models.
- Open the 'ollama' model library in a browser.

OllamaInterface/OllamaClient.py file | annotate | diff | comparison | revisions
OllamaInterface/OllamaDetailedModelsDialog.py file | annotate | diff | comparison | revisions
OllamaInterface/OllamaDetailedModelsDialog.ui file | annotate | diff | comparison | revisions
OllamaInterface/OllamaRunningModelsDialog.py file | annotate | diff | comparison | revisions
OllamaInterface/OllamaRunningModelsDialog.ui file | annotate | diff | comparison | revisions
OllamaInterface/OllamaWidget.py file | annotate | diff | comparison | revisions
OllamaInterface/OllamaWidget.ui file | annotate | diff | comparison | revisions
OllamaInterface/RunOllamaServerDialog.py file | annotate | diff | comparison | revisions
OllamaInterface/Ui_OllamaDetailedModelsDialog.py file | annotate | diff | comparison | revisions
OllamaInterface/Ui_OllamaRunningModelsDialog.py file | annotate | diff | comparison | revisions
OllamaInterface/Ui_OllamaWidget.py file | annotate | diff | comparison | revisions
PluginAiOllama.epj file | annotate | diff | comparison | revisions
PluginAiOllama.py file | annotate | diff | comparison | revisions
--- a/OllamaInterface/OllamaClient.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/OllamaClient.py	Sun Aug 25 19:44:24 2024 +0200
@@ -45,11 +45,6 @@
         from the 'ollama' server was received
     @signal modelsList(modelNames:list[str]) emitted after the list of model
         names was obtained from the 'ollama' server
-    @signal detailedModelsList(models:list[dict]) emitted after the list of
-        models was obtained from the 'ollama' server giving some model details
-    @signal runningModelsList(models:list[dict]) emitted after the list of
-        running models was obtained from the 'ollama' server giving some model
-        execution details
     @signal pullStatus(msg:str, id:str, total:int, completed:int) emitted to indicate
         the status of a pull request as reported by the 'ollama' server
     @signal serverVersion(version:str) emitted after the server version was obtained
@@ -63,8 +58,6 @@
 
     replyReceived = pyqtSignal(str, str, bool)
     modelsList = pyqtSignal(list)
-    detailedModelsList = pyqtSignal(list)
-    runningModelsList = pyqtSignal(list)
     pullStatus = pyqtSignal(str, str, int, int)
     serverVersion = pyqtSignal(str)
     finished = pyqtSignal()
@@ -247,79 +240,65 @@
         Public method to request a list of models available locally from the 'ollama'
         server with some model details.
         """
-        # TODO: not implemented yet
-        self.__sendRequest("tags", processResponse=self.__processDetailedModelsList)
+        response = self.__sendSyncRequest("tags")
 
-    def __processDetailedModelsList(self, response):
-        """
-        Private method to process the tags response of the 'ollama' server extracting
-        some model details.
-
-        @param response dictionary containing the tags response
-        @type dict
-        """
         models = []
-        with contextlib.suppress(KeyError):
-            for model in response["models"]:
-                name = model["name"]
-                if name:
-                    models.append(
-                        {
-                            "name": name,
-                            "id": model["digest"][:20],  # first 20 characters only
-                            "size": model["size"],
-                            "modified": datetime.datetime.fromisoformat(
-                                model["modified_at"]
-                            ),
-                        }
-                    )
-        self.detailedModelsList.emit(models)
+        if response is not None:
+            with contextlib.suppress(KeyError):
+                for model in response["models"]:
+                    name = model["name"]
+                    if name:
+                        models.append(
+                            {
+                                "name": name,
+                                "id": model["digest"][:20],  # first 20 characters only
+                                "size": model["size"],
+                                "modified": datetime.datetime.fromisoformat(
+                                    model["modified_at"]
+                                ),
+                            }
+                        )
+
+        return models
 
     def listRunning(self):
         """
         Public method to request a list of running models from the 'ollama' server.
         """
-        # TODO: not implemented yet
-        self.__sendRequest("ps", processResponse=self.__processRunningModelsList)
+        response = self.__sendSyncRequest("ps")
 
-    def __processRunningModelsList(self, response):
-        """
-        Private method to process the ps response of the 'ollama' server extracting
-        some model execution details.
-
-        @param response dictionary containing the ps response
-        @type dict
-        """
         models = []
-        with contextlib.suppress(KeyError):
-            for model in response["models"]:
-                name = model["name"]
-                if name:
-                    if model["size_vram"] == 0:
-                        processor = self.tr("100% CPU")
-                    elif model["size_vram"] == model["size"]:
-                        processor = self.tr("100% GPU")
-                    elif model["size_vram"] > model["size_"] or model["size"] == 0:
-                        processor = self.tr("unknown")
-                    else:
-                        sizeCpu = model["size"] - model["size_vram"]
-                        cpuPercent = round(sizeCpu / model["size_vram"] * 100)
-                        processor = self.tr("{0}% / {1}% CPU / GPU").format(
-                            cpuPercent, 100 - cpuPercent
+        if response is not None:
+            with contextlib.suppress(KeyError):
+                for model in response["models"]:
+                    name = model["name"]
+                    if name:
+                        if model["size_vram"] == 0:
+                            processor = self.tr("100% CPU")
+                        elif model["size_vram"] == model["size"]:
+                            processor = self.tr("100% GPU")
+                        elif model["size_vram"] > model["size"] or model["size"] == 0:
+                            processor = self.tr("unknown")
+                        else:
+                            sizeCpu = model["size"] - model["size_vram"]
+                            cpuPercent = round(sizeCpu / model["size_vram"] * 100)
+                            processor = self.tr("{0}% / {1}% CPU / GPU").format(
+                                cpuPercent, 100 - cpuPercent
+                            )
+                        models.append(
+                            {
+                                "name": name,
+                                "id": model["digest"][:20],  # first 20 characters only
+                                "size": model["size"],
+                                "size_vram": model["size_vram"],
+                                "processor": processor,
+                                "expires": datetime.datetime.fromisoformat(
+                                    model["expires_at"]
+                                ),
+                            }
                         )
-                    models.append(
-                        {
-                            "name": name,
-                            "id": model["digest"][:20],  # first 20 characters only
-                            "size": model["size"],
-                            "size_vram": model["size_vram"],
-                            "processor": processor,
-                            "expires": datetime.datetime.fromisoformat(
-                                model["expires_at"]
-                            ),
-                        }
-                    )
-        self.runningModelsList.emit(models)
+
+        return models
 
     def version(self):
         """
@@ -346,22 +325,20 @@
         """
         return self.__state
 
-    def __sendRequest(self, endpoint, data=None, processResponse=None):
+    def __getServerReply(self, endpoint, data=None):
         """
-        Private method to send a request to the 'ollama' server and handle its
-        responses.
+        Private method to send a request to the 'ollama' server and return a reply
+        object.
 
         @param endpoint 'ollama' API endpoint to be contacted
         @type str
         @param data dictionary containing the data to send to the server
             (defaults to None)
         @type dict (optional)
-        @param processResponse function handling the received data (defaults to None)
-        @type function (optional)
+        @return 'ollama' server reply
+        @rtype QNetworkReply
         """
-        self.__state = OllamaClientState.Requesting
-
-        ollamaUrl = QUrl(
+        ollamaUrl = QUrl(
             "{0}://{1}:{2}/api/{3}".format(
                 self.__plugin.getPreferences("OllamaScheme"),
                 (
@@ -386,9 +363,53 @@
             reply = self.__networkManager.post(request, jsonData)
         else:
             reply = self.__networkManager.get(request)
+        reply.errorOccurred.connect(lambda error: self.__errorOccurred(error, reply))
+        return reply
 
+    def __sendRequest(self, endpoint, data=None, processResponse=None):
+        """
+        Private method to send a request to the 'ollama' server and handle its
+        responses.
+
+        @param endpoint 'ollama' API endpoint to be contacted
+        @type str
+        @param data dictionary containing the data to send to the server
+            (defaults to None)
+        @type dict (optional)
+        @param processResponse function handling the received data (defaults to None)
+        @type function (optional)
+        """
+        self.__state = OllamaClientState.Requesting
+
+        ##ollamaUrl = QUrl(
+            ##"{0}://{1}:{2}/api/{3}".format(
+                ##self.__plugin.getPreferences("OllamaScheme"),
+                ##(
+                    ##"127.0.0.1"
+                    ##if self.__localServer
+                    ##else self.__plugin.getPreferences("OllamaHost")
+                ##),
+                ##(
+                    ##self.__plugin.getPreferences("OllamaLocalPort")
+                    ##if self.__localServer
+                    ##else self.__plugin.getPreferences("OllamaPort")
+                ##),
+                ##endpoint,
+            ##)
+        ##)
+        ##request = QNetworkRequest(ollamaUrl)
+        ##if data is not None:
+            ##request.setHeader(
+                ##QNetworkRequest.KnownHeaders.ContentTypeHeader, "application/json"
+            ##)
+            ##jsonData = json.dumps(data).encode("utf-8")
+            ##reply = self.__networkManager.post(request, jsonData)
+        ##else:
+            ##reply = self.__networkManager.get(request)
+##
+        reply = self.__getServerReply(endpoint=endpoint, data=data)
         reply.finished.connect(lambda: self.__replyFinished(reply))
-        reply.errorOccurred.connect(lambda error: self.__errorOccurred(error, reply))
+        ##reply.errorOccurred.connect(lambda error: self.__errorOccurred(error, reply))
         reply.readyRead.connect(lambda: self.__processData(reply, processResponse))
         self.__replies.append(reply)
 
@@ -441,6 +462,36 @@
                 if data and processResponse:
                     processResponse(data)
 
+    def __sendSyncRequest(self, endpoint, data=None):
+        """
+        Private method to send a request to the 'ollama' server and handle its
+        responses.
+
+        @param endpoint 'ollama' API endpoint to be contacted
+        @type str
+        @param data dictionary containing the data to send to the server
+            (defaults to None)
+        @type dict (optional)
+        """
+        self.__state = OllamaClientState.Requesting
+
+        reply = self.__getServerReply(endpoint=endpoint, data=data)
+        while not reply.isFinished():
+            QCoreApplication.processEvents()
+            QThread.msleep(100)
+
+        reply.deleteLater()
+
+        self.__state = OllamaClientState.Finished
+
+        if reply.error() == QNetworkReply.NetworkError.NoError:
+            buffer = bytes(reply.readAll())
+            with contextlib.suppress(json.JSONDecodeError):
+                data = json.loads(buffer)
+                return data
+
+        return None
+
     def heartbeat(self):
         """
         Public method to check, if the 'ollama' server has started and is responsive.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/OllamaDetailedModelsDialog.py	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024 Detlev Offenbach <detlev@die-offenbachs.de>
+#
+
+"""
+Module implementing a dialog to show details of the available models.
+"""
+
+from PyQt6.QtWidgets import QDialog, QTreeWidgetItem
+
+from eric7 import Globals
+
+from .Ui_OllamaDetailedModelsDialog import Ui_OllamaDetailedModelsDialog
+
+
+class OllamaDetailedModelsDialog(QDialog, Ui_OllamaDetailedModelsDialog):
+    """
+    Class implementing a dialog to show details of the available models.
+    """
+
+    def __init__(self, models, parent=None):
+        """
+        Constructor
+
+        @param models list of available models with details
+        @type list[dict[str, Any]]
+        @param parent reference to the parent widget (defaults to None)
+        @type QWidget (optional)
+        """
+        super().__init__(parent)
+        self.setupUi(self)
+
+        for model in models:
+            QTreeWidgetItem(
+                self.modelsList,
+                [
+                    model["name"],
+                    model["id"],
+                    Globals.dataString(model["size"]),
+                    model["modified"].strftime("%Y-%m-%d %H:%M:%S"),
+                ]
+            )
+
+        for column in range(self.modelsList.columnCount()):
+            self.modelsList.resizeColumnToContents(column)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/OllamaDetailedModelsDialog.ui	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,109 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>OllamaDetailedModelsDialog</class>
+ <widget class="QDialog" name="OllamaDetailedModelsDialog">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>500</width>
+    <height>400</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Available Models</string>
+  </property>
+  <property name="sizeGripEnabled">
+   <bool>true</bool>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout">
+   <item>
+    <widget class="QTreeWidget" name="modelsList">
+     <property name="editTriggers">
+      <set>QAbstractItemView::NoEditTriggers</set>
+     </property>
+     <property name="alternatingRowColors">
+      <bool>true</bool>
+     </property>
+     <property name="selectionMode">
+      <enum>QAbstractItemView::NoSelection</enum>
+     </property>
+     <property name="rootIsDecorated">
+      <bool>false</bool>
+     </property>
+     <property name="itemsExpandable">
+      <bool>false</bool>
+     </property>
+     <property name="sortingEnabled">
+      <bool>true</bool>
+     </property>
+     <column>
+      <property name="text">
+       <string>Name</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>ID</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>Size</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>Modified At</string>
+      </property>
+     </column>
+    </widget>
+   </item>
+   <item>
+    <widget class="QDialogButtonBox" name="buttonBox">
+     <property name="orientation">
+      <enum>Qt::Horizontal</enum>
+     </property>
+     <property name="standardButtons">
+      <set>QDialogButtonBox::Close</set>
+     </property>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections>
+  <connection>
+   <sender>buttonBox</sender>
+   <signal>accepted()</signal>
+   <receiver>OllamaDetailedModelsDialog</receiver>
+   <slot>accept()</slot>
+   <hints>
+    <hint type="sourcelabel">
+     <x>248</x>
+     <y>254</y>
+    </hint>
+    <hint type="destinationlabel">
+     <x>157</x>
+     <y>274</y>
+    </hint>
+   </hints>
+  </connection>
+  <connection>
+   <sender>buttonBox</sender>
+   <signal>rejected()</signal>
+   <receiver>OllamaDetailedModelsDialog</receiver>
+   <slot>reject()</slot>
+   <hints>
+    <hint type="sourcelabel">
+     <x>316</x>
+     <y>260</y>
+    </hint>
+    <hint type="destinationlabel">
+     <x>286</x>
+     <y>274</y>
+    </hint>
+   </hints>
+  </connection>
+ </connections>
+</ui>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/OllamaRunningModelsDialog.py	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024 Detlev Offenbach <detlev@die-offenbachs.de>
+#
+
+"""
+Module implementing a dialog to show details of the running models.
+"""
+
+from PyQt6.QtWidgets import QDialog, QTreeWidgetItem
+
+from eric7 import Globals
+
+from .Ui_OllamaRunningModelsDialog import Ui_OllamaRunningModelsDialog
+
+
+class OllamaRunningModelsDialog(QDialog, Ui_OllamaRunningModelsDialog):
+    """
+    Class implementing a dialog to show details of the running models.
+    """
+
+    def __init__(self, models, parent=None):
+        """
+        Constructor
+
+        @param models list of available models with details
+        @type list[dict[str, Any]]
+        @param parent reference to the parent widget (defaults to None)
+        @type QWidget (optional)
+        """
+        super().__init__(parent)
+        self.setupUi(self)
+
+        for model in models:
+            QTreeWidgetItem(
+                self.modelsList,
+                [
+                    model["name"],
+                    model["id"],
+                    Globals.dataString(model["size"]),
+                    model["processor"],
+                    model["expires"].strftime("%Y-%m-%d %H:%M:%S"),
+                ]
+            )
+
+        for column in range(self.modelsList.columnCount()):
+            self.modelsList.resizeColumnToContents(column)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/OllamaRunningModelsDialog.ui	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>OllamaRunningModelsDialog</class>
+ <widget class="QDialog" name="OllamaRunningModelsDialog">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>600</width>
+    <height>400</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Running Models</string>
+  </property>
+  <property name="sizeGripEnabled">
+   <bool>true</bool>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout">
+   <item>
+    <widget class="QTreeWidget" name="modelsList">
+     <property name="editTriggers">
+      <set>QAbstractItemView::NoEditTriggers</set>
+     </property>
+     <property name="alternatingRowColors">
+      <bool>true</bool>
+     </property>
+     <property name="selectionMode">
+      <enum>QAbstractItemView::NoSelection</enum>
+     </property>
+     <property name="rootIsDecorated">
+      <bool>false</bool>
+     </property>
+     <property name="itemsExpandable">
+      <bool>false</bool>
+     </property>
+     <property name="sortingEnabled">
+      <bool>true</bool>
+     </property>
+     <column>
+      <property name="text">
+       <string>Name</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>ID</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>Size</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>Processor</string>
+      </property>
+     </column>
+     <column>
+      <property name="text">
+       <string>Expires</string>
+      </property>
+     </column>
+    </widget>
+   </item>
+   <item>
+    <widget class="QDialogButtonBox" name="buttonBox">
+     <property name="orientation">
+      <enum>Qt::Horizontal</enum>
+     </property>
+     <property name="standardButtons">
+      <set>QDialogButtonBox::Close</set>
+     </property>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections>
+  <connection>
+   <sender>buttonBox</sender>
+   <signal>accepted()</signal>
+   <receiver>OllamaRunningModelsDialog</receiver>
+   <slot>accept()</slot>
+   <hints>
+    <hint type="sourcelabel">
+     <x>248</x>
+     <y>254</y>
+    </hint>
+    <hint type="destinationlabel">
+     <x>157</x>
+     <y>274</y>
+    </hint>
+   </hints>
+  </connection>
+  <connection>
+   <sender>buttonBox</sender>
+   <signal>rejected()</signal>
+   <receiver>OllamaRunningModelsDialog</receiver>
+   <slot>reject()</slot>
+   <hints>
+    <hint type="sourcelabel">
+     <x>316</x>
+     <y>260</y>
+    </hint>
+    <hint type="destinationlabel">
+     <x>286</x>
+     <y>274</y>
+    </hint>
+   </hints>
+  </connection>
+ </connections>
+</ui>
--- a/OllamaInterface/OllamaWidget.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/OllamaWidget.py	Sun Aug 25 19:44:24 2024 +0200
@@ -9,7 +9,8 @@
 import json
 import os
 
-from PyQt6.QtCore import QProcessEnvironment, Qt, QTimer, pyqtSlot
+from PyQt6.QtCore import QProcess, QProcessEnvironment, Qt, QTimer, pyqtSlot, QUrl
+from PyQt6.QtGui import QDesktopServices
 from PyQt6.QtWidgets import (
     QDialog,
     QInputDialog,
@@ -136,7 +137,7 @@
         """
         Private slot to reload the list of available models.
         """
-        self.__client.list()
+        self.__finishSetup()
 
     @pyqtSlot(str)
     def on_modelComboBox_currentTextChanged(self, model):
@@ -599,14 +600,8 @@
         menu button.
         """
         # TODO: implement the menu and menu methods
-        #       * Show Model Details
-        #       * Show Model Processes
         #       * Pull Model
-        #       * Show Model Shop (via a web browser)
         #       * Remove Model
-        #       * Local Server
-        #           * Start
-        #           * Stop
         ###################################################################
         ## Menu with Chat History related actions
         ###################################################################
@@ -622,6 +617,23 @@
         self.__chatHistoryMenu.addAction(self.tr("Export"), self.__menuExportHistories)
 
         ###################################################################
+        ## Menu with Model related actions
+        ###################################################################
+
+        self.__modelMenu = QMenu(self.tr("Model Management"))
+        self.__modelMenu.addAction(self.tr("List Models"), self.__showModels)
+        self.__modelMenu.addAction(
+            self.tr("List Running Models"), self.__showRunningModels
+        )
+        self.__modelMenu.addSeparator()
+        self.__modelMenu.addAction(
+            self.tr("Show Model Library"), self.__showModelLibrary
+        )
+        self.__modelMenu.addSeparator()
+        self.__modelMenu.addAction(self.tr("Download Model"), self.__pullModel)
+        self.__modelMenu.addAction(self.tr("Remove Model"), self.__removeModel)
+
+        ###################################################################
         ## Menu with Local Server related actions
         ###################################################################
 
@@ -644,6 +656,8 @@
         self.__ollamaMenu = QMenu()
         self.__ollamaMenu.addMenu(self.__chatHistoryMenu)
         self.__ollamaMenu.addSeparator()
+        self.__ollamaMenu.addMenu(self.__modelMenu)
+        self.__ollamaMenu.addSeparator()
         self.__ollamaMenu.addMenu(self.__localServerMenu)
         self.__ollamaMenu.addSeparator()
         self.__ollamaMenu.addAction(self.tr("Configure..."), self.__ollamaConfigure)
@@ -770,7 +784,6 @@
         Private slot to open a dialog for running a local 'ollama' server instance
         and monitor its output.
         """
-        # TODO: not implemented yet
         from .RunOllamaServerDialog import RunOllamaServerDialog
 
         self.__localServerDialog = RunOllamaServerDialog(
@@ -783,22 +796,6 @@
         self.__localServerDialog.startServer()
 
     @pyqtSlot()
-    def __startLocalServer(self):
-        """
-        Private slot to start a local 'ollama' server instance in the background.
-        """
-        # TODO: not implemented yet
-        pass
-
-    @pyqtSlot()
-    def __stopLocalServer(self):
-        """
-        Private slot to stop a running local 'ollama' server instance.
-        """
-        # TODO: not implemented yet
-        pass
-
-    @pyqtSlot()
     def __serverStarted(self):
         """
         Private slot to handle the start of a local server.
@@ -821,3 +818,115 @@
         """
         self.__localServerDialog.deleteLater()
         self.__localServerDialog = None
+
+    @pyqtSlot()
+    def __startLocalServer(self):
+        """
+        Private slot to start a local 'ollama' server instance in the background.
+        """
+        env = self.prepareServerRuntimeEnvironment()
+        self.__localServerProcess = QProcess()
+        self.__localServerProcess.setProcessEnvironment(env)
+        self.__localServerProcess.finished.connect(self.__localServerProcessFinished)
+
+        command = "ollama"
+        args = ["serve"]
+
+        self.__localServerProcess.start(command, args)
+        ok = self.__localServerProcess.waitForStarted(10000)
+        if not ok:
+            EricMessageBox.critical(
+                None,
+                self.tr("Run Local 'ollama' Server"),
+                self.tr("""The local 'ollama' server process could not be started."""),
+            )
+            self.__localServerProcess = None
+        else:
+            self.__serverStarted()
+
+    @pyqtSlot()
+    def __stopLocalServer(self):
+        """
+        Private slot to stop a running local 'ollama' server instance.
+        """
+        if self.__localServerProcess is not None:
+            self.__localServerProcess.terminate()
+
+    @pyqtSlot()
+    def __localServerProcessFinished(self):
+        """
+        Private slot handling the finishing of the local 'ollama' server process.
+        """
+        if (
+            self.__localServerProcess is not None
+            and self.__localServerProcess.state() != QProcess.ProcessState.NotRunning
+        ):
+            self.__localServerProcess.terminate()
+            QTimer.singleShot(2000, self.__localServerProcess.kill)
+            self.__localServerProcess.waitForFinished(3000)
+
+        self.__localServerProcess = None
+
+        self.__serverStopped()
+
+    @pyqtSlot()
+    def __showModels(self):
+        """
+        Private slot to ask the 'ollama' server for a list of available models with
+        some details.
+        """
+        from .OllamaDetailedModelsDialog import OllamaDetailedModelsDialog
+
+        models = self.__client.listDetails()
+        if models:
+            dlg = OllamaDetailedModelsDialog(models, self)
+            dlg.exec()
+        else:
+            EricMessageBox.information(
+                self,
+                self.tr("List Models"),
+                self.tr("There are no models available."),
+            )
+
+    @pyqtSlot()
+    def __showRunningModels(self):
+        """
+        Private slot to show a dialog with data of the running models.
+        """
+        from .OllamaRunningModelsDialog import OllamaRunningModelsDialog
+
+        models = self.__client.listRunning()
+        if models:
+            dlg = OllamaRunningModelsDialog(models, self)
+            dlg.exec()
+        else:
+            EricMessageBox.information(
+                self,
+                self.tr("List Running Models"),
+                self.tr("There are no models running."),
+            )
+
+    @pyqtSlot()
+    def __showModelLibrary(self):
+        """
+        Private slot to open the 'ollama' model library web site.
+        """
+        urlStr = self.__plugin.getPreferences("OllamaModelLibraryUrl")
+        url = QUrl.fromUserInput(urlStr)
+        QDesktopServices.openUrl(url)
+
+    @pyqtSlot()
+    def __pullModel(self):
+        """
+        Private slot to download a model from the 'ollama' model library.
+        """
+        # TODO: not implemented yet
+        pass
+
+    @pyqtSlot()
+    def __removeModel(self):
+        """
+        Private slot to remove a model from the 'ollama' server.
+        """
+        # TODO: not implemented yet
+        pass
--- a/OllamaInterface/OllamaWidget.ui	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/OllamaWidget.ui	Sun Aug 25 19:44:24 2024 +0200
@@ -55,6 +55,9 @@
     <layout class="QHBoxLayout" name="horizontalLayout">
      <item>
       <widget class="QToolButton" name="reloadModelsButton">
+       <property name="toolTip">
+        <string>Press to reload the models list and update the 'ollama' version information.</string>
+       </property>
        <property name="statusTip">
         <string>Select to reload the list of selectable models.</string>
        </property>
--- a/OllamaInterface/RunOllamaServerDialog.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/RunOllamaServerDialog.py	Sun Aug 25 19:44:24 2024 +0200
@@ -76,8 +76,8 @@
         if not ok:
             EricMessageBox.critical(
                 None,
-                self.tr("Run Local ollama Server"),
-                self.tr("""The loacl ollama server process could not be started."""),
+                self.tr("Run Local 'ollama' Server"),
+                self.tr("""The local 'ollama' server process could not be started."""),
             )
         else:
             self.buttonBox.button(QDialogButtonBox.StandardButton.Close).setEnabled(
@@ -140,7 +140,8 @@
         """
         Private slot to stop the running server.
         """
-        self.__process.terminate()
+        if self.__process is not None:
+            self.__process.terminate()
 
     @pyqtSlot()
     def on_restartServerButton_clicked(self):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/Ui_OllamaDetailedModelsDialog.py	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,45 @@
+# Form implementation generated from reading ui file 'OllamaInterface/OllamaDetailedModelsDialog.ui'
+#
+# Created by: PyQt6 UI code generator 6.7.1
+#
+# WARNING: Any manual changes made to this file will be lost when pyuic6 is
+# run again.  Do not edit this file unless you know what you are doing.
+
+
+from PyQt6 import QtCore, QtGui, QtWidgets
+
+
+class Ui_OllamaDetailedModelsDialog(object):
+    def setupUi(self, OllamaDetailedModelsDialog):
+        OllamaDetailedModelsDialog.setObjectName("OllamaDetailedModelsDialog")
+        OllamaDetailedModelsDialog.resize(500, 400)
+        OllamaDetailedModelsDialog.setSizeGripEnabled(True)
+        self.verticalLayout = QtWidgets.QVBoxLayout(OllamaDetailedModelsDialog)
+        self.verticalLayout.setObjectName("verticalLayout")
+        self.modelsList = QtWidgets.QTreeWidget(parent=OllamaDetailedModelsDialog)
+        self.modelsList.setEditTriggers(QtWidgets.QAbstractItemView.EditTrigger.NoEditTriggers)
+        self.modelsList.setAlternatingRowColors(True)
+        self.modelsList.setSelectionMode(QtWidgets.QAbstractItemView.SelectionMode.NoSelection)
+        self.modelsList.setRootIsDecorated(False)
+        self.modelsList.setItemsExpandable(False)
+        self.modelsList.setObjectName("modelsList")
+        self.verticalLayout.addWidget(self.modelsList)
+        self.buttonBox = QtWidgets.QDialogButtonBox(parent=OllamaDetailedModelsDialog)
+        self.buttonBox.setOrientation(QtCore.Qt.Orientation.Horizontal)
+        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.StandardButton.Close)
+        self.buttonBox.setObjectName("buttonBox")
+        self.verticalLayout.addWidget(self.buttonBox)
+
+        self.retranslateUi(OllamaDetailedModelsDialog)
+        self.buttonBox.accepted.connect(OllamaDetailedModelsDialog.accept) # type: ignore
+        self.buttonBox.rejected.connect(OllamaDetailedModelsDialog.reject) # type: ignore
+        QtCore.QMetaObject.connectSlotsByName(OllamaDetailedModelsDialog)
+
+    def retranslateUi(self, OllamaDetailedModelsDialog):
+        _translate = QtCore.QCoreApplication.translate
+        OllamaDetailedModelsDialog.setWindowTitle(_translate("OllamaDetailedModelsDialog", "Available Models"))
+        self.modelsList.setSortingEnabled(True)
+        self.modelsList.headerItem().setText(0, _translate("OllamaDetailedModelsDialog", "Name"))
+        self.modelsList.headerItem().setText(1, _translate("OllamaDetailedModelsDialog", "ID"))
+        self.modelsList.headerItem().setText(2, _translate("OllamaDetailedModelsDialog", "Size"))
+        self.modelsList.headerItem().setText(3, _translate("OllamaDetailedModelsDialog", "Modified At"))
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OllamaInterface/Ui_OllamaRunningModelsDialog.py	Sun Aug 25 19:44:24 2024 +0200
@@ -0,0 +1,46 @@
+# Form implementation generated from reading ui file 'OllamaInterface/OllamaRunningModelsDialog.ui'
+#
+# Created by: PyQt6 UI code generator 6.7.1
+#
+# WARNING: Any manual changes made to this file will be lost when pyuic6 is
+# run again.  Do not edit this file unless you know what you are doing.
+
+
+from PyQt6 import QtCore, QtGui, QtWidgets
+
+
+class Ui_OllamaRunningModelsDialog(object):
+    def setupUi(self, OllamaRunningModelsDialog):
+        OllamaRunningModelsDialog.setObjectName("OllamaRunningModelsDialog")
+        OllamaRunningModelsDialog.resize(600, 400)
+        OllamaRunningModelsDialog.setSizeGripEnabled(True)
+        self.verticalLayout = QtWidgets.QVBoxLayout(OllamaRunningModelsDialog)
+        self.verticalLayout.setObjectName("verticalLayout")
+        self.modelsList = QtWidgets.QTreeWidget(parent=OllamaRunningModelsDialog)
+        self.modelsList.setEditTriggers(QtWidgets.QAbstractItemView.EditTrigger.NoEditTriggers)
+        self.modelsList.setAlternatingRowColors(True)
+        self.modelsList.setSelectionMode(QtWidgets.QAbstractItemView.SelectionMode.NoSelection)
+        self.modelsList.setRootIsDecorated(False)
+        self.modelsList.setItemsExpandable(False)
+        self.modelsList.setObjectName("modelsList")
+        self.verticalLayout.addWidget(self.modelsList)
+        self.buttonBox = QtWidgets.QDialogButtonBox(parent=OllamaRunningModelsDialog)
+        self.buttonBox.setOrientation(QtCore.Qt.Orientation.Horizontal)
+        self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.StandardButton.Close)
+        self.buttonBox.setObjectName("buttonBox")
+        self.verticalLayout.addWidget(self.buttonBox)
+
+        self.retranslateUi(OllamaRunningModelsDialog)
+        self.buttonBox.accepted.connect(OllamaRunningModelsDialog.accept) # type: ignore
+        self.buttonBox.rejected.connect(OllamaRunningModelsDialog.reject) # type: ignore
+        QtCore.QMetaObject.connectSlotsByName(OllamaRunningModelsDialog)
+
+    def retranslateUi(self, OllamaRunningModelsDialog):
+        _translate = QtCore.QCoreApplication.translate
+        OllamaRunningModelsDialog.setWindowTitle(_translate("OllamaRunningModelsDialog", "Running Models"))
+        self.modelsList.setSortingEnabled(True)
+        self.modelsList.headerItem().setText(0, _translate("OllamaRunningModelsDialog", "Name"))
+        self.modelsList.headerItem().setText(1, _translate("OllamaRunningModelsDialog", "ID"))
+        self.modelsList.headerItem().setText(2, _translate("OllamaRunningModelsDialog", "Size"))
+        self.modelsList.headerItem().setText(3, _translate("OllamaRunningModelsDialog", "Processor"))
+        self.modelsList.headerItem().setText(4, _translate("OllamaRunningModelsDialog", "Expires"))
--- a/OllamaInterface/Ui_OllamaWidget.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/OllamaInterface/Ui_OllamaWidget.py	Sun Aug 25 19:44:24 2024 +0200
@@ -93,6 +93,7 @@
 
     def retranslateUi(self, OllamaWidget):
         _translate = QtCore.QCoreApplication.translate
+        self.reloadModelsButton.setToolTip(_translate("OllamaWidget", "Press to reload the models list and update the \'ollama\' version information."))
         self.reloadModelsButton.setStatusTip(_translate("OllamaWidget", "Select to reload the list of selectable models."))
         self.modelComboBox.setStatusTip(_translate("OllamaWidget", "Select the model for the chat."))
         self.newChatButton.setToolTip(_translate("OllamaWidget", "Press to start a new chat."))
--- a/PluginAiOllama.epj	Thu Aug 08 18:33:49 2024 +0200
+++ b/PluginAiOllama.epj	Sun Aug 25 19:44:24 2024 +0200
@@ -199,7 +199,9 @@
     },
     "FORMS": [
       "OllamaInterface/OllamaChatWidget.ui",
+      "OllamaInterface/OllamaDetailedModelsDialog.ui",
       "OllamaInterface/OllamaHistoryWidget.ui",
+      "OllamaInterface/OllamaRunningModelsDialog.ui",
       "OllamaInterface/OllamaWidget.ui",
       "OllamaInterface/RunOllamaServerDialog.ui"
     ],
@@ -296,11 +298,15 @@
       "OllamaInterface/OllamaChatMessageBox.py",
       "OllamaInterface/OllamaChatWidget.py",
       "OllamaInterface/OllamaClient.py",
+      "OllamaInterface/OllamaDetailedModelsDialog.py",
       "OllamaInterface/OllamaHistoryWidget.py",
+      "OllamaInterface/OllamaRunningModelsDialog.py",
       "OllamaInterface/OllamaWidget.py",
       "OllamaInterface/RunOllamaServerDialog.py",
       "OllamaInterface/Ui_OllamaChatWidget.py",
+      "OllamaInterface/Ui_OllamaDetailedModelsDialog.py",
       "OllamaInterface/Ui_OllamaHistoryWidget.py",
+      "OllamaInterface/Ui_OllamaRunningModelsDialog.py",
       "OllamaInterface/Ui_OllamaWidget.py",
       "OllamaInterface/Ui_RunOllamaServerDialog.py",
       "OllamaInterface/__init__.py",
--- a/PluginAiOllama.py	Thu Aug 08 18:33:49 2024 +0200
+++ b/PluginAiOllama.py	Sun Aug 25 19:44:24 2024 +0200
@@ -130,6 +130,7 @@
             "OllamaLocalPort": 11435,  # port for locally started ollama server
             "OllamaHeartbeatInterval": 5,  # 5 seconds heartbeat time; 0 = disabled
             "StreamingChatResponse": True,
+            "OllamaModelLibraryUrl": "https://ollama.com/library",
         }
 
         self.__translator = None

eric ide

mercurial