OllamaInterface/i18n/ollama_en.ts

changeset 20:8cb7bfe07e15
parent    19:1797a34618d8
child     21:22245a10b118
--- a/OllamaInterface/i18n/ollama_en.ts	Thu Aug 29 13:59:50 2024 +0200
+++ b/OllamaInterface/i18n/ollama_en.ts	Fri Aug 30 12:04:28 2024 +0200
@@ -128,16 +128,6 @@
     </message>
     <message>
       <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
-      <source>http</source>
-      <translation type="unfinished" />
-    </message>
-    <message>
-      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
-      <source>https</source>
-      <translation type="unfinished" />
-    </message>
-    <message>
-      <location filename="../ConfigurationPage/OllamaPage.ui" line="0" />
       <source>Host:</source>
       <translation type="unfinished" />
     </message>
@@ -489,11 +479,6 @@
     </message>
     <message>
       <location filename="../OllamaWidget.ui" line="0" />
-      <source>Select to reload the list of selectable models.</source>
-      <translation type="unfinished" />
-    </message>
-    <message>
-      <location filename="../OllamaWidget.ui" line="0" />
       <source>Select the model for the chat.</source>
       <translation type="unfinished" />
     </message>
@@ -558,7 +543,7 @@
     </message>
     <message>
       <location filename="../RunOllamaServerDialog.ui" line="0" />
-      <source>Press to restart the loacl ollama server.</source>
+      <source>Press to restart the local ollama server.</source>
       <translation type="unfinished" />
     </message>
     <message>
@@ -583,7 +568,7 @@
     </message>
     <message>
       <location filename="../RunOllamaServerDialog.py" line="80" />
-      <source>The loacl 'ollama' server process could not be started.</source>
+      <source>The local 'ollama' server process could not be started.</source>
       <translation type="unfinished" />
     </message>
   </context>