PluginMetricsRadon.py

changeset 10: 8b1920a22df3
parent 9: 7f6e04213998
child 11: de8cadbd6a41
--- a/PluginMetricsRadon.py	Thu Sep 17 19:57:14 2015 +0200
+++ b/PluginMetricsRadon.py	Fri Sep 18 19:46:57 2015 +0200
@@ -53,14 +53,14 @@
         maintainability index was determined for a file
     @signal complexityDone(str, dict) emitted when the
         cyclomatic complexity was determined for a file
-    @signal error(str, str) emitted in case of an error
-    @signal batchFinished() emitted when a code metrics batch is done
+    @signal error(str, str, str) emitted in case of an error
+    @signal batchFinished(str) emitted when a code metrics batch is done
     """
     metricsDone = pyqtSignal(str, dict)
     maintainabilityIndexDone = pyqtSignal(str, dict)
     complexityDone = pyqtSignal(str, dict)
-    error = pyqtSignal(str, str)
-    batchFinished = pyqtSignal()
+    error = pyqtSignal(str, str, str)
+    batchFinished = pyqtSignal(str)
     
     def __init__(self, ui):
         """
@@ -77,49 +77,100 @@
         
         path = os.path.join(os.path.dirname(__file__), packageName)
         try:
+            # raw code metrics calculation
             self.backgroundService.serviceConnect(
-                'radon', 'Python2', path, 'CodeMetricsCalculator',
-                self.metricsCalculationDone,
-                onErrorCallback=self.serviceErrorPy2,
-                onBatchDone=self.batchJobDone)
+                'radon_raw', 'Python2', path, 'CodeMetricsCalculator',
+                lambda fn, res: self.metricsCalculationDone("raw", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy2(
+                    "raw", fx, lang, fn, msg),
+                onBatchDone=lambda fx, lang: self.batchJobDone(
+                    "raw", fx, lang))
             self.backgroundService.serviceConnect(
-                'radon', 'Python3', path, 'CodeMetricsCalculator',
-                self.metricsCalculationDone,
-                onErrorCallback=self.serviceErrorPy3,
-                onBatchDone=self.batchJobDone)
+                'radon_raw', 'Python3', path, 'CodeMetricsCalculator',
+                lambda fn, res: self.metricsCalculationDone("raw", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy3(
+                    "raw", fx, lang, fn, msg),
+                onBatchDone=lambda fx, lang: self.batchJobDone(
+                    "raw", fx, lang))
+            
+            # maintainability index calculation
+            self.backgroundService.serviceConnect(
+                'radon_mi', 'Python2', path, 'MaintainabilityIndexCalculator',
+                lambda fn, res: self.metricsCalculationDone("mi", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy2(
+                    "mi", fx, lang, fn, msg),
+                onBatchDone=lambda fx, lang: self.batchJobDone(
+                    "mi", fx, lang))
+            self.backgroundService.serviceConnect(
+                'radon_mi', 'Python3', path, 'MaintainabilityIndexCalculator',
+                lambda fn, res: self.metricsCalculationDone("mi", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy3(
+                    "mi", fx, lang, fn, msg),
+                onBatchDone=lambda fx, lang: self.batchJobDone(
+                    "mi", fx, lang))
+            
             self.hasBatch = True
         except TypeError:
+            # backward compatibility for eric 6.0
+            # raw code metrics calculation
             self.backgroundService.serviceConnect(
-                'radon', 'Python2', path, 'CodeMetricsCalculator',
-                self.metricsCalculationDone,
-                onErrorCallback=self.serviceErrorPy2)
+                'radon_raw', 'Python2', path, 'CodeMetricsCalculator',
+                lambda fn, res: self.metricsCalculationDone("raw", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy2(
+                    "raw", fx, lang, fn, msg))
             self.backgroundService.serviceConnect(
-                'radon', 'Python3', path, 'CodeMetricsCalculator',
-                self.metricsCalculationDone,
-                onErrorCallback=self.serviceErrorPy3)
+                'radon_raw', 'Python3', path, 'CodeMetricsCalculator',
+                lambda fn, res: self.metricsCalculationDone("raw", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy3(
+                    "raw", fx, lang, fn, msg))
+            
+            # maintainability index calculation
+            self.backgroundService.serviceConnect(
+                'radon_mi', 'Python2', path, 'MaintainabilityIndexCalculator',
+                lambda fn, res: self.metricsCalculationDone("mi", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy2(
+                    "mi", fx, lang, fn, msg))
+            self.backgroundService.serviceConnect(
+                'radon_mi', 'Python3', path, 'MaintainabilityIndexCalculator',
+                lambda fn, res: self.metricsCalculationDone("mi", fn, res),
+                onErrorCallback=lambda fx, lang, fn, msg: self.serviceErrorPy3(
+                    "mi", fx, lang, fn, msg))
+            
             self.hasBatch = False
         
-        self.queuedBatches = []
-        self.batchesFinished = True
+        self.queuedBatches = {
+            "raw": [],
+            "mi": [],
+            "cc": [],
+        }
+        self.batchesFinished = {
+            "raw": True,
+            "mi": True,
+            "cc": True,
+        }
         
         self.__translator = None
         self.__loadTranslator()
     
-    def __serviceError(self, fn, msg):
+    def __serviceError(self, type_, fn, msg):
         """
         Private slot handling service errors.
         
+        @param type_ type of the calculated metrics
+        @type str, one of ["raw", "mi", "cc"]
         @param fn file name
         @type str
         @param msg message text
         @type str
         """
-        self.error.emit(fn, msg)
+        self.error.emit(type_, fn, msg)
     
-    def serviceErrorPy2(self, fx, lang, fn, msg):
+    def serviceErrorPy2(self, type_, fx, lang, fn, msg):
         """
         Public slot handling service errors for Python 2.
         
+        @param type_ type of the calculated metrics
+        @type str, one of ["raw", "mi", "cc"]
         @param fx service name
         @type str
         @param lang language
@@ -129,17 +180,20 @@
         @param msg message text
         @type str
         """
-        if fx in ['radon', 'batch_radon'] and lang == 'Python2':
-            if fx == 'radon':
-                self.__serviceError(fn, msg)
+        if fx in ['radon_' + type_, 'batch_radon_' + type_] and \
+                lang == 'Python2':
+            if fx == 'radon_' + type_:
+                self.__serviceError(type_, fn, msg)
             else:
-                self.__serviceError(self.tr("Python 2 batch job"), msg)
-                self.batchJobDone(fx, lang)
+                self.__serviceError(type_, self.tr("Python 2 batch job"), msg)
+                self.batchJobDone(type_, fx, lang)
     
-    def serviceErrorPy3(self, fx, lang, fn, msg):
+    def serviceErrorPy3(self, type_, fx, lang, fn, msg):
         """
         Public slot handling service errors for Python 3.
         
+        @param type_ type of the calculated metrics
+        @type str, one of ["raw", "mi", "cc"]
         @param fx service name
         @type str
         @param lang language
@@ -149,52 +203,57 @@
         @param msg message text
         @type str
         """
-        if fx in ['radon', 'batch_radon'] and lang == 'Python3':
-            if fx == 'radon':
-                self.__serviceError(fn, msg)
+        if fx in ['radon_' + type_, 'batch_radon_' + type_] and \
+                lang == 'Python3':
+            if fx == 'radon_' + type_:
+                self.__serviceError(type_, fn, msg)
             else:
-                self.__serviceError(self.tr("Python 3 batch job"), msg)
-                self.batchJobDone(fx, lang)
+                self.__serviceError(type_, self.tr("Python 3 batch job"), msg)
+                self.batchJobDone(type_, fx, lang)
     
-    def batchJobDone(self, fx, lang):
+    def batchJobDone(self, type_, fx, lang):
         """
         Public slot handling the completion of a batch job.
         
+        @param type_ type of the calculated metrics
+        @type str, one of ["raw", "mi", "cc"]
         @param fx service name
         @type str
         @param lang language
         @type str
         """
-        if fx in ['radon', 'batch_radon']:
-            if lang in self.queuedBatches:
-                self.queuedBatches.remove(lang)
+        if fx in ['radon_' + type_, 'batch_radon_' + type_]:
+            if lang in self.queuedBatches[type_]:
+                self.queuedBatches[type_].remove(lang)
             # prevent sending the signal multiple times
-            if len(self.queuedBatches) == 0 and not self.batchesFinished:
-                self.batchFinished.emit()
-                self.batchesFinished = True
+            if len(self.queuedBatches[type_]) == 0 and \
+                    not self.batchesFinished[type_]:
+                self.batchFinished.emit(type_)
+                self.batchesFinished[type_] = True
     
-    def metricsCalculationDone(self, filename, metricsType, result):
+    def metricsCalculationDone(self, type_, filename, result):
         """
         Public slot to dispatch the result.
         
+        @param type_ type of the calculated metrics
+        @type str, one of ["raw", "mi", "cc"]
         @param filename name of the file the results belong to
         @type str
-        @param metricsType type of the calculated metrics
-        @type str, one of ["raw", "mi", "cc"]
         @param result result dictionary
         @type dict
         """
-        if metricsType == "raw":
+        if type_ == "raw":
             self.metricsDone.emit(filename, result)
-        elif metricsType == "mi":
+        elif type_ == "mi":
             self.maintainabilityIndexDone.emit(filename, result)
-        elif metricsType == "cc":
+        elif type_ == "cc":
             self.complexityDone.emit(filename, result)
         else:
             self.error.emit(
+                type_,
                 filename,
                 self.tr("Unknown metrics result received ({0}).").format(
-                    metricsType)
+                    type_)
             )
     
     def __initialize(self):
@@ -240,7 +299,7 @@
             return
         
         self.backgroundService.enqueueRequest(
-            'radon', lang, filename, [source, 'raw'])
+            'radon_raw', lang, filename, [source])
 
     def rawMetricsBatch(self, argumentsList):
         """
@@ -260,22 +319,22 @@
             if lang not in ['Python2', 'Python3']:
                 continue
             else:
-                data[lang].append((filename, source, 'raw'))
+                data[lang].append((filename, source))
         
-        self.queuedBatches = []
+        self.queuedBatches["raw"] = []
         for lang in ['Python2', 'Python3']:
             if data[lang]:
-                self.queuedBatches.append(lang)
-                self.backgroundService.enqueueRequest('batch_radon', lang, "",
-                                                      data[lang])
-                self.batchesFinished = False
+                self.queuedBatches["raw"].append(lang)
+                self.backgroundService.enqueueRequest('batch_radon_raw', lang,
+                                                      "", data[lang])
+                self.batchesFinished["raw"] = False
     
     def cancelRawMetricsBatch(self):
         """
         Public method to cancel all batch jobs.
         """
         for lang in ['Python2', 'Python3']:
-            self.backgroundService.requestCancel('batch_radon', lang)
+            self.backgroundService.requestCancel('batch_radon_raw', lang)
 
     def maintainabilityIndex(self, lang, filename, source):
         """
@@ -296,7 +355,7 @@
             return
         
         self.backgroundService.enqueueRequest(
-            'radon', lang, filename, [source, 'mi'])
+            'radon_mi', lang, filename, [source])
 
     def maintainabilityIndexBatch(self, argumentsList):
         """
@@ -316,22 +375,22 @@
             if lang not in ['Python2', 'Python3']:
                 continue
             else:
-                data[lang].append((filename, source, 'mi'))
+                data[lang].append((filename, source))
         
-        self.queuedBatches = []
+        self.queuedBatches["mi"] = []
         for lang in ['Python2', 'Python3']:
             if data[lang]:
-                self.queuedBatches.append(lang)
-                self.backgroundService.enqueueRequest('batch_radon', lang, "",
-                                                      data[lang])
-                self.batchesFinished = False
+                self.queuedBatches["mi"].append(lang)
+                self.backgroundService.enqueueRequest('batch_radon_mi', lang,
+                                                      "", data[lang])
+                self.batchesFinished["mi"] = False
     
     def cancelMaintainabilityIndexBatch(self):
         """
         Public method to cancel all batch jobs.
         """
         for lang in ['Python2', 'Python3']:
-            self.backgroundService.requestCancel('batch_radon', lang)
+            self.backgroundService.requestCancel('batch_radon_mi', lang)
     
     def activate(self):
         """

eric ide

mercurial