Sun, 09 Apr 2017 17:14:50 +0200
Fixed a serious bug related to the queue module being called Queue in Python 2.
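For context, the standard library module is named Queue under Python 2 but queue under Python 3, so a plain "import queue" raises ImportError on Python 2. The following sketch only illustrates the compatibility import pattern that addresses this; the actual service module follows below.

try:
    import Queue as queue   # Python 2 module name
except ImportError:
    import queue            # Python 3 module name

# Either way, queue.Empty is available for handling a timed-out get().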
# -*- coding: utf-8 -*-

# Copyright (c) 2015 - 2017 Detlev Offenbach <detlev@die-offenbachs.de>
#

"""
Module implementing the maintainability index service.
"""

from __future__ import unicode_literals

try:
    str = unicode       # __IGNORE_EXCEPTION__ __IGNORE_WARNING__
except NameError:
    pass

try:
    import Queue as queue   # Py2
except ImportError:
    import queue

import sys
import multiprocessing


def initService():
    """
    Initialize the service and return the entry point.

    @return the entry point for the background client (function)
    """
    return maintainabilityIndex


def initBatchService():
    """
    Initialize the batch service and return the entry point.

    @return the entry point for the background client (function)
    """
    return batchMaintainabilityIndex


def maintainabilityIndex(file, text=""):
    """
    Function to calculate the maintainability index of one file.

    @param file source filename
    @type str
    @param text source text
    @type str
    @return tuple containing the result dictionary
    @rtype (tuple of dict)
    """
    return __maintainabilityIndex(file, text)


def batchMaintainabilityIndex(argumentsList, send, fx, cancelled):
    """
    Module function to calculate the maintainability index for a batch of
    files.

    @param argumentsList list of arguments tuples as given for
        maintainabilityIndex
    @type list
    @param send reference to send function
    @type function
    @param fx registered service name
    @type str
    @param cancelled reference to function checking for a cancellation
    @type function
    """
    try:
        NumberOfProcesses = multiprocessing.cpu_count()
        if NumberOfProcesses > 1:
            # leave one core for the frontend process
            NumberOfProcesses -= 1
    except NotImplementedError:
        NumberOfProcesses = 1

    # Create queues
    taskQueue = multiprocessing.Queue()
    doneQueue = multiprocessing.Queue()

    # Submit tasks (initially twice the number of processes)
    initialTasks = 2 * NumberOfProcesses
    for task in argumentsList[:initialTasks]:
        taskQueue.put(task)

    # Start worker processes
    for i in range(NumberOfProcesses):
        multiprocessing.Process(target=worker, args=(taskQueue, doneQueue))\
            .start()

    # Get and send results
    endIndex = len(argumentsList) - initialTasks
    for i in range(len(argumentsList)):
        resultSent = False
        wasCancelled = False

        while not resultSent:
            try:
                # get result (waiting max. 3 seconds) and send it to the
                # frontend
                filename, result = doneQueue.get(timeout=3)
                send(fx, filename, result)
                resultSent = True
            except queue.Empty:
                # ignore empty queue, just carry on
                if cancelled():
                    wasCancelled = True
                    break

        if wasCancelled or cancelled():
            # just exit the loop ignoring the results of queued tasks
            break

        if i < endIndex:
            taskQueue.put(argumentsList[i + initialTasks])

    # Tell child processes to stop
    for i in range(NumberOfProcesses):
        taskQueue.put('STOP')


def worker(inputQueue, outputQueue):
    """
    Module function acting as the parallel worker for the maintainability
    index calculation.

    @param inputQueue input queue
    @type multiprocessing.Queue
    @param outputQueue output queue
    @type multiprocessing.Queue
    """
    for filename, source in iter(inputQueue.get, 'STOP'):
        result = __maintainabilityIndex(filename, source)
        outputQueue.put((filename, result))


def __maintainabilityIndex(file, text=""):
    """
    Private function to calculate the maintainability index for one
    Python file.

    @param file source filename
    @type str
    @param text source text
    @type str
    @return tuple containing the result dictionary
    @rtype (tuple of dict)
    """
    from radon.metrics import mi_visit, mi_rank

    # Check type for py2: if not str it's unicode
    if sys.version_info[0] == 2:
        try:
            text = text.encode('utf-8')
        except UnicodeError:
            pass

    try:
        mi = mi_visit(text, True)
        rank = mi_rank(mi)
        res = {"mi": mi, "rank": rank}
    except Exception as err:
        res = {"error": str(err)}
    return (res, )
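A minimal sketch of exercising the service function directly, outside the eric background client; it assumes the radon package is installed, and the file name "example.py" is only a placeholder.

# Hypothetical standalone usage, not part of the eric service protocol.
# Assumes radon is installed and "example.py" is a readable Python file.
if __name__ == "__main__":
    with open("example.py", "r") as sourceFile:
        sourceText = sourceFile.read()
    (result, ) = maintainabilityIndex("example.py", sourceText)
    # result is {"mi": <float>, "rank": "A"|"B"|"C"} on success,
    # or {"error": <message>} if radon could not analyse the source.
    print(result)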