Wed, 21 Dec 2022 08:52:36 +0100
Adapted some import statements to eric 23.1 and newer.
# -*- coding: utf-8 -*-

# Copyright (c) 2015 - 2022 Detlev Offenbach <detlev@die-offenbachs.de>
#

"""
Module implementing the raw code metrics service.
"""

import multiprocessing
import queue


def initService():
    """
    Initialize the service and return the entry point.

    @return the entry point for the background client (function)
    """
    return rawCodeMetrics


def initBatchService():
    """
    Initialize the batch service and return the entry point.

    @return the entry point for the background client (function)
    """
    return batchRawCodeMetrics


def rawCodeMetrics(file, text=""):
    """
    Function to calculate the raw code metrics of one file.

    @param file source filename
    @type str
    @param text source text
    @type str
    @return tuple containing the result dictionary
    @rtype (tuple of dict)
    """
    return __rawCodeMetrics(file, text)


def batchRawCodeMetrics(argumentsList, send, fx, cancelled, maxProcesses=0):
    """
    Module function to calculate the raw code metrics for a batch of files.

    @param argumentsList list of arguments tuples as given for rawCodeMetrics
    @type list
    @param send reference to send function
    @type function
    @param fx registered service name
    @type str
    @param cancelled reference to function checking for a cancellation
    @type function
    @param maxProcesses number of processes to be used
    @type int
    """
    if maxProcesses == 0:
        # determine based on CPU count
        try:
            NumberOfProcesses = multiprocessing.cpu_count()
            if NumberOfProcesses >= 1:
                NumberOfProcesses -= 1
        except NotImplementedError:
            NumberOfProcesses = 1
    else:
        NumberOfProcesses = maxProcesses

    # Create queues
    taskQueue = multiprocessing.Queue()
    doneQueue = multiprocessing.Queue()

    # Submit tasks (initially two times the number of processes)
    initialTasks = 2 * NumberOfProcesses
    for task in argumentsList[:initialTasks]:
        taskQueue.put(task)

    # Start worker processes
    workers = [
        multiprocessing.Process(target=workerTask, args=(taskQueue, doneQueue))
        for _ in range(NumberOfProcesses)
    ]
    for worker in workers:
        worker.start()

    # Get and send results
    endIndex = len(argumentsList) - initialTasks
    for i in range(len(argumentsList)):
        resultSent = False
        wasCancelled = False

        while not resultSent:
            try:
                # get result (waiting max. 3 seconds) and send it to frontend
                filename, result = doneQueue.get(timeout=3)
                send(fx, filename, result)
                resultSent = True
            except queue.Empty:
                # ignore empty queue, just carry on
                if cancelled():
                    wasCancelled = True
                    break

        if wasCancelled or cancelled():
            # just exit the loop ignoring the results of queued tasks
            break

        if i < endIndex:
            taskQueue.put(argumentsList[i + initialTasks])

    # Tell child processes to stop
    for _ in range(NumberOfProcesses):
        taskQueue.put("STOP")

    for worker in workers:
        worker.join()
        worker.close()


def workerTask(inputQueue, outputQueue):
    """
    Module function acting as the parallel worker for the raw code metrics
    calculation.

    @param inputQueue input queue
    @type multiprocessing.Queue
    @param outputQueue output queue
    @type multiprocessing.Queue
    """
    for filename, source in iter(inputQueue.get, "STOP"):
        result = __rawCodeMetrics(filename, source)
        outputQueue.put((filename, result))


def __rawCodeMetrics(file, text=""):
    """
    Private function to calculate the raw code metrics for one Python file.

    @param file source filename
    @type str
    @param text source text
    @type str
    @return tuple containing the result dictionary
    @rtype (tuple of dict)
    """
    from radon.raw import analyze

    try:
        res = __raw2Dict(analyze(text))
    except Exception as err:
        res = {"error": str(err)}
    return (res,)


def __raw2Dict(obj):
    """
    Private function to convert an object holding raw analysis results into
    a dictionary.

    @param obj object as returned from analyze()
    @type radon.raw.Module
    @return conversion result
    @rtype dict
    """
    result = {}
    for a in obj._fields:
        v = getattr(obj, a, None)
        if v is not None:
            result[a] = v
    return result
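

#
# A minimal usage sketch (an assumption, not part of the original service):
# it drives the batch entry point directly instead of through eric's
# background service, which is what normally supplies the send() and
# cancelled() callbacks. The callback implementations, the sample sources,
# and the service name "radon_raw" below are hypothetical stand-ins.
# Running it requires radon to be installed; each result dictionary carries
# the fields of radon's raw Module namedtuple (e.g. loc, sloc, comments).
# The __main__ guard is required because the worker processes re-import
# this module under the spawn start method.
#
if __name__ == "__main__":
    def _send(fx, filename, result):
        # eric would forward this to the frontend; here we just print it
        print(fx, filename, result)

    def _cancelled():
        # no cancellation support in this sketch
        return False

    _sources = [
        ("a.py", "def f():\n    return 1\n"),
        ("b.py", "# a comment-only module\n"),
    ]
    batchRawCodeMetrics(_sources, _send, "radon_raw", _cancelled, maxProcesses=2)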