Commit 847b80a9 authored by Stephen Smith, committed by Smith

Update UI Folder

parent 0f4e5f60
@@ -13,6 +13,18 @@ endif()
add_python_package(mantidqtinterfaces INSTALL_LIB_DIRS ${_install_lib_dirs})
# Subdirectories from which ui files need processing to py files. FilterEvents
# doesn't need any special work
add_subdirectory(mantidqtinterfaces/ui)
# Chain all required interface custom targets into CompilePyUI
add_custom_target(CompilePyUI DEPENDS CompileUIUI)
# Put them into the 'CompileUI' folder or group in VS and the like, for
# convenience
set_property(TARGET CompilePyUI PROPERTY FOLDER "CompilePyUI")
set_property(TARGET CompileUIUI PROPERTY FOLDER "CompilePyUI")
# Setup dependency chain
add_dependencies(mantidqtinterfaces PythonInterface)
include(UiToPy)
add_subdirectory(dataprocessorinterface)
add_subdirectory(batchwidget)
add_subdirectory(poldi)
add_subdirectory(drill)
set(UI_FILES
)
set(UI_FILES)
UiToPy(UI_FILES CompileUIUIBase)
uitopy(UI_FILES CompileUIUIBase)
add_custom_target(CompileUIUI
DEPENDS CompileUIUIBase
CompileUIDataProcessorInterface
CompileUIBatchWidgetInterface
CompileUIPoldi)
add_custom_target(
CompileUIUI DEPENDS CompileUIUIBase CompileUIDataProcessorInterface
CompileUIBatchWidgetInterface
)
# Put all ui targets inside the 'CompilePyUI' folder or group in VS and the
# like, for convenience
set_property(TARGET CompileUIUIBase PROPERTY FOLDER "CompilePyUI")
set_property(TARGET CompileUIDataProcessorInterface
PROPERTY FOLDER "CompilePyUI")
set_property(TARGET CompileUIPoldi PROPERTY FOLDER "CompilePyUI")
set_property(
TARGET CompileUIDataProcessorInterface PROPERTY FOLDER "CompilePyUI"
)
DRILL (Data Reduction for ILL) is the GUI for cross-technique data reduction at the ILL.
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from qtpy.QtCore import QObject, Signal
from mantid.api import AlgorithmObserver
class DrillAlgorithmObserverSignals(QObject):
"""
Signals that the observer could send.
"""
finished = Signal(int, str) # return code (0: success, 1: error), error message
progress = Signal(float) # progress value between 0.0 and 1.0
class DrillAlgorithmObserver(AlgorithmObserver):
"""
Class that defines an observer for the algorithms started through the DrILL
interface. It basically overrides the handle methods to propagate signals.
"""
def __init__(self):
super(DrillAlgorithmObserver, self).__init__()
self.signals = DrillAlgorithmObserverSignals()
self.error = False
self.errorMsg = None
def finishHandle(self):
"""
Called when the observed algo is finished.
"""
if self.error:
self.signals.finished.emit(1, self.errorMsg)
else:
self.signals.finished.emit(0, "")
def errorHandle(self, msg):
"""
Called when the observed algorithm encounters an error.
Args:
msg (str): associated error message
"""
self.error = True
self.errorMsg = msg
def progressHandle(self, p, msg, estimatedTime, progressPrecision):
"""
Called when the observed algo reports its progress.
Args:
p (float): progress value between 0.0 and 1.0
msg (str): an associated message
"""
self.signals.progress.emit(p)
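# --- Usage sketch (not part of this commit): wiring the observer to an
# asynchronously executed algorithm. Assumes a running Qt event loop (e.g.
# inside Mantid Workbench); the algorithm and the connected slots are
# illustrative only. ---
from mantid.api import AlgorithmManager
observer = DrillAlgorithmObserver()
observer.signals.finished.connect(lambda code, msg: print("finished:", code, msg))
observer.signals.progress.connect(lambda p: print("progress:", p))
alg = AlgorithmManager.create("CreateSampleWorkspace")
alg.initialize()
alg.setProperty("OutputWorkspace", "demo_ws")
observer.observeFinish(alg)  # observation hooks provided by mantid.api.AlgorithmObserver
observer.observeError(alg)
observer.observeProgress(alg)
alg.executeAsync()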
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from qtpy.QtCore import QObject, Signal, QThreadPool
class DrillAlgorithmPoolSignals(QObject):
"""
Signals that the pool could send.
"""
"""
Sent when a task starts.
Args:
str: the name of the task.
"""
taskStarted = Signal(str)
"""
Sent when a task ends with success.
Args:
str: the name of the task
"""
taskSuccess = Signal(str)
"""
Sent when a task ends with an error.
Args:
str: the name of the task
str: the error message
"""
taskError = Signal(str, str)
"""
Sent when the global progress of the pool is updated.
Args:
int: progress in percent
"""
progressUpdate = Signal(int)
"""
Sent when all the tasks are done.
"""
processingDone = Signal()
class DrillAlgorithmPool(QThreadPool):
"""
Class that defines a thread pool used to run the tasks submitted through the
DrILL interface.
"""
def __init__(self):
super(DrillAlgorithmPool, self).__init__()
self.signals = DrillAlgorithmPoolSignals()
# set of all submitted tasks
self._tasks = set()
# number of finished tasks
self._tasksDone = 0
# progress value of each task (between 0.0 and 1.0)
self._progresses = dict()
# if the threadpool is currently running
self._running = False
# to limit the number of threads
# self.setMaxThreadCount(1)
def addProcesses(self, tasks):
"""
Add a list of tasks to the thread pool.
Args:
tasks (list(DrillTask)): list of tasks
"""
if not tasks:
self.signals.processingDone.emit()
return
self._running = True
for task in tasks:
self._tasks.add(task)
self._progresses[task] = 0.0
task.signals.started.connect(self.onTaskStarted)
task.signals.finished.connect(self.onTaskFinished)
task.signals.progress.connect(self.onProgress)
self.start(task)
def abortProcessing(self):
"""
Abort the processing. This function stops the currently running
process(es) and removes the pending one(s) from the queue.
"""
self._running = False
self.clear()
for task in list(self._tasks):
task.cancel()
self._tasks.clear()
self._tasksDone = 0
self._progresses.clear()
self.signals.processingDone.emit()
def onTaskStarted(self, task):
"""
Called when a task is started.
Args:
task (DrillTask): the task
"""
self.signals.taskStarted.emit(task.getName())
def onTaskFinished(self, task, ret, msg):
"""
Called each time a task in the pool finishes.
Args:
task (DrillTask): the task
ret (int): return code (0 for success)
msg (str): error message, if any
"""
if task in self._tasks:
self._tasks.remove(task)
if task in self._progresses:
del self._progresses[task]
else:
return
self._tasksDone += 1
if ret:
self.signals.taskError.emit(task.getName(), msg)
else:
self.signals.taskSuccess.emit(task.getName())
if self._running:
if not self._tasks:
self._tasksDone = 0
self.clear()
self._running = False
self._progresses.clear()
self.signals.processingDone.emit()
def onProgress(self, task, p):
"""
Called each time a task in the pool reports on its progress.
Args:
task (DrillTask): the task
p (float): progress between 0.0 and 1.0
"""
self._progresses[task] = p
# global progress: average over running and finished tasks, in percent
progress = sum(self._progresses.values()) * 100
progress += 100 * self._tasksDone
progress /= len(self._tasks) + self._tasksDone
self.signals.progressUpdate.emit(progress)
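# --- Usage sketch (not part of this commit): driving the pool. DrillTask is
# the task class used by DrillExportModel below (DrillTask(name, algorithm,
# **properties)); its import path here is an assumption about the package
# layout. A running Qt event loop is assumed for signal delivery. ---
from mantidqtinterfaces.drill.model.DrillTask import DrillTask
pool = DrillAlgorithmPool()
pool.signals.taskStarted.connect(lambda name: print("started:", name))
pool.signals.taskSuccess.connect(lambda name: print("success:", name))
pool.signals.taskError.connect(lambda name, msg: print("error:", name, msg))
pool.signals.progressUpdate.connect(lambda p: print("global progress:", p, "%"))
pool.signals.processingDone.connect(lambda: print("all tasks done"))
tasks = [DrillTask("demo", "CreateSampleWorkspace", OutputWorkspace="demo_ws")]
pool.addProcesses(tasks)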
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from mantid.simpleapi import mtd, AlgorithmManager
from mantid.kernel import config, logger
from mantid.api import WorkspaceGroup
from .configurations import RundexSettings
from .DrillAlgorithmPool import DrillAlgorithmPool
from .DrillTask import DrillTask
import re
import os
class DrillExportModel:
"""
Dictionary containing algorithms and their activation state.
"""
_exportAlgorithms = None
"""
Dictionary of export file extensions.
"""
_exportExtensions = None
"""
Dictionary of export algorithm short documentation.
"""
_exportDocs = None
"""
ThreadPool to run export algorithms asynchronously.
"""
_pool = None
"""
Dictionary of all the exports (dict(str:set(str))).
"""
_exports = None
"""
Dictionary of the successful exports (dict(str:set(str))).
"""
_successExports = None
def __init__(self, acquisitionMode):
"""
Create the export model by providing an acquisition mode.
Args:
acquisitionMode (str): acquisition mode
"""
self._exportAlgorithms = {k: v for k, v in RundexSettings.EXPORT_ALGORITHMS[acquisitionMode].items()}
self._exportExtensions = dict()
self._exportDocs = dict()
for a in self._exportAlgorithms.keys():
if a in RundexSettings.EXPORT_ALGO_EXTENSION:
self._exportExtensions[a] = \
RundexSettings.EXPORT_ALGO_EXTENSION[a]
try:
alg = AlgorithmManager.createUnmanaged(a)
self._exportDocs[a] = alg.summary()
except:
pass
self._pool = DrillAlgorithmPool()
self._pool.signals.taskError.connect(self._onTaskError)
self._pool.signals.taskSuccess.connect(self._onTaskSuccess)
self._exports = dict()
self._successExports = dict()
def getAlgorithms(self):
"""
Get the list of export algorithms.
Returns:
list(str): names of algorithms
"""
return [algo for algo in self._exportAlgorithms.keys()]
def getAlgorithmExtentions(self):
"""
Get the extension used for the output file of each export algorithm.
Returns:
dict(str:str): dictionary algo:extension
"""
return {k:v for k,v in self._exportExtensions.items()}
def getAlgorithmDocs(self):
"""
Get the short documentation of each export algorithm.
Returns:
dict(str:str): dictionary algo:doc
"""
return {k:v for k,v in self._exportDocs.items()}
def isAlgorithmActivated(self, algorithm):
"""
Get the activation state of a specific algorithm.
Args:
algorithm (str): name of the algo
Returns:
bool: True if the algorithm is activated
"""
if algorithm in self._exportAlgorithms:
return self._exportAlgorithms[algorithm]
else:
return False
def activateAlgorithm(self, algorithm):
"""
Activate a specific algorithm.
Args:
algorithm (str): name of the algo
"""
if algorithm in self._exportAlgorithms:
self._exportAlgorithms[algorithm] = True
def inactivateAlgorithm(self, algorithm):
"""
Inactivate a specific algorithm.
Args:
algorithm (str): name of the algo
"""
if algorithm in self._exportAlgorithms:
self._exportAlgorithms[algorithm] = False
def _validCriteria(self, ws, algo):
"""
Check whether the criteria of the export algorithm are fulfilled or not.
Args:
ws (str): name of the workspace on which the criteria will be
tested
algo (str): name of the export algorithm
"""
if algo not in RundexSettings.EXPORT_ALGO_CRITERIA:
return True
criteria = RundexSettings.EXPORT_ALGO_CRITERIA[algo]
if not criteria:
return True
try:
processingAlgo = mtd[ws].getHistory().lastAlgorithm()
params = re.findall("%[a-zA-Z]*%", criteria)
for param in params:
value = processingAlgo.getPropertyValue(param[1:-1])
criteria = criteria.replace(param, '"' + value + '"')
return bool(eval(criteria))
except:
return False
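# Illustration (not part of this commit): the criteria strings in
# RundexSettings.EXPORT_ALGO_CRITERIA are Python expressions containing
# %PropertyName% placeholders. For a hypothetical criteria such as
# "%UnitsIn% == 'Wavelength'", each placeholder is replaced by the quoted
# value of that property read from the last algorithm in the workspace
# history (giving e.g. "Wavelength" == 'Wavelength'), and the resulting
# expression is eval()'d; any failure marks the criteria as not fulfilled.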
def _onTaskSuccess(self, name):
"""
Triggered when an export task finishes successfully.
Args:
name (str): the task name
"""
# the task name is "<workspace>:<file path>"; split on the first colon only,
# since the file path may itself contain ':' (e.g. on Windows)
wsName, filename = name.split(':', 1)
if wsName not in self._successExports:
self._successExports[wsName] = set()
self._successExports[wsName].add(filename)
if wsName in self._exports:
self._exports[wsName].discard(filename)
if not self._exports[wsName]:
del self._exports[wsName]
self._logSuccessExport(wsName)
def _onTaskError(self, name, msg):
"""
Triggered when an export task fails.
Args:
name (str): the task name
msg (str): error msg
"""
# split on the first colon only, as in _onTaskSuccess()
wsName, filename = name.split(':', 1)
logger.error("Error while exporting workspace {}.".format(wsName))
logger.error(msg)
if wsName in self._exports:
self._exports[wsName].discard(filename)
if not self._exports[wsName]:
del self._exports[wsName]
self._logSuccessExport(wsName)
def _logSuccessExport(self, wsName):
"""
Log all the successful exports.
Args:
wsName (str): name of the concerned workspace
"""
if wsName not in self._successExports:
return
filenames = ", ".join(self._successExports[wsName])
logger.notice("Successful export of workspace {} to {}"
.format(wsName, filenames))
del self._successExports[wsName]
def run(self, sample):
"""
Run the export algorithms on a sample. For each export algorithm, the
function will try to validate the criteria (using _validCriteria()) on
the output workspace that corresponds to the sample. If the criteria are
valid, the export will be run on all workspaces whose name contains the
sample name.
Args:
sample (DrillSample): sample to be exported
"""
exportPath = config.getString("defaultsave.directory")
if not exportPath:
logger.warning("Default save directory is not defined. Please "
"specify one in the data directories dialog to "
"enable exports.")
return
workspaceName = sample.getOutputName()
try:
outputWs = mtd[workspaceName]
if isinstance(outputWs, WorkspaceGroup):
names = outputWs.getNames()
outputWs = names[0]
else:
outputWs = workspaceName
except:
return
tasks = list()
for algo,active in self._exportAlgorithms.items():
if not active:
continue
if not self._validCriteria(outputWs, algo):
logger.notice("Export of sample {} with {} was skipped "
"because workspaces are not compatible."
.format(outputWs, algo))
continue
for wsName in mtd.getObjectNames(contain=workspaceName):
if isinstance(mtd[wsName], WorkspaceGroup):
continue
filename = os.path.join(
exportPath,
wsName + RundexSettings.EXPORT_ALGO_EXTENSION[algo])
name = wsName + ":" + filename
if wsName not in self._exports:
self._exports[wsName] = set()
self._exports[wsName].add(filename)
kwargs = {}
if 'Ascii' in algo:
log_list = (mtd[wsName].getInstrument().getStringParameter('log_list_to_save')[0]).split(',')
kwargs['LogList'] = [log.strip() for log in log_list] # removes white spaces
if 'Reflectometry' in algo:
kwargs['WriteHeader'] = True
kwargs['FileExtension'] = 'custom'
else:
kwargs['WriteXError'] = True
task = DrillTask(name, algo, InputWorkspace=wsName,
FileName=filename, **kwargs)
tasks.append(task)
self._pool.addProcesses(tasks)
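# --- Usage sketch (not part of this commit): typical flow of the export
# model. The acquisition mode "SANS" and the `sample` object are placeholders;
# valid modes come from RundexSettings.EXPORT_ALGORITHMS and `sample` is a
# DrillSample provided elsewhere by the DrILL interface. ---
# model = DrillExportModel("SANS")
# model.getAlgorithms()              # available export algorithms
# model.getAlgorithmExtentions()     # algorithm -> output file extension
# model.getAlgorithmDocs()           # algorithm -> short documentation
# model.activateAlgorithm(model.getAlgorithms()[0])
# model.run(sample)                  # export the sample's output workspaces to
#                                    # the default save directory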
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
import queue
import threading
import time
from qtpy.QtCore import QObject, Signal
import mantid.simpleapi as sapi
class DrillParameter:
"""
Class that defines a parameter to be checked.