From e650b5027462e34e86a2fdc35eecbd59df0fd06d Mon Sep 17 00:00:00 2001
From: Mathieu Doucet <matd10@yahoo.com>
Date: Mon, 25 Jun 2018 14:41:59 -0400
Subject: [PATCH] Allow workspace groups as input

---
 .../MagnetismReflectometryReduction.py        | 243 ++++++++++--------
 1 file changed, 137 insertions(+), 106 deletions(-)

diff --git a/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py b/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py
index 5438d42fbf2..0f6d89ed297 100644
--- a/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py
+++ b/Framework/PythonInterface/plugins/algorithms/MagnetismReflectometryReduction.py
@@ -5,7 +5,6 @@ from __future__ import (absolute_import, division, print_function)
 import math
 import numpy as np
-import functools
 from mantid.api import *
 from mantid.simpleapi import *
 from mantid.kernel import *
@@ -37,9 +36,12 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
         """ Friendly description """
         return "Magnetism Reflectometer (REFM) reduction"
 
+    def checkGroups(self):
+        """Return False so the framework hands workspace groups to this
+           algorithm whole instead of running it once per group member."""
+        return False
+
     def PyInit(self):
         """ Initialization """
-        self.declareProperty(StringArrayProperty("RunNumbers"), "List of run numbers to process")
         self.declareProperty(WorkspaceProperty("InputWorkspace", "", Direction.Input, PropertyMode.Optional),
                              "Optionally, we can provide a scattering workspace directly")
@@ -89,10 +91,9 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
         self.declareProperty("QMin", 0.005, doc="Minimum Q-value")
         self.declareProperty("QStep", 0.02, doc="Step size in Q. Enter a negative value to get a log scale")
         self.declareProperty("AngleOffset", 0.0, doc="angle offset (rad)")
-        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", "", Direction.Output), "Output workspace")
+        self.declareProperty(WorkspaceProperty("OutputWorkspace", "", Direction.Output), "Output workspace")
         self.declareProperty("TimeAxisStep", 40.0,
                              doc="Binning step size for the time axis. TOF for detector binning, wavelength for constant Q")
-        self.declareProperty("EntryName", "entry-Off_Off", doc="Name of the entry to load")
         self.declareProperty("CropFirstAndLastPoints", True, doc="If true, we crop the first and last points")
         self.declareProperty("ConstQTrim", 0.5,
                              doc="With const-Q binning, cut Q bins with contributions fewer than ConstQTrim of WL bins")
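
Note on the hook above: the framework calls checkGroups() to decide whether to unroll a
WorkspaceGroup and run the algorithm once per member; returning False hands the group to
PyExec() whole. A minimal sketch of the pattern (the algorithm name and the logging are
illustrative, not part of this patch):

    from mantid.api import AlgorithmFactory, PythonAlgorithm, WorkspaceGroup, WorkspaceProperty
    from mantid.kernel import Direction

    class GroupAwareExample(PythonAlgorithm):
        def checkGroups(self):
            # False disables the default run-once-per-member unrolling
            return False

        def PyInit(self):
            self.declareProperty(WorkspaceProperty("InputWorkspace", "", Direction.Input))

        def PyExec(self):
            ws = self.getProperty("InputWorkspace").value
            # Normalize to a list so a single loop covers both cases
            ws_list = ws if isinstance(ws, WorkspaceGroup) else [ws]
            for member in ws_list:
                self.log().notice("Would process %s" % str(member))

    AlgorithmFactory.subscribe(GroupAwareExample)
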
@@ -102,88 +103,84 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
     #pylint: disable=too-many-locals
     def PyExec(self):
         """ Main execution """
-        # DATA
+        # Reduction parameters
         dataPeakRange = self.getProperty("SignalPeakPixelRange").value
         dataBackRange = self.getProperty("SignalBackgroundPixelRange").value
 
-        # NORMALIZATION
-        normBackRange = self.getProperty("NormBackgroundPixelRange").value
-        normPeakRange = self.getProperty("NormPeakPixelRange").value
-
-        # Load the data
-        ws_event_data = self.load_data()
-
-        # Number of pixels in each direction
-        self.number_of_pixels_x = int(ws_event_data.getInstrument().getNumberParameter("number-of-x-pixels")[0])
-        self.number_of_pixels_y = int(ws_event_data.getInstrument().getNumberParameter("number-of-y-pixels")[0])
-
-        # ----- Process Sample Data -------------------------------------------
         crop_request = self.getProperty("CutLowResDataAxis").value
         low_res_range = self.getProperty("LowResDataAxisPixelRange").value
         bck_request = self.getProperty("SubtractSignalBackground").value
-        data_cropped = self.process_data(ws_event_data,
-                                         crop_request, low_res_range,
-                                         dataPeakRange, bck_request, dataBackRange)
-
-        # ----- Normalization -------------------------------------------------
         perform_normalization = self.getProperty("ApplyNormalization").value
-        if perform_normalization:
-            # Load normalization
-            ws_event_norm = self.load_direct_beam()
-            run_number = str(ws_event_norm.getRunNumber())
-            crop_request = self.getProperty("CutLowResNormAxis").value
-            low_res_range = self.getProperty("LowResNormAxisPixelRange").value
-            bck_request = self.getProperty("SubtractNormBackground").value
-            norm_cropped = self.process_data(ws_event_norm,
-                                             crop_request, low_res_range,
-                                             normPeakRange, bck_request, normBackRange)
-            # Avoid leaving trash behind (remove only if we loaded the data)
-            if self.getProperty("NormalizationWorkspace").value is None:
-                AnalysisDataService.remove(str(ws_event_norm))
-
-            # Sum up the normalization peak
-            norm_summed = SumSpectra(InputWorkspace = norm_cropped)
-            norm_summed = RebinToWorkspace(WorkspaceToRebin=norm_summed,
-                                           WorkspaceToMatch=data_cropped,
-                                           OutputWorkspace=str(norm_summed))
-
-            # Normalize the data
-            normalized_data = data_cropped / norm_summed
-
-            AddSampleLog(Workspace=normalized_data, LogName='normalization_run', LogText=run_number)
-            AddSampleLog(Workspace=normalized_data, LogName='normalization_file_path',
-                         LogText=norm_summed.getRun().getProperty("Filename").value)
-            norm_dirpix = norm_summed.getRun().getProperty('DIRPIX').getStatistics().mean
-            AddSampleLog(Workspace=normalized_data, LogName='normalization_dirpix',
-                         LogText=str(norm_dirpix), LogType='Number', LogUnit='pixel')
-
-            # Avoid leaving trash behind
-            AnalysisDataService.remove(str(data_cropped))
-            AnalysisDataService.remove(str(norm_cropped))
-            AnalysisDataService.remove(str(norm_summed))
-        else:
-            normalized_data = data_cropped
-            AddSampleLog(Workspace=normalized_data, LogName='normalization_run', LogText="None")
 
-        # At this point, the workspace should be considered a distribution of points
-        normalized_data = ConvertToPointData(InputWorkspace=normalized_data,
-                                             OutputWorkspace=str(normalized_data))
+        # Processed normalization workspace
+        norm_summed = None
+        output_list = []
 
-        # Convert to Q and clean up the distribution
-        constant_q_binning = self.getProperty("ConstantQBinning").value
-        if constant_q_binning:
-            q_rebin = self.constant_q(normalized_data, dataPeakRange)
+        for workspace in self.load_data():
+            try:
+                logger.notice("Processing %s" % str(workspace))
+                data_cropped = self.process_data(workspace,
+                                                 crop_request, low_res_range,
+                                                 dataPeakRange, bck_request, dataBackRange)
+
+                # Normalization
+                if perform_normalization:
+                    if norm_summed is None:
+                        norm_summed = self.process_direct_beam(data_cropped)
+
+                    # Normalize the data
+                    normalized_data = Divide(LHSWorkspace=data_cropped, RHSWorkspace=norm_summed,
+                                             OutputWorkspace=str(data_cropped)+'_normalized')
+
+                    AddSampleLog(Workspace=normalized_data, LogName='normalization_run',
+                                 LogText=str(norm_summed.getRunNumber()))
+                    AddSampleLog(Workspace=normalized_data, LogName='normalization_file_path',
+                                 LogText=norm_summed.getRun().getProperty("Filename").value)
+                    norm_dirpix = norm_summed.getRun().getProperty('DIRPIX').getStatistics().mean
+                    AddSampleLog(Workspace=normalized_data, LogName='normalization_dirpix',
+                                 LogText=str(norm_dirpix), LogType='Number', LogUnit='pixel')
+
+                    # Avoid leaving trash behind
+                    AnalysisDataService.remove(str(data_cropped))
+                else:
+                    normalized_data = data_cropped
+                    AddSampleLog(Workspace=normalized_data, LogName='normalization_run', LogText="None")
+
+                # At this point, the workspace should be considered a distribution of points
+                point_data = ConvertToPointData(InputWorkspace=normalized_data,
+                                                OutputWorkspace=str(workspace)+'_')
+                # Avoid leaving trash behind
+                AnalysisDataService.remove(str(normalized_data))
+
+                # Convert to Q and clean up the distribution
+                constant_q_binning = self.getProperty("ConstantQBinning").value
+                if constant_q_binning:
+                    q_rebin = self.constant_q(point_data, dataPeakRange)
+                else:
+                    q_rebin = self.convert_to_q(point_data)
+                q_rebin = self.cleanup_reflectivity(q_rebin)
+
+                # Avoid leaving trash behind
+                AnalysisDataService.remove(str(point_data))
+
+                # Add dQ to each Q point
+                q_rebin = self.compute_resolution(q_rebin)
+                output_list.append(q_rebin)
+            except Exception as exc:
+                logger.error("Could not process %s: %s" % (str(workspace), str(exc)))
+
+        if not output_list:
+            raise RuntimeError("No input workspace was successfully reduced")
+
+        # Prepare output workspace group
+        if len(output_list) > 1:
+            output_wsg = self.getPropertyValue("OutputWorkspace")
+            GroupWorkspaces(InputWorkspaces=output_list,
+                            OutputWorkspace=output_wsg)
+            self.setProperty("OutputWorkspace", output_wsg)
         else:
-            q_rebin = self.convert_to_q(normalized_data)
-        q_rebin = self.cleanup_reflectivity(q_rebin)
+            self.setProperty("OutputWorkspace", output_list[0])
 
-        # Avoid leaving trash behind
-        AnalysisDataService.remove(str(normalized_data))
-
-        # Add dQ to each Q point
-        q_rebin = self.compute_resolution(q_rebin)
-
-        self.setProperty('OutputWorkspace', q_rebin)
+        # Clean up leftover workspace
+        if norm_summed is not None:
+            AnalysisDataService.remove(str(norm_summed))
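
The grouping logic at the end of PyExec() mirrors the input handling: one reflectivity
workspace per group member, bundled with GroupWorkspaces when there is more than one. A
minimal sketch of that output pattern, using throwaway sample workspaces in place of the
reduced curves (names are illustrative only):

    from mantid.simpleapi import CreateSampleWorkspace, GroupWorkspaces

    # Stand-ins for per-member reflectivity outputs
    output_list = [CreateSampleWorkspace(OutputWorkspace="r_off_off"),
                   CreateSampleWorkspace(OutputWorkspace="r_on_off")]

    if len(output_list) > 1:
        # The group, not the individual members, becomes the algorithm's output
        reduced = GroupWorkspaces(InputWorkspaces=output_list, OutputWorkspace="reduced")
    else:
        reduced = output_list[0]
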
@@ -192,30 +189,20 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
     def load_data(self):
         """
-            Load the data. We can either load it from the specified run numbers,
-            or from the input workspace.
-
-            Supplying a workspace takes precedence over supplying a list of runs
+            Load the data. The input workspace can be a single workspace
+            or a workspace group.
         """
-        dataRunNumbers = self.getProperty("RunNumbers").value
-        ws_event_data = self.getProperty("InputWorkspace").value
-
-        if ws_event_data is not None:
-            return ws_event_data
-
-        if len(dataRunNumbers) > 0:
-            # If we have multiple files, add them
-            file_list = []
-            for item in dataRunNumbers:
-                # The standard mode of operation is to give a run number as input
-                try:
-                    data_file = FileFinder.findRuns("%s%s" % (INSTRUMENT_NAME, item))[0]
-                except RuntimeError:
-                    # Allow for a file name or file path as input
-                    data_file = FileFinder.findRuns(item)[0]
-                file_list.append(data_file)
-            runs = functools.reduce((lambda x, y: '%s+%s' % (x, y)), file_list)
-            entry_name = self.getProperty("EntryName").value
-            ws_event_data = LoadEventNexus(Filename=runs, NXentryName=entry_name,
-                                           OutputWorkspace="%s_%s" % (INSTRUMENT_NAME, dataRunNumbers[0]))
+        input_workspaces = self.getProperty("InputWorkspace").value
+
+        if isinstance(input_workspaces, WorkspaceGroup):
+            ws_list = input_workspaces
+        else:
+            ws_list = [input_workspaces]
+
+        # Sanity check, and retrieve some info while we're at it.
+        if ws_list and ws_list[0] is not None:
+            self.number_of_pixels_x = int(ws_list[0].getInstrument().getNumberParameter("number-of-x-pixels")[0])
+            self.number_of_pixels_y = int(ws_list[0].getInstrument().getNumberParameter("number-of-y-pixels")[0])
         else:
             raise RuntimeError("No input data was specified")
-        return ws_event_data
+        return ws_list
 
     def load_direct_beam(self):
         """
@@ -248,6 +235,32 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
         # If we are here, we haven't found the data we need and we need to stop execution.
         raise RuntimeError("Could not find direct beam data for run %s" % normalizationRunNumber)
 
+    def process_direct_beam(self, data_cropped):
+        """
+            Process the direct beam and rebin it to match our scattering data.
+            :param Workspace data_cropped: scattering data workspace
+        """
+        # Load normalization
+        ws_event_norm = self.load_direct_beam()
+
+        # Retrieve reduction parameters
+        normBackRange = self.getProperty("NormBackgroundPixelRange").value
+        normPeakRange = self.getProperty("NormPeakPixelRange").value
+        crop_request = self.getProperty("CutLowResNormAxis").value
+        low_res_range = self.getProperty("LowResNormAxisPixelRange").value
+        bck_request = self.getProperty("SubtractNormBackground").value
+
+        norm_cropped = self.process_data(ws_event_norm,
+                                         crop_request, low_res_range,
+                                         normPeakRange, bck_request, normBackRange,
+                                         rebin_to_ws=data_cropped)
+        # Avoid leaving trash behind (remove only if we loaded the data)
+        if self.getProperty("NormalizationWorkspace").value is None:
+            AnalysisDataService.remove(str(ws_event_norm))
+
+        return norm_cropped
+
     def constant_q(self, workspace, peak):
         """
             Compute reflectivity using constant-Q binning
         """
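
process_direct_beam() is deliberately called at most once per reduction: the summed direct
beam is cached in norm_summed and reused for every member of the group, with
RebinToWorkspace guaranteeing a bin structure compatible with Divide. This assumes all
group members share the same binning, as they do after process_data(). A minimal sketch of
that compute-once, divide-many pattern, with generated stand-in data:

    from mantid.simpleapi import CreateSampleWorkspace, Divide, RebinToWorkspace, SumSpectra

    direct_beam = CreateSampleWorkspace(OutputWorkspace="direct_beam")  # stand-in
    members = [CreateSampleWorkspace(OutputWorkspace="member_%d" % i) for i in range(2)]

    norm_summed = None
    for ws in members:
        if norm_summed is None:
            # Computed once, reused for every member of the group
            norm_summed = SumSpectra(InputWorkspace=direct_beam, OutputWorkspace="norm_summed")
            norm_summed = RebinToWorkspace(WorkspaceToRebin=norm_summed, WorkspaceToMatch=ws,
                                           OutputWorkspace="norm_summed")
        # A single-spectrum denominator is applied to every spectrum of the numerator
        normalized = Divide(LHSWorkspace=ws, RHSWorkspace=norm_summed,
                            OutputWorkspace=str(ws) + "_normalized")
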
@@ -350,7 +363,10 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
                 refl[i] = 0.0
                 refl_err[i] = 0.0
 
-        q_rebin = CreateWorkspace(DataX=axis_z, DataY=refl, DataE=refl_err, ParentWorkspace=workspace)
+        name_output_ws = str(workspace)+'_reflectivity'
+        q_rebin = CreateWorkspace(DataX=axis_z, DataY=refl, DataE=refl_err,
+                                  ParentWorkspace=workspace, OutputWorkspace=name_output_ws)
+
         # At this point we still have a histogram, and we need to convert to point data
         q_rebin = ConvertToPointData(InputWorkspace=q_rebin)
         return q_rebin
@@ -418,7 +434,7 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
             data_x[i] = constant / data_x[i]
         q_workspace = SortXAxis(InputWorkspace=q_workspace, OutputWorkspace=str(q_workspace))
 
-        name_output_ws = self.getPropertyValue("OutputWorkspace")
+        name_output_ws = str(workspace)+'_reflectivity'
         try:
             q_rebin = Rebin(InputWorkspace=q_workspace, Params=q_range,
                             OutputWorkspace=name_output_ws)
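
Both reflectivity paths now name their output from the input workspace (so group members
do not collide) and rebin onto the Q grid built from QMin/QStep. As the QStep
documentation above notes, Mantid's Rebin treats a negative step as logarithmic binning. A
small illustration with a generated workspace:

    from mantid.simpleapi import CreateSampleWorkspace, Rebin

    ws = CreateSampleWorkspace(OutputWorkspace="ws")  # default TOF axis, 0 to 20000
    # Params=[min, step, max]: a positive step gives fixed-width bins,
    # a negative step gives logarithmic bins, x_{i+1} = x_i * (1 + |step|)
    linear = Rebin(InputWorkspace=ws, Params=[1000.0, 40.0, 16000.0], OutputWorkspace="lin")
    logbin = Rebin(InputWorkspace=ws, Params=[1000.0, -0.02, 16000.0], OutputWorkspace="log")
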
@@ -624,9 +640,16 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
 
     #pylint: disable=too-many-arguments
     def process_data(self, workspace, crop_low_res, low_res_range,
-                     peak_range, subtract_background, background_range):
+                     peak_range, subtract_background, background_range, rebin_to_ws=None):
         """
             Common processing for both sample data and normalization.
+            :param workspace: event workspace to process
+            :param crop_low_res: if True, the low-resolution direction will be cropped
+            :param low_res_range: low-resolution direction pixel range
+            :param peak_range: pixel range of the specular reflection peak
+            :param subtract_background: if True, the background will be subtracted
+            :param background_range: pixel range of the background region
+            :param rebin_to_ws: workspace to rebin to instead of doing independent rebinning
         """
         use_wl_cut = self.getProperty("UseWLTimeAxis").value
         constant_q_binning = self.getProperty("ConstantQBinning").value
@@ -648,10 +671,15 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
                                                            tof_min, tof_max)
             raise RuntimeError(error_msg)
 
-        tof_step = self.getProperty("TimeAxisStep").value
-        logger.notice("Time axis range: %s %s %s [%s %s]" % (tof_range[0], tof_step, tof_range[1], tof_min, tof_max))
-        workspace = Rebin(InputWorkspace=workspace, Params=[tof_range[0], tof_step, tof_range[1]],
-                          OutputWorkspace="%s_histo" % str(workspace))
+        if rebin_to_ws is not None:
+            workspace = RebinToWorkspace(WorkspaceToRebin=workspace,
+                                         WorkspaceToMatch=rebin_to_ws,
+                                         OutputWorkspace="%s_histo" % str(workspace))
+        else:
+            tof_step = self.getProperty("TimeAxisStep").value
+            logger.notice("Time axis range: %s %s %s [%s %s]" % (tof_range[0], tof_step, tof_range[1], tof_min, tof_max))
+            workspace = Rebin(InputWorkspace=workspace, Params=[tof_range[0], tof_step, tof_range[1]],
+                              OutputWorkspace="%s_histo" % str(workspace))
 
         if constant_q_binning and not use_wl_cut:
             # Convert to wavelength
@@ -664,7 +692,7 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
         low_res_max = self.number_of_pixels_y
         if crop_low_res:
             low_res_min = int(low_res_range[0])
-            low_res_max = int(low_res_range[1])
+            low_res_max = min(int(low_res_range[1]), self.number_of_pixels_y-1)
 
         # Subtract background
         if subtract_background:
@@ -706,10 +734,13 @@ class MagnetismReflectometryReduction(PythonAlgorithm):
 
         # Crop to only the selected peak region
         cropped = CropWorkspace(InputWorkspace=subtracted,
-                                StartWorkspaceIndex=int(peak_range[0]),
-                                EndWorkspaceIndex=int(peak_range[1]),
+                                StartWorkspaceIndex=max(0, int(peak_range[0])),
+                                EndWorkspaceIndex=min(int(peak_range[1]), self.number_of_pixels_x-1),
                                 OutputWorkspace="%s_cropped" % str(subtracted))
 
+        if rebin_to_ws is not None:
+            cropped = SumSpectra(InputWorkspace=cropped)
+
         # Avoid leaving trash behind
         AnalysisDataService.remove(str(workspace))
         AnalysisDataService.remove(str(subtracted))
-- 
GitLab
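
With this patch applied, a polarization-split run can be reduced in one call. The sketch
below is a hypothetical driver script: the run number, entry names, pixel ranges and the
abbreviated property list are placeholders, not values taken from this patch:

    from mantid.simpleapi import (GroupWorkspaces, LoadEventNexus,
                                  MagnetismReflectometryReduction)

    entries = ["entry-Off_Off", "entry-On_Off"]
    members = [LoadEventNexus(Filename="REF_M_12345", NXentryName=entry,
                              OutputWorkspace="REF_M_12345_%s" % entry)
               for entry in entries]
    group = GroupWorkspaces(InputWorkspaces=members, OutputWorkspace="REF_M_12345")

    # checkGroups() returning False means the group is passed in whole; the output
    # is a matching group with one reflectivity curve per entry.
    MagnetismReflectometryReduction(InputWorkspace=group,
                                    SignalPeakPixelRange=[120, 140],
                                    SignalBackgroundPixelRange=[80, 100],
                                    ApplyNormalization=False,
                                    OutputWorkspace="r_REF_M_12345")
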