Commit 9dc74fa2 authored by Simon Heybrock, committed by GitHub

Merge pull request #20491 from mantidproject/20483_IndirectDiffractionIncorrectRelationship

Indirect - Diffraction Reduction - Incorrect summing of files
parents c70e2494 de5a2192
Showing 292 additions and 116 deletions
@@ -37,6 +37,11 @@ void export_FileFinder() {
           "The hint can be a comma separated list of run numbers and can also "
           "include ranges of runs, e.g. 123-135 or equivalently 123-35"
           "If no instrument prefix is given then the current default is used.")
+      .def("getCaseSensitive", &FileFinderImpl::getCaseSensitive, (arg("self")),
+           "Option to get if file finder should be case sensitive.")
+      .def("setCaseSensitive", &FileFinderImpl::setCaseSensitive,
+           (arg("self"), arg("cs")),
+           "Option to set if file finder should be case sensitive.")
       .def("Instance", &FileFinder::Instance,
            return_value_policy<reference_existing_object>(),
            "Returns a reference to the FileFinder singleton instance")
......
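(Not part of the changeset: a minimal Python sketch of the toggle-and-restore pattern these new bindings enable, mirroring the unit test further down in this diff. The run range is illustrative and findRuns is the existing binding whose docstring is shown above.)

    from mantid.api import FileFinder

    # Remember the current setting so it can be restored afterwards.
    case_sensitive = FileFinder.Instance().getCaseSensitive()

    # Case-insensitive searching lets run ranges resolve to files whose
    # extensions differ only in case (e.g. .RAW vs .raw).
    FileFinder.Instance().setCaseSensitive(False)
    try:
        runs = FileFinder.Instance().findRuns('IRS26173-26176')  # run range is illustrative
    finally:
        FileFinder.Instance().setCaseSensitive(case_sensitive)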
@@ -3,7 +3,7 @@ from __future__ import (absolute_import, division, print_function)
 import os
 
-from IndirectReductionCommon import load_files
+from IndirectReductionCommon import load_files, load_file_ranges
 from mantid.simpleapi import *
 from mantid.api import *
@@ -128,16 +128,6 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                 logger.warning('type = ' + str(type(mode)))
                 issues['CalFile'] = 'Cal Files are currently only available for use in OSIRIS diffspec mode'
 
-        num_samples = len(input_files)
-        num_vanadium = len(self.getProperty('VanadiumFiles').value)
-        if num_samples != num_vanadium and num_vanadium != 0:
-            run_num_mismatch = 'You must input the same number of sample and vanadium runs'
-            issues['InputFiles'] = run_num_mismatch
-            issues['VanadiumFiles'] = run_num_mismatch
-
-        if self._grouping_method == 'Workspace' and self._grouping_workspace is None:
-            issues['GroupingWorkspace'] = 'Must select a grouping workspace for current GroupingWorkspace'
-
         return issues
 
     # ------------------------------------------------------------------------------
@@ -163,13 +153,13 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
             load_opts['Mode'] = 'FoilOut'
             load_opts['LoadMonitors'] = True
 
-        self._workspace_names, self._chopped_data = load_files(self._data_files,
-                                                                self._ipf_filename,
-                                                                self._spectra_range[0],
-                                                                self._spectra_range[1],
-                                                                sum_files=self._sum_files,
-                                                                load_logs=self._load_logs,
-                                                                load_opts=load_opts)
+        self._workspace_names, self._chopped_data = load_file_ranges(self._data_files,
+                                                                      self._ipf_filename,
+                                                                      self._spectra_range[0],
+                                                                      self._spectra_range[1],
+                                                                      sum_files=self._sum_files,
+                                                                      load_logs=self._load_logs,
+                                                                      load_opts=load_opts)
 
         # applies the changes in the provided calibration file
         self._apply_calibration()
@@ -182,10 +172,12 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                                                          self._ipf_filename,
                                                          self._spectra_range[0],
                                                          self._spectra_range[1],
-                                                         sum_files=self._sum_files,
                                                          load_logs=self._load_logs,
                                                          load_opts=load_opts)
 
+            if len(self._workspace_names) > len(self._vanadium_runs):
+                raise RuntimeError("There cannot be more sample runs than vanadium runs.")
+
             for index, c_ws_name in enumerate(self._workspace_names):
                 is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)
@@ -283,13 +275,11 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
         # Remove the container workspaces
         if self._container_workspace is not None:
-            DeleteWorkspace(self._container_workspace)
-            DeleteWorkspace(self._container_workspace + '_mon')
+            self._delete_all([self._container_workspace])
 
+        # Remove the vanadium workspaces
         if self._vanadium_ws:
-            for van_ws in self._vanadium_ws:
-                DeleteWorkspace(van_ws)
-                DeleteWorkspace(van_ws + '_mon')
+            self._delete_all(self._vanadium_ws)
 
         # Rename output workspaces
         output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]
@@ -306,7 +296,6 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
         """
         Gets algorithm properties.
         """
-
         self._output_ws = self.getPropertyValue('OutputWorkspace')
 
         self._data_files = self.getProperty('InputFiles').value
         self._container_data_files = self.getProperty('ContainerFiles').value
@@ -340,17 +329,10 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
             self._ipf_filename = os.path.join(config['instrumentDefinition.directory'], self._ipf_filename)
             logger.information('IPF filename is: %s' % self._ipf_filename)
 
+        if len(self._data_files) == 1:
+            logger.warning('SumFiles options has no effect when only one file is provided')
+
         # Only enable sum files if we actually have more than one file
-        sum_files = self.getProperty('SumFiles').value
-        self._sum_files = False
-
-        if sum_files:
-            num_raw_files = len(self._data_files)
-            if num_raw_files > 1:
-                self._sum_files = True
-                logger.information('Summing files enabled (have %d files)' % num_raw_files)
-            else:
-                logger.information('SumFiles options is ignored when only one file is provided')
+        self._sum_files = self.getProperty('SumFiles').value
 
     def _apply_calibration(self):
         """
@@ -388,6 +370,19 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                   Factor=scale_factor,
                   Operation='Multiply')
 
+    def _delete_all(self, workspace_names):
+        """
+        Deletes the workspaces with the specified names and their associated
+        monitor workspaces.
+
+        :param workspace_names: The names of the workspaces to delete.
+        """
+        for workspace_name in workspace_names:
+            DeleteWorkspace(workspace_name)
+
+            if mtd.doesExist(workspace_name + '_mon'):
+                DeleteWorkspace(workspace_name + '_mon')
+
     # ------------------------------------------------------------------------------
......
@@ -394,8 +394,9 @@ class OSIRISDiffractionReduction(PythonAlgorithm):
         num_samples = len(self._sample_runs)
         num_vanadium = len(self._vanadium_runs)
-        if num_samples != num_vanadium:
-            run_num_mismatch = 'You must input the same number of sample and vanadium runs'
+
+        if num_samples > num_vanadium:
+            run_num_mismatch = 'You must input at least as many vanadium files as sample files'
             issues['Sample'] = run_num_mismatch
             issues['Vanadium'] = run_num_mismatch
 
         if self._container_files:
......
@@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function)
 import unittest
 from mantid.simpleapi import *
 from mantid.api import *
+from mantid.kernel import config
 
 
 class ISISIndirectDiffractionReductionTest(unittest.TestCase):
@@ -73,23 +74,26 @@ class ISISIndirectDiffractionReductionTest(unittest.TestCase):
         """
         Test summing multiple runs.
         """
-        wks = ISISIndirectDiffractionReduction(InputFiles=['IRS26176.RAW', 'IRS26173.RAW'],
+        cs = FileFinder.Instance().getCaseSensitive()
+        FileFinder.Instance().setCaseSensitive(False)
+        wks = ISISIndirectDiffractionReduction(InputFiles=['26173-26176'],
                                                SumFiles=True,
                                                Instrument='IRIS',
                                                Mode='diffspec',
                                                SpectraRange=[105, 112])
+        FileFinder.Instance().setCaseSensitive(cs)
 
         self.assertTrue(isinstance(wks, WorkspaceGroup), 'Result workspace should be a workspace group.')
         self.assertEqual(len(wks), 1)
-        self.assertEqual(wks.getNames()[0], 'iris26176_multi_diffspec_red')
+        self.assertEqual(wks.getNames()[0], 'iris26173_multi_diffspec_red')
 
         red_ws = wks[0]
         self.assertEqual(red_ws.getAxis(0).getUnit().unitID(), 'dSpacing')
         self.assertEqual(red_ws.getNumberHistograms(), 1)
 
         self.assertTrue('multi_run_numbers' in red_ws.getRun())
-        self.assertEqual(red_ws.getRun().get('multi_run_numbers').value, '26176,26173')
+        self.assertEqual(red_ws.getRun().get('multi_run_numbers').value, '26173,26174,26175,26176')
 
     def test_grouping_individual(self):
         """
......
94e0fbf798a1b3c3d8718dfe4ebcaab3
019aa1211f82d2fc383dcd64e8679bb9
@@ -36,18 +36,20 @@ Elwin
 Bugfixes
 --------
 - Save Result now writes to file the temperature-dependent elastic intensity normalized to the lowest temperature.
-- Added 'ExtractMembers' property to ConvolutionFitSequential algorithm - this allows for extracting the members of the
-  convolution fitting into their own workspaces.
 
 ConvFit
 ~~~~~~~
 
+Improvements
+------------
+- Added 'ExtractMembers' property to ConvolutionFitSequential algorithm - this allows for extracting the members of the
+  convolution fitting into their own workspaces.
+
 Bugfixes
 --------
 - Correct treatment of the resolution function: convolve sample and resolution spectra with same momentum transfer.
 - Property to pass the workspace index added to :ref:`algm-ConvolutionFitSequential`.
 
 MSDFit
 ~~~~~~
@@ -82,5 +84,10 @@ Bugfixes
 - An issue has been fixed in :ref:`algm-IndirectILLEnergyTransfer` when handling the data with mirror sense, that have shifted 0 monitor counts in the left and right wings. This was causing the left and right workspaces to have different x-axis binning and to fail to sum during the unmirroring step.
 - An issue has been fixed in :ref:`algm-IndirectILLReductionFWS` when the scaling of the data after vanadium calibration was not applied.
 - :ref:`algm-CalculateSampleTransmission` now divides by the tabulated wavelength when calculating the absorption cross section.
+- The Sum Files option in the Indirect Diffraction Reduction interface now correctly matches each sum of sample runs
+  defined with a range (e.g. A-B, where A and B are run numbers) to the corresponding vanadium run, dependent on D-Range.
+- The 'Sample Runs' field in the Indirect Diffraction interface now recognizes 3 operators: '-', '+', ':'. The '-' operator is used
+  to supply a given range of runs and sum them when SumFiles is checked. The '+' operator is used to supply a given list of runs and
+  sum them when SumFiles is checked. The ':' operator is used to supply a range of runs, which will never be summed.
 
 `Full list of changes on GitHub <http://github.com/mantidproject/mantid/pulls?q=is%3Apr+milestone%3A%22Release+3.11%22+is%3Amerged+label%3A%22Component%3A+Indirect+Inelastic%22>`_
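(Not part of the changeset: a short sketch of how these operators are interpreted, based on the create_file_range_parser helper added to IndirectReductionCommon.py later in this diff; the IRIS run numbers are illustrative.)

    from IndirectReductionCommon import create_file_range_parser

    parser = create_file_range_parser('IRS')

    # '-' : one group of consecutive runs, summed when SumFiles is checked.
    parser('26173-26176')   # [['IRS26173', 'IRS26174', 'IRS26175', 'IRS26176']]

    # ':' : one group per run, so the runs are never summed.
    parser('26173:26176')   # [['IRS26173'], ['IRS26174'], ['IRS26175'], ['IRS26176']]

    # '+' : one group containing an explicit list of runs, summed when SumFiles is checked.
    parser('26173+26175')   # [['IRS26173', 'IRS26175']]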
@@ -112,6 +112,14 @@ void IndirectDiffractionReduction::run() {
     showInformationBox("Sample files input is invalid.");
     return;
   }
 
+  if (mode == "diffspec" && m_uiForm.ckUseVanadium->isChecked() &&
+      m_uiForm.rfVanFile_only->getFilenames().isEmpty()) {
+    showInformationBox("Use Vanadium File checked but no vanadium files "
+                       "have been supplied.");
+    return;
+  }
+
   if (instName == "OSIRIS") {
     if (mode == "diffonly") {
       if (!validateVanCal()) {
@@ -145,7 +153,10 @@ void IndirectDiffractionReduction::algorithmComplete(bool error) {
   // Handles completion of the diffraction algorithm chain
   disconnect(m_batchAlgoRunner, 0, this, SLOT(algorithmComplete(bool)));
 
-  deleteGroupingWorkspace();
+  // Delete grouping workspace, if created.
+  if (AnalysisDataService::Instance().doesExist(m_groupingWsName)) {
+    deleteGroupingWorkspace();
+  }
 
   if (error) {
     showInformationBox(
@@ -364,8 +375,7 @@ void IndirectDiffractionReduction::runGenericReduction(QString instName,
   msgDiffReduction->setProperty("LoadLogFiles",
                                 m_uiForm.ckLoadLogs->isChecked());
   msgDiffReduction->setProperty(
-      "InputFiles",
-      m_uiForm.rfSampleFiles->getFilenames().join(",").toStdString());
+      "InputFiles", m_uiForm.rfSampleFiles->getText().toStdString());
   msgDiffReduction->setProperty("SpectraRange", detRange);
   msgDiffReduction->setProperty("RebinParam", rebin.toStdString());
   msgDiffReduction->setProperty("OutputWorkspace",
......
@@ -97,7 +97,7 @@
        <item row="2" column="2">
         <widget class="QCheckBox" name="ckSumFiles">
          <property name="text">
-          <string>Sum Files</string>
+          <string>Sum Sample Files</string>
          </property>
         </widget>
        </item>
@@ -201,7 +201,7 @@
        <item row="5" column="0">
         <widget class="QCheckBox" name="ckManualDRange">
          <property name="text">
-          <string>Manual dRange:</string>
+          <string>Manual dRange (Vanadium):</string>
          </property>
         </widget>
        </item>
......
 from __future__ import (absolute_import, division, print_function)
+from mantid.simpleapi import Load
 from mantid.api import WorkspaceGroup, AlgorithmManager
 from mantid import mtd, logger, config
@@ -8,6 +9,80 @@ import numpy as np
 # -------------------------------------------------------------------------------
 
 
+def create_range_from(range_str, delimiter):
+    """
+    Creates a range from the specified string, by splitting by the specified
+    delimiter.
+
+    :param range_str:   The range string, in the format A-B where A is the lower
+                        bound of the range, - is the delimiter and B is the upper
+                        bound of the range.
+    :param delimiter:   The range delimiter.
+    :return:            The range created from the range string.
+    """
+    lower, upper = range_str.split(delimiter, 1)
+    return range(int(lower), int(upper) + 1)
+
+
+def create_file_range_parser(instrument):
+    """
+    Creates a parser which takes a specified file range string of the
+    format A-B, and returns a list of the files in that range preceded
+    by the specified instrument name.
+
+    :param instrument:  The instrument name.
+    :return:            A file range parser.
+    """
+    def parser(file_range):
+        file_range = file_range.strip()
+
+        # Check whether this is a range or single file
+        if '-' in file_range:
+            return [[instrument + str(run) for run in create_range_from(file_range, '-')]]
+        elif ':' in file_range:
+            return [[instrument + str(run)] for run in create_range_from(file_range, ':')]
+        elif '+' in file_range:
+            return [[instrument + run for run in file_range.split('+')]]
+        else:
+            try:
+                return [[instrument + str(int(file_range))]]
+            except ValueError:
+                return [[file_range]]
+
+    return parser
+
+
+def load_file_ranges(file_ranges, ipf_filename, spec_min, spec_max, sum_files=True, load_logs=True, load_opts=None):
+    """
+    Loads a set of files from specified file ranges and extracts just the spectra we
+    care about (i.e. detector range and monitor).
+
+    @param file_ranges  List of data file ranges
+    @param ipf_filename File path/name for the instrument parameter file to load
+    @param spec_min     Minimum spectra ID to load
+    @param spec_max     Maximum spectra ID to load
+    @param sum_files    Sum loaded files
+    @param load_logs    Load log files when loading runs
+    @param load_opts    Additional options to be passed to load algorithm
+
+    @return List of loaded workspace names and flag indicating chopped data
+    """
+    instrument = os.path.splitext(os.path.basename(ipf_filename))[0]
+    instrument = instrument.split('_')[0]
+    parse_file_range = create_file_range_parser(instrument)
+
+    file_ranges = [file_range for range_str in file_ranges for file_range in range_str.split(',')]
+    file_groups = [file_group for file_range in file_ranges for file_group in parse_file_range(file_range)]
+
+    workspace_names = []
+    chopped_data = False
+
+    for file_group in file_groups:
+        created_workspaces, chopped_data = load_files(file_group, ipf_filename, spec_min,
+                                                      spec_max, sum_files, load_logs, load_opts)
+        workspace_names.extend(created_workspaces)
+
+    return workspace_names, chopped_data
+
+
 def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, load_logs=True, load_opts=None):
     """
@@ -23,9 +98,33 @@ def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, load_logs=True, load_opts=None):
 
     @return List of loaded workspace names and flag indicating chopped data
     """
-    from mantid.simpleapi import (Load, LoadVesuvio, LoadParameterFile,
-                                  ChopData, ExtractSingleSpectrum,
-                                  CropWorkspace, DeleteWorkspace)
+    workspace_names, chopped_data = _load_files(data_files, ipf_filename, spec_min, spec_max, load_logs, load_opts)
+
+    # Sum files if needed
+    if sum_files and len(data_files) > 1:
+        if chopped_data:
+            workspace_names = sum_chopped_runs(workspace_names)
+        else:
+            workspace_names = sum_regular_runs(workspace_names)
+
+    logger.information('Summed workspace names: %s' % (str(workspace_names)))
+
+    return workspace_names, chopped_data
+
+
+def _load_files(file_specifiers, ipf_filename, spec_min, spec_max, load_logs=True, load_opts=None):
+    """
+    Loads a set of files and extracts just the spectra we care about (i.e. detector range and monitor).
+
+    @param file_specifiers  List of data file specifiers
+    @param ipf_filename     File path/name for the instrument parameter file to load
+    @param spec_min         Minimum spectra ID to load
+    @param spec_max         Maximum spectra ID to load
+    @param load_logs        Load log files when loading runs
+    @param load_opts        Additional options to be passed to load algorithm
+
+    @return List of loaded workspace names and flag indicating chopped data
+    """
     delete_monitors = False
 
     if load_opts is None:
@@ -36,93 +135,146 @@ def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, load_logs=True, load_opts=None):
         load_opts.pop("DeleteMonitors")
 
     workspace_names = []
-    chopped_data = False
 
-    for filename in data_files:
+    for file_specifier in file_specifiers:
         # The filename without path and extension will be the workspace name
-        ws_name = os.path.splitext(os.path.basename(str(filename)))[0]
-        logger.debug('Loading file %s as workspace %s' % (filename, ws_name))
-
-        if 'VESUVIO' in ipf_filename:
-            # Load all spectra. They are cropped later
-            LoadVesuvio(Filename=str(filename),
-                        OutputWorkspace=ws_name,
-                        SpectrumList='1-198',
-                        **load_opts)
-        else:
-            Load(Filename=filename,
-                 OutputWorkspace=ws_name,
-                 LoadLogFiles=load_logs,
-                 **load_opts)
-
-        # Load the instrument parameters
-        LoadParameterFile(Workspace=ws_name,
-                          Filename=ipf_filename)
+        ws_name = os.path.splitext(os.path.basename(str(file_specifier)))[0]
+        logger.debug('Loading file %s as workspace %s' % (file_specifier, ws_name))
+
+        do_load(file_specifier, ws_name, ipf_filename, load_logs, load_opts)
+        workspace = mtd[ws_name]
 
         # Add the workspace to the list of workspaces
         workspace_names.append(ws_name)
 
         # Get the spectrum number for the monitor
-        instrument = mtd[ws_name].getInstrument()
-        monitor_index = int(instrument.getNumberParameter('Workflow.Monitor1-SpectrumNumber')[0])
-        logger.debug('Workspace %s monitor 1 spectrum number :%d' % (ws_name, monitor_index))
-
-        # Chop data if required
-        try:
-            chop_threshold = mtd[ws_name].getInstrument().getNumberParameter('Workflow.ChopDataIfGreaterThan')[0]
-            x_max = mtd[ws_name].readX(0)[-1]
-            chopped_data = x_max > chop_threshold
-        except IndexError:
-            chopped_data = False
-        logger.information('Workspace {0} need data chop: {1}'.format(ws_name, str(chopped_data)))
-
-        workspaces = [ws_name]
-        if chopped_data:
-            ChopData(InputWorkspace=ws_name,
-                     OutputWorkspace=ws_name,
-                     MonitorWorkspaceIndex=monitor_index,
-                     IntegrationRangeLower=5000.0,
-                     IntegrationRangeUpper=10000.0,
-                     NChops=5)
-            workspaces = mtd[ws_name].getNames()
-
-        for chop_ws_name in workspaces:
-            # Get the monitor spectrum
-            monitor_ws_name = chop_ws_name + '_mon'
-            ExtractSingleSpectrum(InputWorkspace=chop_ws_name,
-                                  OutputWorkspace=monitor_ws_name,
-                                  WorkspaceIndex=monitor_index)
-            if delete_monitors:
-                DeleteWorkspace(Workspace=monitor_ws_name)
-
-            # Crop to the detectors required
-            chop_ws = mtd[chop_ws_name]
-            CropWorkspace(InputWorkspace=chop_ws_name,
-                          OutputWorkspace=chop_ws_name,
-                          StartWorkspaceIndex=chop_ws.getIndexFromSpectrumNumber(int(spec_min)),
-                          EndWorkspaceIndex=chop_ws.getIndexFromSpectrumNumber(int(spec_max)))
+        instrument = workspace.getInstrument()
+        monitor_param = instrument.getNumberParameter('Workflow.Monitor1-SpectrumNumber')
+
+        if monitor_param:
+            monitor_index = int(monitor_param[0])
+            logger.debug('Workspace %s monitor 1 spectrum number :%d' % (ws_name, monitor_index))
+
+            workspaces, chopped_data = chop_workspace(workspace, monitor_index)
+            crop_workspaces(workspaces, spec_min, spec_max, not delete_monitors, monitor_index)
 
     logger.information('Loaded workspace names: %s' % (str(workspace_names)))
     logger.information('Chopped data: %s' % (str(chopped_data)))
 
-    # Sum files if needed
-    if sum_files and len(data_files) > 1:
-        if chopped_data:
-            workspace_names = sum_chopped_runs(workspace_names)
-        else:
-            workspace_names = sum_regular_runs(workspace_names)
-
     if delete_monitors:
         load_opts['DeleteMonitors'] = True
 
-    logger.information('Summed workspace names: %s' % (str(workspace_names)))
-
     return workspace_names, chopped_data
 
 
 # -------------------------------------------------------------------------------
 
+
+def do_load(file_specifier, output_ws_name, ipf_filename, load_logs, load_opts):
+    """
+    Loads the files, passing the given file specifier in the load command.
+
+    :param file_specifier:  The file specifier (single file, range or sum)
+    :param output_ws_name:  The name of the output workspace to create
+    :param ipf_filename:    The instrument parameter file to load with
+    :param load_opts:       Additional loading options
+    :param load_logs:       If True, load logs
+    """
+    from mantid.simpleapi import LoadVesuvio, LoadParameterFile
+
+    if 'VESUVIO' in ipf_filename:
+        # Load all spectra. They are cropped later
+        LoadVesuvio(Filename=str(file_specifier),
+                    OutputWorkspace=output_ws_name,
+                    SpectrumList='1-198',
+                    **load_opts)
+    else:
+        Load(Filename=file_specifier,
+             OutputWorkspace=output_ws_name,
+             LoadLogFiles=load_logs,
+             **load_opts)
+
+    # Load the instrument parameters
+    LoadParameterFile(Workspace=output_ws_name,
+                      Filename=ipf_filename)
+
+
+# -------------------------------------------------------------------------------
+
+
+def chop_workspace(workspace, monitor_index):
+    """
+    Chops the specified workspace if its maximum x-value exceeds its instrument
+    parameter, 'Workflow.ChopDataIfGreaterThan'.
+
+    :param workspace:       The workspace to chop
+    :param monitor_index:   The index of the monitor spectra in the workspace.
+    :return:                A tuple of the list of output workspace names and a boolean
+                            specifying whether the workspace was chopped.
+    """
+    from mantid.simpleapi import ChopData
+
+    workspace_name = workspace.getName()
+
+    # Chop data if required
+    try:
+        chop_threshold = workspace.getInstrument().getNumberParameter('Workflow.ChopDataIfGreaterThan')[0]
+        x_max = workspace.readX(0)[-1]
+        chopped_data = x_max > chop_threshold
+    except IndexError:
+        logger.warning("Chop threshold not found in instrument parameters")
+        chopped_data = False
+    logger.information('Workspace {0} need data chop: {1}'.format(workspace_name, str(chopped_data)))
+
+    if chopped_data:
+        ChopData(InputWorkspace=workspace,
+                 OutputWorkspace=workspace_name,
+                 MonitorWorkspaceIndex=monitor_index,
+                 IntegrationRangeLower=5000.0,
+                 IntegrationRangeUpper=10000.0,
+                 NChops=5)
+        return mtd[workspace_name].getNames(), True
+    else:
+        return [workspace_name], False
+
+
+# -------------------------------------------------------------------------------
+
+
+def crop_workspaces(workspace_names, spec_min, spec_max, extract_monitors=True, monitor_index=0):
+    """
+    Crops the workspaces with the specified workspace names, from the specified minimum
+    spectra to the specified maximum spectra.
+
+    :param workspace_names:     The names of the workspaces to crop
+    :param spec_min:            The minimum spectra of the cropping region
+    :param spec_max:            The maximum spectra of the cropping region
+    :param extract_monitors:    If True, extracts monitors from the workspaces
+    :param monitor_index:       The index of the monitors in the workspaces
+    """
+    from mantid.simpleapi import ExtractSingleSpectrum, CropWorkspace
+
+    for workspace_name in workspace_names:
+        if extract_monitors:
+            # Get the monitor spectrum
+            monitor_ws_name = workspace_name + '_mon'
+            ExtractSingleSpectrum(InputWorkspace=workspace_name,
+                                  OutputWorkspace=monitor_ws_name,
+                                  WorkspaceIndex=monitor_index)
+
+        # Crop to the detectors required
+        workspace = mtd[workspace_name]
+        CropWorkspace(InputWorkspace=workspace_name,
+                      OutputWorkspace=workspace_name,
+                      StartWorkspaceIndex=workspace.getIndexFromSpectrumNumber(int(spec_min)),
+                      EndWorkspaceIndex=workspace.getIndexFromSpectrumNumber(int(spec_max)))
+
+
+# -------------------------------------------------------------------------------
+
+
 def sum_regular_runs(workspace_names):
     """
     Sum runs with single workspace data.
......
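(Not part of the changeset: a minimal sketch of how the helpers above compose for a single file specifier, following the same steps as _load_files. The run, parameter-file name and spectra range are illustrative and assume a Mantid session with the IRIS test data available.)

    from mantid import mtd
    from IndirectReductionCommon import do_load, chop_workspace, crop_workspaces

    ws_name = 'IRS26173'
    # The parameter file name below is illustrative.
    do_load('IRS26173.RAW', ws_name, 'IRIS_diffraction_diffspec_Parameters.xml',
            load_logs=True, load_opts={})

    workspace = mtd[ws_name]
    monitor_index = int(workspace.getInstrument().getNumberParameter('Workflow.Monitor1-SpectrumNumber')[0])

    # Chop into multiple frames only if the instrument parameter demands it,
    # then extract the monitor and crop to the requested detector range.
    workspaces, chopped_data = chop_workspace(workspace, monitor_index)
    crop_workspaces(workspaces, 105, 112, extract_monitors=True, monitor_index=monitor_index)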