diff --git a/Framework/PythonInterface/mantid/api/src/Exports/FileFinder.cpp b/Framework/PythonInterface/mantid/api/src/Exports/FileFinder.cpp
index 00cf5ee82bb6d596dfca593540cce0fe2d81562d..0431dcd3f7ccf62d52e9bf5b4adea7c072335810 100644
--- a/Framework/PythonInterface/mantid/api/src/Exports/FileFinder.cpp
+++ b/Framework/PythonInterface/mantid/api/src/Exports/FileFinder.cpp
@@ -37,6 +37,11 @@ void export_FileFinder() {
            "The hint can be a comma separated list of run numbers and can also "
            "include ranges of runs, e.g. 123-135 or equivalently 123-35"
            "If no instrument prefix is given then the current default is used.")
+      .def("getCaseSensitive", &FileFinderImpl::getCaseSensitive, (arg("self")),
+           "Option to get if file finder should be case sensitive.")
+      .def("setCaseSensitive", &FileFinderImpl::setCaseSensitive,
+           (arg("self"), arg("cs")),
+           "Option to set if file finder should be case sensitive.")
       .def("Instance", &FileFinder::Instance,
            return_value_policy<reference_existing_object>(),
            "Returns a reference to the FileFinder singleton instance")
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReduction.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReduction.py
index bd317b23faf667e2f861be87725701b466c70e05..6a73db3139338eb2f23acedfefb832b252193730 100644
--- a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReduction.py
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReduction.py
@@ -3,7 +3,7 @@ from __future__ import (absolute_import, division, print_function)
 
 import os
 
-from IndirectReductionCommon import load_files
+from IndirectReductionCommon import load_files, load_file_ranges
 
 from mantid.simpleapi import *
 from mantid.api import *
@@ -128,16 +128,6 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                 logger.warning('type = ' + str(type(mode)))
                 issues['CalFile'] = 'Cal Files are currently only available for use in OSIRIS diffspec mode'
 
-        num_samples = len(input_files)
-        num_vanadium = len(self.getProperty('VanadiumFiles').value)
-        if num_samples != num_vanadium and num_vanadium != 0:
-            run_num_mismatch = 'You must input the same number of sample and vanadium runs'
-            issues['InputFiles'] = run_num_mismatch
-            issues['VanadiumFiles'] = run_num_mismatch
-
-        if self._grouping_method == 'Workspace' and self._grouping_workspace is None:
-            issues['GroupingWorkspace'] = 'Must select a grouping workspace for current GroupingWorkspace'
-
         return issues
 
     # ------------------------------------------------------------------------------
@@ -163,13 +153,13 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
             load_opts['Mode'] = 'FoilOut'
             load_opts['LoadMonitors'] = True
 
-        self._workspace_names, self._chopped_data = load_files(self._data_files,
-                                                               self._ipf_filename,
-                                                               self._spectra_range[0],
-                                                               self._spectra_range[1],
-                                                               sum_files=self._sum_files,
-                                                               load_logs=self._load_logs,
-                                                               load_opts=load_opts)
+        self._workspace_names, self._chopped_data = load_file_ranges(self._data_files,
+                                                                     self._ipf_filename,
+                                                                     self._spectra_range[0],
+                                                                     self._spectra_range[1],
+                                                                     sum_files=self._sum_files,
+                                                                     load_logs=self._load_logs,
+                                                                     load_opts=load_opts)
 
         # applies the changes in the provided calibration file
         self._apply_calibration()
@@ -182,10 +172,12 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                                               self._ipf_filename,
                                               self._spectra_range[0],
                                               self._spectra_range[1],
-                                              sum_files=self._sum_files,
                                               load_logs=self._load_logs,
                                               load_opts=load_opts)
 
+            if len(self._workspace_names) > len(self._vanadium_runs):
+                raise RuntimeError("There cannot be more sample runs than vanadium runs.")
+
         for index, c_ws_name in enumerate(self._workspace_names):
             is_multi_frame = isinstance(mtd[c_ws_name], WorkspaceGroup)
 
@@ -283,13 +275,11 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
 
         # Remove the container workspaces
         if self._container_workspace is not None:
-            DeleteWorkspace(self._container_workspace)
-            DeleteWorkspace(self._container_workspace + '_mon')
+            self._delete_all([self._container_workspace])
 
+        # Remove the vanadium workspaces
         if self._vanadium_ws:
-            for van_ws in self._vanadium_ws:
-                DeleteWorkspace(van_ws)
-                DeleteWorkspace(van_ws+'_mon')
+            self._delete_all(self._vanadium_ws)
 
         # Rename output workspaces
         output_workspace_names = [rename_reduction(ws_name, self._sum_files) for ws_name in self._workspace_names]
@@ -306,7 +296,6 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
         """
         Gets algorithm properties.
         """
-
         self._output_ws = self.getPropertyValue('OutputWorkspace')
         self._data_files = self.getProperty('InputFiles').value
         self._container_data_files = self.getProperty('ContainerFiles').value
@@ -340,17 +329,10 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
             self._ipf_filename = os.path.join(config['instrumentDefinition.directory'], self._ipf_filename)
         logger.information('IPF filename is: %s' % self._ipf_filename)
 
+        if len(self._data_files) == 1:
+            logger.warning('SumFiles options has no effect when only one file is provided')
         # Only enable sum files if we actually have more than one file
-        sum_files = self.getProperty('SumFiles').value
-        self._sum_files = False
-
-        if sum_files:
-            num_raw_files = len(self._data_files)
-            if num_raw_files > 1:
-                self._sum_files = True
-                logger.information('Summing files enabled (have %d files)' % num_raw_files)
-            else:
-                logger.information('SumFiles options is ignored when only one file is provided')
+        self._sum_files = self.getProperty('SumFiles').value
 
     def _apply_calibration(self):
         """
@@ -388,6 +370,19 @@ class ISISIndirectDiffractionReduction(DataProcessorAlgorithm):
                       Factor=scale_factor,
                       Operation='Multiply')
 
+    def _delete_all(self, workspace_names):
+        """
+        Deletes the workspaces with the specified names and their associated
+        monitor workspaces.
+
+        :param workspace_names: The names of the workspaces to delete.
+        """
+
+        for workspace_name in workspace_names:
+            DeleteWorkspace(workspace_name)
+
+            if mtd.doesExist(workspace_name + "_mon"):
+                DeleteWorkspace(workspace_name + '_mon')
 
 # ------------------------------------------------------------------------------
 
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py
index 9d7821aa8ccb0b6d693d6f090214ff8c0507d183..004a3881328b716048b6cddb53e2aa21692fc316 100644
--- a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py
@@ -394,8 +394,9 @@ class OSIRISDiffractionReduction(PythonAlgorithm):
 
         num_samples = len(self._sample_runs)
         num_vanadium = len(self._vanadium_runs)
-        if num_samples != num_vanadium:
-            run_num_mismatch = 'You must input the same number of sample and vanadium runs'
+
+        if num_samples > num_vanadium:
+            run_num_mismatch = 'You must input at least as many vanadium files as sample files'
             issues['Sample'] = run_num_mismatch
             issues['Vanadium'] = run_num_mismatch
         if self._container_files:
diff --git a/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReductionTest.py b/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReductionTest.py
index b52b217a28e3449d524d9cb5b0c6cbe478ae8b91..181e71cb2e5bf7312202a373ec8fe439d4cb39a4 100644
--- a/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReductionTest.py
+++ b/Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/ISISIndirectDiffractionReductionTest.py
@@ -5,6 +5,7 @@ from __future__ import (absolute_import, division, print_function)
 import unittest
 from mantid.simpleapi import *
 from mantid.api import *
+from mantid.kernel import config
 
 
 class ISISIndirectDiffractionReductionTest(unittest.TestCase):
@@ -73,23 +74,26 @@ class ISISIndirectDiffractionReductionTest(unittest.TestCase):
         """
         Test summing multiple runs.
         """
+        cs = FileFinder.Instance().getCaseSensitive()
 
-        wks = ISISIndirectDiffractionReduction(InputFiles=['IRS26176.RAW', 'IRS26173.RAW'],
+        FileFinder.Instance().setCaseSensitive(False)
+        wks = ISISIndirectDiffractionReduction(InputFiles=['26173-26176'],
                                                SumFiles=True,
                                                Instrument='IRIS',
                                                Mode='diffspec',
                                                SpectraRange=[105, 112])
+        FileFinder.Instance().setCaseSensitive(cs)
 
         self.assertTrue(isinstance(wks, WorkspaceGroup), 'Result workspace should be a workspace group.')
         self.assertEqual(len(wks), 1)
-        self.assertEqual(wks.getNames()[0], 'iris26176_multi_diffspec_red')
+        self.assertEqual(wks.getNames()[0], 'iris26173_multi_diffspec_red')
 
         red_ws = wks[0]
         self.assertEqual(red_ws.getAxis(0).getUnit().unitID(), 'dSpacing')
         self.assertEqual(red_ws.getNumberHistograms(), 1)
 
         self.assertTrue('multi_run_numbers' in red_ws.getRun())
-        self.assertEqual(red_ws.getRun().get('multi_run_numbers').value, '26176,26173')
+        self.assertEqual(red_ws.getRun().get('multi_run_numbers').value, '26173,26174,26175,26176')
 
     def test_grouping_individual(self):
         """
diff --git a/Testing/Data/UnitTest/IRS26174.RAW.md5 b/Testing/Data/UnitTest/IRS26174.RAW.md5
new file mode 100644
index 0000000000000000000000000000000000000000..a594850cef672d7bab72f3c6a4d04bb4997c68e8
--- /dev/null
+++ b/Testing/Data/UnitTest/IRS26174.RAW.md5
@@ -0,0 +1 @@
+94e0fbf798a1b3c3d8718dfe4ebcaab3
diff --git a/Testing/Data/UnitTest/IRS26175.RAW.md5 b/Testing/Data/UnitTest/IRS26175.RAW.md5
new file mode 100644
index 0000000000000000000000000000000000000000..cb46f48bc5faef6e550b2857b89abe83bb8c3245
--- /dev/null
+++ b/Testing/Data/UnitTest/IRS26175.RAW.md5
@@ -0,0 +1 @@
+019aa1211f82d2fc383dcd64e8679bb9
diff --git a/docs/source/release/v3.11.0/indirect_inelastic.rst b/docs/source/release/v3.11.0/indirect_inelastic.rst
index ce609d3618b98b0cc94c234b37da4db8a3b45324..214283d212a8a96f76cd222603cf261886cfdb85 100644
--- a/docs/source/release/v3.11.0/indirect_inelastic.rst
+++ b/docs/source/release/v3.11.0/indirect_inelastic.rst
@@ -36,18 +36,20 @@ Elwin
 Bugfixes
 --------
 - Save Result now writes to file the temperature-dependent elastic intensity normalized to the lowest temperature.
-- Added 'ExtractMembers' property to ConvolutionFitSequential algorithm - this allows for extracting the members of the
-  convolution fitting into their own workspaces.
 
 ConvFit
 ~~~~~~~
 
+Improvements
+------------
+- Added 'ExtractMembers' property to ConvolutionFitSequential algorithm - this allows for extracting the members of the
+  convolution fitting into their own workspaces.
+
 Bugfixes
 --------
 - Correct treatment of the resolution function: convolve sample and resolution spectra with same momentum transfer.
 - Property to pass the workspace index added to :ref:`algm-ConvolutionFitSequential`.
 
-
 MSDFit
 ~~~~~~
 
@@ -82,5 +84,10 @@ Bugfixes
 - An issue has been fixed in :ref:`algm-IndirectILLEnergyTransfer` when handling the data with mirror sense, that have shifted 0 monitor counts in the left and right wings. This was causing the left and right workspaces to have different x-axis binning and to fail to sum during the unmirroring step. 
 - An issue has been fixed in :ref:`algm-IndirectILLReductionFWS` when the scaling of the data after vanadium calibration was not applied.
 - :ref:`algm-CalculateSampleTransmission` now divides by the tabulated wavelength when calculating the absorption cross section.
+- The Sum Files option in the Indirect Diffraction Reduction interface now correctly matches each summed range of sample
+  runs (e.g. A-B, where A and B are run numbers) to its corresponding vanadium run, dependent on the D-Range.
+- The 'Sample Runs' field in the Indirect Diffraction Interface now recognizes three operators: '-', '+' and ':'. The '-' operator
+  supplies a range of runs, which are summed when SumFiles is checked. The '+' operator supplies a list of runs, which are summed
+  when SumFiles is checked. The ':' operator supplies a range of runs that will never be summed.
 
 `Full list of changes on GitHub <http://github.com/mantidproject/mantid/pulls?q=is%3Apr+milestone%3A%22Release+3.11%22+is%3Amerged+label%3A%22Component%3A+Indirect+Inelastic%22>`_
diff --git a/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.cpp b/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.cpp
index c308dc7f5745a49606a7c14b84bdec2ae46303f3..2cb4f68f006b9263917e0012e676cda08c1429a5 100644
--- a/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.cpp
+++ b/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.cpp
@@ -112,6 +112,14 @@ void IndirectDiffractionReduction::run() {
     showInformationBox("Sample files input is invalid.");
     return;
   }
+
+  if (mode == "diffspec" && m_uiForm.ckUseVanadium->isChecked() &&
+      m_uiForm.rfVanFile_only->getFilenames().isEmpty()) {
+    showInformationBox("Use Vanadium File checked but no vanadium files "
+                       "have been supplied.");
+    return;
+  }
+
   if (instName == "OSIRIS") {
     if (mode == "diffonly") {
       if (!validateVanCal()) {
@@ -145,7 +153,10 @@ void IndirectDiffractionReduction::algorithmComplete(bool error) {
   // Handles completion of the diffraction algorithm chain
   disconnect(m_batchAlgoRunner, 0, this, SLOT(algorithmComplete(bool)));
 
-  deleteGroupingWorkspace();
+  // Delete grouping workspace, if created.
+  if (AnalysisDataService::Instance().doesExist(m_groupingWsName)) {
+    deleteGroupingWorkspace();
+  }
 
   if (error) {
     showInformationBox(
@@ -364,8 +375,7 @@ void IndirectDiffractionReduction::runGenericReduction(QString instName,
   msgDiffReduction->setProperty("LoadLogFiles",
                                 m_uiForm.ckLoadLogs->isChecked());
   msgDiffReduction->setProperty(
-      "InputFiles",
-      m_uiForm.rfSampleFiles->getFilenames().join(",").toStdString());
+      "InputFiles", m_uiForm.rfSampleFiles->getText().toStdString());
   msgDiffReduction->setProperty("SpectraRange", detRange);
   msgDiffReduction->setProperty("RebinParam", rebin.toStdString());
   msgDiffReduction->setProperty("OutputWorkspace",
diff --git a/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.ui b/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.ui
index 8d3d4515b75d0b5b54e3c06d52c8c5e34305976a..c2976d71c034093ac323d6d69aaae2ab76a240f2 100644
--- a/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.ui
+++ b/qt/scientific_interfaces/Indirect/IndirectDiffractionReduction.ui
@@ -97,7 +97,7 @@
        <item row="2" column="2">
         <widget class="QCheckBox" name="ckSumFiles">
          <property name="text">
-          <string>Sum Files</string>
+          <string>Sum Sample Files</string>
          </property>
         </widget>
        </item>
@@ -201,7 +201,7 @@
            <item row="5" column="0">
             <widget class="QCheckBox" name="ckManualDRange">
              <property name="text">
-              <string>Manual dRange:</string>
+              <string>Manual dRange (Vanadium):</string>
              </property>
             </widget>
            </item>
diff --git a/scripts/Inelastic/IndirectReductionCommon.py b/scripts/Inelastic/IndirectReductionCommon.py
index a361510a0592860a597946b6619d696aef463266..7071860b05b5fd2749c68e14ff32a8c14a528234 100644
--- a/scripts/Inelastic/IndirectReductionCommon.py
+++ b/scripts/Inelastic/IndirectReductionCommon.py
@@ -1,4 +1,5 @@
 from __future__ import (absolute_import, division, print_function)
+from mantid.simpleapi import Load
 from mantid.api import WorkspaceGroup, AlgorithmManager
 from mantid import mtd, logger, config
 
@@ -8,6 +9,80 @@ import numpy as np
 
 # -------------------------------------------------------------------------------
 
+def create_range_from(range_str, delimiter):
+    """
+    Creates a range from the specified string, by splitting by the specified
+    delimiter.
+
+    :param range_str:   The range string, in the format A<delimiter>B, where A is
+                        the lower bound of the range and B is the upper bound
+                        (both inclusive).
+    :param delimiter:   The range delimiter.
+    :return:            The range created from the range string.
+    """
+    lower, upper = range_str.split(delimiter, 1)
+    return range(int(lower), int(upper)+1)
+
+
+def create_file_range_parser(instrument):
+    """
+    Creates a parser which takes a specified file range string of the
+    format A-B, and returns a list of the files in that range preceded
+    by the specified instrument name.
+
+    :param instrument:  The instrument name.
+    :return:            A file range parser.
+    """
+
+    def parser(file_range):
+        file_range = file_range.strip()
+        # Check whether this is a range or single file
+        if '-' in file_range:
+            return [[instrument + str(run) for run in create_range_from(file_range, '-')]]
+        elif ':' in file_range:
+            return [[instrument + str(run)] for run in create_range_from(file_range, ':')]
+        elif '+' in file_range:
+            return [[instrument + run for run in file_range.split('+')]]
+        else:
+            try:
+                return [[instrument + str(int(file_range))]]
+            except ValueError:
+                return [[file_range]]
+
+    return parser
+
+
+def load_file_ranges(file_ranges, ipf_filename, spec_min, spec_max, sum_files=True, load_logs=True, load_opts=None):
+    """
+    Loads a set of files from specified file ranges and extracts just the spectra we
+    care about (i.e. detector range and monitor).
+
+    @param file_ranges List of data file ranges
+    @param ipf_filename File path/name for the instrument parameter file to load
+    @param spec_min Minimum spectra ID to load
+    @param spec_max Maximum spectra ID to load
+    @param sum_files Sum loaded files
+    @param load_logs Load log files when loading runs
+    @param load_opts Additional options to be passed to load algorithm
+
+    @return List of loaded workspace names and flag indicating chopped data
+    """
+    instrument = os.path.splitext(os.path.basename(ipf_filename))[0]
+    instrument = instrument.split('_')[0]
+    parse_file_range = create_file_range_parser(instrument)
+    file_ranges = [file_range for range_str in file_ranges for file_range in range_str.split(',')]
+    file_groups = [file_group for file_range in file_ranges for file_group in parse_file_range(file_range)]
+
+    workspace_names = []
+    chopped_data = False
+
+    for file_group in file_groups:
+        created_workspaces, chopped_data = load_files(file_group, ipf_filename, spec_min,
+                                                      spec_max, sum_files, load_logs, load_opts)
+        workspace_names.extend(created_workspaces)
+
+    return workspace_names, chopped_data
+
 
 def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, load_logs=True, load_opts=None):
     """
@@ -23,9 +98,33 @@ def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, lo
 
     @return List of loaded workspace names and flag indicating chopped data
     """
-    from mantid.simpleapi import (Load, LoadVesuvio, LoadParameterFile,
-                                  ChopData, ExtractSingleSpectrum,
-                                  CropWorkspace, DeleteWorkspace)
+    workspace_names, chopped_data = _load_files(data_files, ipf_filename, spec_min, spec_max, load_logs, load_opts)
+
+    # Sum files if needed
+    if sum_files and len(data_files) > 1:
+        if chopped_data:
+            workspace_names = sum_chopped_runs(workspace_names)
+        else:
+            workspace_names = sum_regular_runs(workspace_names)
+
+    logger.information('Summed workspace names: %s' % (str(workspace_names)))
+
+    return workspace_names, chopped_data
+
+
+def _load_files(file_specifiers, ipf_filename, spec_min, spec_max, load_logs=True, load_opts=None):
+    """
+    Loads a set of files and extracts just the spectra we care about (i.e. detector range and monitor).
+
+    @param file_specifiers List of data file specifiers
+    @param ipf_filename File path/name for the instrument parameter file to load
+    @param spec_min Minimum spectra ID to load
+    @param spec_max Maximum spectra ID to load
+    @param load_logs Load log files when loading runs
+    @param load_opts Additional options to be passed to load algorithm
+
+    @return List of loaded workspace names and flag indicating chopped data
+    """
     delete_monitors = False
 
     if load_opts is None:
@@ -36,93 +135,146 @@ def load_files(data_files, ipf_filename, spec_min, spec_max, sum_files=False, lo
         load_opts.pop("DeleteMonitors")
 
     workspace_names = []
+    chopped_data = False
 
-    for filename in data_files:
+    for file_specifier in file_specifiers:
         # The filename without path and extension will be the workspace name
-        ws_name = os.path.splitext(os.path.basename(str(filename)))[0]
-        logger.debug('Loading file %s as workspace %s' % (filename, ws_name))
-
-        if 'VESUVIO' in ipf_filename:
-            # Load all spectra. They are cropped later
-            LoadVesuvio(Filename=str(filename),
-                        OutputWorkspace=ws_name,
-                        SpectrumList='1-198',
-                        **load_opts)
-        else:
-            Load(Filename=filename,
-                 OutputWorkspace=ws_name,
-                 LoadLogFiles=load_logs,
-                 **load_opts)
-
-        # Load the instrument parameters
-        LoadParameterFile(Workspace=ws_name,
-                          Filename=ipf_filename)
+        ws_name = os.path.splitext(os.path.basename(str(file_specifier)))[0]
+        logger.debug('Loading file %s as workspace %s' % (file_specifier, ws_name))
+        do_load(file_specifier, ws_name, ipf_filename, load_logs, load_opts)
+        workspace = mtd[ws_name]
 
         # Add the workspace to the list of workspaces
         workspace_names.append(ws_name)
 
         # Get the spectrum number for the monitor
-        instrument = mtd[ws_name].getInstrument()
-        monitor_index = int(instrument.getNumberParameter('Workflow.Monitor1-SpectrumNumber')[0])
-        logger.debug('Workspace %s monitor 1 spectrum number :%d' % (ws_name, monitor_index))
+        instrument = workspace.getInstrument()
+        monitor_param = instrument.getNumberParameter('Workflow.Monitor1-SpectrumNumber')
 
-        # Chop data if required
-        try:
-            chop_threshold = mtd[ws_name].getInstrument().getNumberParameter('Workflow.ChopDataIfGreaterThan')[0]
-            x_max = mtd[ws_name].readX(0)[-1]
-            chopped_data = x_max > chop_threshold
-        except IndexError:
-            chopped_data = False
-        logger.information('Workspace {0} need data chop: {1}'.format(ws_name, str(chopped_data)))
+        if monitor_param:
+            monitor_index = int(monitor_param[0])
+            logger.debug('Workspace %s monitor 1 spectrum number :%d' % (ws_name, monitor_index))
 
-        workspaces = [ws_name]
-        if chopped_data:
-            ChopData(InputWorkspace=ws_name,
-                     OutputWorkspace=ws_name,
-                     MonitorWorkspaceIndex=monitor_index,
-                     IntegrationRangeLower=5000.0,
-                     IntegrationRangeUpper=10000.0,
-                     NChops=5)
-            workspaces = mtd[ws_name].getNames()
-
-        for chop_ws_name in workspaces:
-            # Get the monitor spectrum
-            monitor_ws_name = chop_ws_name + '_mon'
-            ExtractSingleSpectrum(InputWorkspace=chop_ws_name,
-                                  OutputWorkspace=monitor_ws_name,
-                                  WorkspaceIndex=monitor_index)
-
-            if delete_monitors:
-                DeleteWorkspace(Workspace=monitor_ws_name)
-
-            # Crop to the detectors required
-            chop_ws = mtd[chop_ws_name]
-            CropWorkspace(InputWorkspace=chop_ws_name,
-                          OutputWorkspace=chop_ws_name,
-                          StartWorkspaceIndex=chop_ws.getIndexFromSpectrumNumber(int(spec_min)),
-                          EndWorkspaceIndex=chop_ws.getIndexFromSpectrumNumber(int(spec_max)))
+            workspaces, chopped_data = chop_workspace(workspace, monitor_index)
+            crop_workspaces(workspaces, spec_min, spec_max, not delete_monitors, monitor_index)
 
     logger.information('Loaded workspace names: %s' % (str(workspace_names)))
     logger.information('Chopped data: %s' % (str(chopped_data)))
 
-    # Sum files if needed
-    if sum_files and len(data_files) > 1:
-        if chopped_data:
-            workspace_names = sum_chopped_runs(workspace_names)
-        else:
-            workspace_names = sum_regular_runs(workspace_names)
-
     if delete_monitors:
         load_opts['DeleteMonitors'] = True
 
-    logger.information('Summed workspace names: %s' % (str(workspace_names)))
-
     return workspace_names, chopped_data
 
 
 # -------------------------------------------------------------------------------
 
 
+def do_load(file_specifier, output_ws_name, ipf_filename, load_logs, load_opts):
+    """
+    Loads the files, passing the given file specifier in the load command.
+
+    :param file_specifier:  The file specifier (single file, range or sum)
+    :param output_ws_name:  The name of the output workspace to create
+    :param ipf_filename:    The instrument parameter file to load with
+    :param load_logs:       If True, load logs
+    :param load_opts:       Additional loading options
+    """
+    from mantid.simpleapi import LoadVesuvio, LoadParameterFile
+
+    if 'VESUVIO' in ipf_filename:
+        # Load all spectra. They are cropped later
+        LoadVesuvio(Filename=str(file_specifier),
+                    OutputWorkspace=output_ws_name,
+                    SpectrumList='1-198',
+                    **load_opts)
+    else:
+        Load(Filename=file_specifier,
+             OutputWorkspace=output_ws_name,
+             LoadLogFiles=load_logs,
+             **load_opts)
+
+    # Load the instrument parameters
+    LoadParameterFile(Workspace=output_ws_name,
+                      Filename=ipf_filename)
+
+
+# -------------------------------------------------------------------------------
+
+
+def chop_workspace(workspace, monitor_index):
+    """
+    Chops the specified workspace if its maximum x-value exceeds its instrument
+    parameter, 'Workflow.ChopDataIfGreaterThan'.
+
+    :param workspace:     The workspace to chop
+    :param monitor_index: The index of the monitor spectrum in the workspace.
+    :return:              A tuple of the list of output workspace names and a boolean
+                          specifying whether the workspace was chopped.
+    """
+    from mantid.simpleapi import ChopData
+
+    workspace_name = workspace.getName()
+
+    # Chop data if required
+    try:
+        chop_threshold = workspace.getInstrument().getNumberParameter('Workflow.ChopDataIfGreaterThan')[0]
+        x_max = workspace.readX(0)[-1]
+        chopped_data = x_max > chop_threshold
+    except IndexError:
+        logger.warning("Chop threshold not found in instrument parameters")
+        chopped_data = False
+    logger.information('Workspace {0} need data chop: {1}'.format(workspace_name, str(chopped_data)))
+
+    if chopped_data:
+        ChopData(InputWorkspace=workspace,
+                 OutputWorkspace=workspace_name,
+                 MonitorWorkspaceIndex=monitor_index,
+                 IntegrationRangeLower=5000.0,
+                 IntegrationRangeUpper=10000.0,
+                 NChops=5)
+        return mtd[workspace_name].getNames(), True
+    else:
+        return [workspace_name], False
+
+
+# -------------------------------------------------------------------------------
+
+
+def crop_workspaces(workspace_names, spec_min, spec_max, extract_monitors=True, monitor_index=0):
+    """
+    Crops the workspaces with the specified workspace names, from the specified minimum
+    spectra to the specified maximum spectra.
+
+    :param workspace_names:     The names of the workspaces to crop
+    :param spec_min:            The minimum spectra of the cropping region
+    :param spec_max:            The maximum spectra of the cropping region
+    :param extract_monitors:    If True, extracts monitors from the workspaces
+    :param monitor_index:       The index of the monitors in the workspaces
+    """
+    from mantid.simpleapi import ExtractSingleSpectrum, CropWorkspace
+
+    for workspace_name in workspace_names:
+
+        if extract_monitors:
+            # Get the monitor spectrum
+            monitor_ws_name = workspace_name + '_mon'
+            ExtractSingleSpectrum(InputWorkspace=workspace_name,
+                                  OutputWorkspace=monitor_ws_name,
+                                  WorkspaceIndex=monitor_index)
+
+        # Crop to the detectors required
+        workspace = mtd[workspace_name]
+
+        CropWorkspace(InputWorkspace=workspace_name,
+                      OutputWorkspace=workspace_name,
+                      StartWorkspaceIndex=workspace.getIndexFromSpectrumNumber(int(spec_min)),
+                      EndWorkspaceIndex=workspace.getIndexFromSpectrumNumber(int(spec_max)))
+
+
+# -------------------------------------------------------------------------------
+
+
 def sum_regular_runs(workspace_names):
     """
     Sum runs with single workspace data.