diff --git a/Framework/PythonInterface/mantid/api/src/Exports/WorkspaceGroup.cpp b/Framework/PythonInterface/mantid/api/src/Exports/WorkspaceGroup.cpp
index 9ff121860cdfc972cec73c172a0156c01764c9eb..74417897033c259105ecfb7744f9646dc6cab5fb 100644
--- a/Framework/PythonInterface/mantid/api/src/Exports/WorkspaceGroup.cpp
+++ b/Framework/PythonInterface/mantid/api/src/Exports/WorkspaceGroup.cpp
@@ -93,7 +93,7 @@ void export_WorkspaceGroup() {
            return_value_policy<Policies::ToWeakPtr>(),
            "Returns the item at the given index")
       .def("isMultiPeriod", &WorkspaceGroup::isMultiperiod, arg("self"),
-           "Retuns true if the workspace group is multi-period")
+           "Returns true if the workspace group is multi-period")
       // ------------ Operators --------------------------------
       .def("__len__", &WorkspaceGroup::getNumberOfEntries, arg("self"),
            "Gets the number of entries in the workspace group")
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCore.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCore.py
index ebcfbbc336063abb70042871662ba11cc78bdd93..83d89901ca5d0eb7b875eee0b6b3f58a99b9a4fd 100644
--- a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCore.py
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCore.py
@@ -9,18 +9,15 @@
 """ SANSReductionCore algorithm runs the sequence of reduction steps which are necessary to reduce a data set."""
 
 from __future__ import (absolute_import, division, print_function)
-from mantid.kernel import (Direction, PropertyManagerProperty, StringListValidator)
-from mantid.api import (DistributedDataProcessorAlgorithm, MatrixWorkspaceProperty, AlgorithmFactory, PropertyMode,
-                        IEventWorkspace, Progress)
+from mantid.api import AlgorithmFactory, Progress
 
-from sans.state.state_base import create_deserialized_sans_state_from_property_manager
 from sans.algorithm_detail.mask_workspace import mask_bins
-from sans.common.constants import EMPTY_NAME
-from sans.common.general_functions import (create_child_algorithm, append_to_sans_file_tag)
-from sans.common.enums import (DetectorType, DataType)
+from sans.common.enums import DetectorType
 
+from SANSReductionCoreBase import SANSReductionCoreBase
 
-class SANSReductionCore(DistributedDataProcessorAlgorithm):
+
+class SANSReductionCore(SANSReductionCoreBase):
     def category(self):
         return 'SANS\\Reduction'
 
@@ -28,71 +25,8 @@ class SANSReductionCore(DistributedDataProcessorAlgorithm):
         return ' Runs the the core reduction elements.'
 
     def PyInit(self):
-        # ----------
-        # INPUT
-        # ----------
-        self.declareProperty(PropertyManagerProperty('SANSState'),
-                             doc='A property manager which fulfills the SANSState contract.')
-
-        # WORKSPACES
-        # Scatter Workspaces
-        self.declareProperty(MatrixWorkspaceProperty('ScatterWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The scatter workspace. This workspace does not contain monitors.')
-        self.declareProperty(MatrixWorkspaceProperty('ScatterMonitorWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The scatter monitor workspace. This workspace only contains monitors.')
-
-        # Transmission Workspace
-        self.declareProperty(MatrixWorkspaceProperty('TransmissionWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The transmission workspace.')
-
-        # Direct Workspace
-        self.declareProperty(MatrixWorkspaceProperty('DirectWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The direct workspace.')
-
-        self.setPropertyGroup("ScatterWorkspace", 'Data')
-        self.setPropertyGroup("ScatterMonitorWorkspace", 'Data')
-        self.setPropertyGroup("TransmissionWorkspace", 'Data')
-        self.setPropertyGroup("DirectWorkspace", 'Data')
-
-        # The component
-        allowed_detectors = StringListValidator([DetectorType.to_string(DetectorType.LAB),
-                                                 DetectorType.to_string(DetectorType.HAB)])
-        self.declareProperty("Component", DetectorType.to_string(DetectorType.LAB),
-                             validator=allowed_detectors, direction=Direction.Input,
-                             doc="The component of the instrument which is to be reduced.")
-
-        # The data type
-        allowed_data = StringListValidator([DataType.to_string(DataType.Sample),
-                                            DataType.to_string(DataType.Can)])
-        self.declareProperty("DataType", DataType.to_string(DataType.Sample),
-                             validator=allowed_data, direction=Direction.Input,
-                             doc="The component of the instrument which is to be reduced.")
-
-        # ----------
-        # OUTPUT
-        # ----------
-        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", '', direction=Direction.Output),
-                             doc='The output workspace.')
-
-        self.declareProperty(MatrixWorkspaceProperty('SumOfCounts', '', optional=PropertyMode.Optional,
-                                                     direction=Direction.Output),
-                             doc='The sum of the counts of the output workspace.')
-
-        self.declareProperty(MatrixWorkspaceProperty('SumOfNormFactors', '', optional=PropertyMode.Optional,
-                                                     direction=Direction.Output),
-                             doc='The sum of the counts of the output workspace.')
-
-        self.declareProperty(MatrixWorkspaceProperty('CalculatedTransmissionWorkspace', '', optional=PropertyMode.Optional,
-                                                     direction=Direction.Output),
-                             doc='The calculated transmission workspace')
-
-        self.declareProperty(MatrixWorkspaceProperty('UnfittedTransmissionWorkspace', '', optional=PropertyMode.Optional,
-                                                     direction=Direction.Output),
-                             doc='The unfitted transmission workspace')
+        self._pyinit_input()
+        self._pyinit_output()
 
     def PyExec(self):
         # Get the input
@@ -127,7 +61,7 @@ class SANSReductionCore(DistributedDataProcessorAlgorithm):
                                                                        data_type_as_string)
 
         # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-        # COMPATIBILITY BEGIN
+        # COMPATIBILITY
         # The old reduction workflow converted the workspace to a histogram at this point.
         # A more recent workflow keeps the workspaces as Events for longer, to make use of cheap rebinning for
         # EventWorkspaces, and to optimise for event slicing.
@@ -135,62 +69,8 @@ class SANSReductionCore(DistributedDataProcessorAlgorithm):
         # to keep track of the bin masking. These masks are lifted from the dummy workspace to the actual workspace
         # near the end of the reduction.
         # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-        compatibility = state.compatibility
-        is_event_workspace = isinstance(workspace, IEventWorkspace)
-        use_dummy_workspace = False
-        if is_event_workspace:
-            if compatibility.use_compatibility_mode:
-                # We convert the workspace here to a histogram workspace, since we cannot otherwise
-                # compare the results between the old and the new reduction workspace in a meaningful manner.
-                # The old one is histogram and the new one is event.
-                # Rebin to monitor workspace
-                if compatibility.time_rebin_string:
-                    rebin_name = "Rebin"
-                    rebin_option = {"InputWorkspace": workspace,
-                                    "Params": compatibility.time_rebin_string,
-                                    "OutputWorkspace": EMPTY_NAME,
-                                    "PreserveEvents": False}
-                    rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
-                    rebin_alg.execute()
-                    workspace = rebin_alg.getProperty("OutputWorkspace").value
-                else:
-                    rebin_name = "RebinToWorkspace"
-                    rebin_option = {"WorkspaceToRebin": workspace,
-                                    "WorkspaceToMatch": monitor_workspace,
-                                    "OutputWorkspace": EMPTY_NAME,
-                                    "PreserveEvents": False}
-                    rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
-                    rebin_alg.execute()
-                    workspace = rebin_alg.getProperty("OutputWorkspace").value
-            else:
-                # If not using compatibility mode, we create a histogram from the workspace, which will store
-                # the bin masking.
-                # Extract a single spectrum to make operations as quick as possible.
-                # We only need the mask flags, not the y data.
-                use_dummy_workspace = True
-
-                # Extract only a single spectrum so dummy workspace which contains bin masks is a small as possible
-                # (cheaper operations).
-                # This is find because we only care about the mask flags in this workspace, not the y data.
-                extract_spectrum_name = "ExtractSingleSpectrum"
-                extract_spectrum_option = {"InputWorkspace": workspace,
-                                           "OutputWorkspace": "dummy_mask_workspace",
-                                           "WorkspaceIndex": 0}
-                extract_spectrum_alg = create_child_algorithm(self, extract_spectrum_name, **extract_spectrum_option)
-                extract_spectrum_alg.execute()
-                dummy_mask_workspace = extract_spectrum_alg.getProperty("OutputWorkspace").value
-
-                rebin_name = "RebinToWorkspace"
-                rebin_option = {"WorkspaceToRebin": dummy_mask_workspace,
-                                "WorkspaceToMatch": monitor_workspace,
-                                "OutputWorkspace": "dummy_mask_workspace",
-                                "PreserveEvents": False}
-                rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
-                rebin_alg.execute()
-                dummy_mask_workspace = rebin_alg.getProperty("OutputWorkspace").value
-        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-        # COMPATIBILITY END
-        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+        workspace, dummy_mask_workspace, \
+            use_dummy_workspace = self._check_compatibility_mode(workspace, monitor_workspace, state.compatibility)
 
         # ------------------------------------------------------------
         # 4. Move the workspace into the correct position
@@ -273,169 +153,6 @@ class SANSReductionCore(DistributedDataProcessorAlgorithm):
         self.setProperty("CalculatedTransmissionWorkspace", calculated_transmission_workspace)
         self.setProperty("UnfittedTransmissionWorkspace", unfitted_transmission_workspace)
 
-    def _get_cropped_workspace(self, component):
-        scatter_workspace = self.getProperty("ScatterWorkspace").value
-        crop_name = "SANSCrop"
-        crop_options = {"InputWorkspace": scatter_workspace,
-                        "OutputWorkspace": EMPTY_NAME,
-                        "Component": component}
-        crop_alg = create_child_algorithm(self, crop_name, **crop_options)
-        crop_alg.execute()
-        return crop_alg.getProperty("OutputWorkspace").value
-
-    def _slice(self, state_serialized, workspace, monitor_workspace, data_type_as_string):
-        slice_name = "SANSSliceEvent"
-        slice_options = {"SANSState": state_serialized,
-                         "InputWorkspace": workspace,
-                         "InputWorkspaceMonitor": monitor_workspace,
-                         "OutputWorkspace": EMPTY_NAME,
-                         "OutputWorkspaceMonitor": "dummy2",
-                         "DataType": data_type_as_string}
-        slice_alg = create_child_algorithm(self, slice_name, **slice_options)
-        slice_alg.execute()
-
-        workspace = slice_alg.getProperty("OutputWorkspace").value
-        monitor_workspace = slice_alg.getProperty("OutputWorkspaceMonitor").value
-        slice_event_factor = slice_alg.getProperty("SliceEventFactor").value
-        return workspace, monitor_workspace, slice_event_factor
-
-    def _move(self, state_serialized, workspace, component, is_transmission=False):
-        # First we set the workspace to zero, since it might have been moved around by the user in the ADS
-        # Second we use the initial move to bring the workspace into the correct position
-        move_name = "SANSMove"
-        move_options = {"SANSState": state_serialized,
-                        "Workspace": workspace,
-                        "MoveType": "SetToZero",
-                        "Component": ""}
-        move_alg = create_child_algorithm(self, move_name, **move_options)
-        move_alg.execute()
-        workspace = move_alg.getProperty("Workspace").value
-
-        # Do the initial move
-        move_alg.setProperty("MoveType", "InitialMove")
-        move_alg.setProperty("Component", component)
-        move_alg.setProperty("Workspace", workspace)
-        move_alg.setProperty("IsTransmissionWorkspace", is_transmission)
-        move_alg.execute()
-        return move_alg.getProperty("Workspace").value
-
-    def _mask(self, state_serialized, workspace, component):
-        mask_name = "SANSMaskWorkspace"
-        mask_options = {"SANSState": state_serialized,
-                        "Workspace": workspace,
-                        "Component": component}
-        mask_alg = create_child_algorithm(self, mask_name, **mask_options)
-        mask_alg.execute()
-        return mask_alg.getProperty("Workspace").value
-
-    def _convert_to_wavelength(self, state_serialized, workspace):
-        wavelength_name = "SANSConvertToWavelength"
-        wavelength_options = {"SANSState": state_serialized,
-                              "InputWorkspace": workspace}
-        wavelength_alg = create_child_algorithm(self, wavelength_name, **wavelength_options)
-        wavelength_alg.setPropertyValue("OutputWorkspace", EMPTY_NAME)
-        wavelength_alg.setProperty("OutputWorkspace", workspace)
-        wavelength_alg.execute()
-        return wavelength_alg.getProperty("OutputWorkspace").value
-
-    def _scale(self, state_serialized, workspace):
-        scale_name = "SANSScale"
-        scale_options = {"SANSState": state_serialized,
-                         "InputWorkspace": workspace,
-                         "OutputWorkspace": EMPTY_NAME}
-        scale_alg = create_child_algorithm(self, scale_name, **scale_options)
-        scale_alg.execute()
-        return scale_alg.getProperty("OutputWorkspace").value
-
-    def _adjustment(self, state_serialized, workspace, monitor_workspace, component_as_string, data_type):
-        transmission_workspace = self._get_transmission_workspace()
-        direct_workspace = self._get_direct_workspace()
-
-        adjustment_name = "SANSCreateAdjustmentWorkspaces"
-        adjustment_options = {"SANSState": state_serialized,
-                              "Component": component_as_string,
-                              "DataType": data_type,
-                              "MonitorWorkspace": monitor_workspace,
-                              "SampleData": workspace,
-                              "OutputWorkspaceWavelengthAdjustment": EMPTY_NAME,
-                              "OutputWorkspacePixelAdjustment": EMPTY_NAME,
-                              "OutputWorkspaceWavelengthAndPixelAdjustment": EMPTY_NAME}
-        if transmission_workspace:
-            transmission_workspace = self._move(state_serialized, transmission_workspace, component_as_string,
-                                                is_transmission=True)
-            adjustment_options.update({"TransmissionWorkspace": transmission_workspace})
-
-        if direct_workspace:
-            direct_workspace = self._move(state_serialized, direct_workspace, component_as_string, is_transmission=True)
-            adjustment_options.update({"DirectWorkspace": direct_workspace})
-
-        adjustment_alg = create_child_algorithm(self, adjustment_name, **adjustment_options)
-        adjustment_alg.execute()
-
-        wavelength_adjustment = adjustment_alg.getProperty("OutputWorkspaceWavelengthAdjustment").value
-        pixel_adjustment = adjustment_alg.getProperty("OutputWorkspacePixelAdjustment").value
-        wavelength_and_pixel_adjustment = adjustment_alg.getProperty(
-                                           "OutputWorkspaceWavelengthAndPixelAdjustment").value
-        calculated_transmission_workspace = adjustment_alg.getProperty("CalculatedTransmissionWorkspace").value
-        unfitted_transmission_workspace = adjustment_alg.getProperty("UnfittedTransmissionWorkspace").value
-        return wavelength_adjustment, pixel_adjustment, wavelength_and_pixel_adjustment, \
-            calculated_transmission_workspace, unfitted_transmission_workspace
-
-    def _copy_bin_masks(self, workspace, dummy_workspace):
-        mask_options = {"InputWorkspace": workspace,
-                        "MaskedWorkspace": dummy_workspace,
-                        "OutputWorkspace": EMPTY_NAME}
-        mask_alg = create_child_algorithm(self, "MaskBinsFromWorkspace", **mask_options)
-        mask_alg.execute()
-        return mask_alg.getProperty("OutputWorkspace").value
-
-    def _convert_to_histogram(self, workspace):
-        if isinstance(workspace, IEventWorkspace):
-            convert_name = "RebinToWorkspace"
-            convert_options = {"WorkspaceToRebin": workspace,
-                               "WorkspaceToMatch": workspace,
-                               "OutputWorkspace": "OutputWorkspace",
-                               "PreserveEvents": False}
-            convert_alg = create_child_algorithm(self, convert_name, **convert_options)
-            convert_alg.execute()
-            workspace = convert_alg.getProperty("OutputWorkspace").value
-            append_to_sans_file_tag(workspace, "_histogram")
-
-        return workspace
-
-    def _convert_to_q(self, state_serialized, workspace, wavelength_adjustment_workspace, pixel_adjustment_workspace,
-                      wavelength_and_pixel_adjustment_workspace):
-        """
-        A conversion to momentum transfer is performed in this step.
-
-        The conversion can be either to the modulus of Q in which case the output is a 1D workspace, or it can
-        be a 2D reduction where the y axis is Qy, ie it is a numeric axis.
-        @param state: a SANSState object
-        @param workspace: the workspace to convert to momentum transfer.
-        @param wavelength_adjustment_workspace: the wavelength adjustment workspace.
-        @param pixel_adjustment_workspace: the pixel adjustment workspace.
-        @param wavelength_and_pixel_adjustment_workspace: the wavelength and pixel adjustment workspace.
-        @return: a reduced workspace
-        """
-        convert_name = "SANSConvertToQ"
-        convert_options = {"InputWorkspace": workspace,
-                           "OutputWorkspace": EMPTY_NAME,
-                           "SANSState": state_serialized,
-                           "OutputParts": True}
-        if wavelength_adjustment_workspace:
-            convert_options.update({"InputWorkspaceWavelengthAdjustment": wavelength_adjustment_workspace})
-        if pixel_adjustment_workspace:
-            convert_options.update({"InputWorkspacePixelAdjustment": pixel_adjustment_workspace})
-        if wavelength_and_pixel_adjustment_workspace:
-            convert_options.update({"InputWorkspaceWavelengthAndPixelAdjustment":
-                                    wavelength_and_pixel_adjustment_workspace})
-        convert_alg = create_child_algorithm(self, convert_name, **convert_options)
-        convert_alg.execute()
-        data_workspace = convert_alg.getProperty("OutputWorkspace").value
-        sum_of_counts = convert_alg.getProperty("SumOfCounts").value
-        sum_of_norms = convert_alg.getProperty("SumOfNormFactors").value
-        return data_workspace, sum_of_counts, sum_of_norms
-
     def validateInputs(self):
         errors = dict()
         # Check that the input can be converted into the right state object
@@ -446,32 +163,6 @@ class SANSReductionCore(DistributedDataProcessorAlgorithm):
             errors.update({"SANSSingleReduction": str(err)})
         return errors
 
-    def _get_state(self):
-        state_property_manager = self.getProperty("SANSState").value
-        state = create_deserialized_sans_state_from_property_manager(state_property_manager)
-        state.property_manager = state_property_manager
-        return state
-
-    def _get_transmission_workspace(self):
-        transmission_workspace = self.getProperty("TransmissionWorkspace").value
-        return self._get_cloned_workspace(transmission_workspace) if transmission_workspace else None
-
-    def _get_direct_workspace(self):
-        direct_workspace = self.getProperty("DirectWorkspace").value
-        return self._get_cloned_workspace(direct_workspace) if direct_workspace else None
-
-    def _get_monitor_workspace(self):
-        monitor_workspace = self.getProperty("ScatterMonitorWorkspace").value
-        return self._get_cloned_workspace(monitor_workspace)
-
-    def _get_cloned_workspace(self, workspace):
-        clone_name = "CloneWorkspace"
-        clone_options = {"InputWorkspace": workspace,
-                         "OutputWorkspace": EMPTY_NAME}
-        clone_alg = create_child_algorithm(self, clone_name, **clone_options)
-        clone_alg.execute()
-        return clone_alg.getProperty("OutputWorkspace").value
-
     def _get_progress(self):
         return Progress(self, start=0.0, end=1.0, nreports=10)
 
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreBase.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreBase.py
new file mode 100644
index 0000000000000000000000000000000000000000..581cce00782dd9c1dd9fc667cda854d4d14a1213
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreBase.py
@@ -0,0 +1,333 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
+
+"""A base class to share functionality between SANSReductionCore algorithms."""
+
+from __future__ import (absolute_import, division, print_function)
+from mantid.kernel import (Direction, PropertyManagerProperty, StringListValidator)
+from mantid.api import (DistributedDataProcessorAlgorithm, MatrixWorkspaceProperty, PropertyMode, IEventWorkspace)
+
+from sans.state.state_base import create_deserialized_sans_state_from_property_manager
+from sans.common.constants import EMPTY_NAME
+from sans.common.general_functions import (create_child_algorithm, append_to_sans_file_tag)
+from sans.common.enums import (DetectorType, DataType)
+
+
+class SANSReductionCoreBase(DistributedDataProcessorAlgorithm):
+    def _pyinit_input(self):
+        # ----------
+        # INPUT
+        # ----------
+        self.declareProperty(PropertyManagerProperty('SANSState'),
+                             doc='A property manager which fulfills the SANSState contract.')
+
+        # WORKSPACES
+        # Scatter Workspaces
+        self.declareProperty(MatrixWorkspaceProperty('ScatterWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The scatter workspace. This workspace does not contain monitors.')
+        self.declareProperty(MatrixWorkspaceProperty('ScatterMonitorWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The scatter monitor workspace. This workspace only contains monitors.')
+
+        # Transmission Workspace
+        self.declareProperty(MatrixWorkspaceProperty('TransmissionWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The transmission workspace.')
+
+        # Direct Workspace
+        self.declareProperty(MatrixWorkspaceProperty('DirectWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The direct workspace.')
+
+        self.setPropertyGroup("ScatterWorkspace", 'Data')
+        self.setPropertyGroup("ScatterMonitorWorkspace", 'Data')
+        self.setPropertyGroup("TransmissionWorkspace", 'Data')
+        self.setPropertyGroup("DirectWorkspace", 'Data')
+
+        # The component
+        allowed_detectors = StringListValidator([DetectorType.to_string(DetectorType.LAB),
+                                                 DetectorType.to_string(DetectorType.HAB)])
+        self.declareProperty("Component", DetectorType.to_string(DetectorType.LAB),
+                             validator=allowed_detectors, direction=Direction.Input,
+                             doc="The component of the instrument which is to be reduced.")
+
+        # The data type
+        allowed_data = StringListValidator([DataType.to_string(DataType.Sample),
+                                            DataType.to_string(DataType.Can)])
+        self.declareProperty("DataType", DataType.to_string(DataType.Sample),
+                             validator=allowed_data, direction=Direction.Input,
+                             doc="The data type (sample or can) which is to be reduced.")
+
+    def _pyinit_output(self):
+        # ----------
+        # OUTPUT
+        # ----------
+        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", '', direction=Direction.Output),
+                             doc='The output workspace.')
+
+        self.declareProperty(MatrixWorkspaceProperty('SumOfCounts', '', optional=PropertyMode.Optional,
+                                                     direction=Direction.Output),
+                             doc='The sum of the counts of the output workspace.')
+
+        self.declareProperty(MatrixWorkspaceProperty('SumOfNormFactors', '', optional=PropertyMode.Optional,
+                                                     direction=Direction.Output),
+                             doc='The sum of the normalization factors of the output workspace.')
+
+        self.declareProperty(MatrixWorkspaceProperty('CalculatedTransmissionWorkspace', '', optional=PropertyMode.Optional,
+                                                     direction=Direction.Output),
+                             doc='The calculated transmission workspace')
+
+        self.declareProperty(MatrixWorkspaceProperty('UnfittedTransmissionWorkspace', '', optional=PropertyMode.Optional,
+                                                     direction=Direction.Output),
+                             doc='The unfitted transmission workspace')
+
+    def _get_cropped_workspace(self, component):
+        scatter_workspace = self.getProperty("ScatterWorkspace").value
+        crop_name = "SANSCrop"
+        crop_options = {"InputWorkspace": scatter_workspace,
+                        "OutputWorkspace": EMPTY_NAME,
+                        "Component": component}
+        crop_alg = create_child_algorithm(self, crop_name, **crop_options)
+        crop_alg.execute()
+        return crop_alg.getProperty("OutputWorkspace").value
+
+    def _slice(self, state_serialized, workspace, monitor_workspace, data_type_as_string):
+        slice_name = "SANSSliceEvent"
+        slice_options = {"SANSState": state_serialized,
+                         "InputWorkspace": workspace,
+                         "InputWorkspaceMonitor": monitor_workspace,
+                         "OutputWorkspace": EMPTY_NAME,
+                         "OutputWorkspaceMonitor": "dummy2",
+                         "DataType": data_type_as_string}
+        slice_alg = create_child_algorithm(self, slice_name, **slice_options)
+        slice_alg.execute()
+
+        workspace = slice_alg.getProperty("OutputWorkspace").value
+        monitor_workspace = slice_alg.getProperty("OutputWorkspaceMonitor").value
+        slice_event_factor = slice_alg.getProperty("SliceEventFactor").value
+        return workspace, monitor_workspace, slice_event_factor
+
+    def _move(self, state_serialized, workspace, component, is_transmission=False):
+        # First we set the workspace to zero, since it might have been moved around by the user in the ADS
+        # Second we use the initial move to bring the workspace into the correct position
+        move_name = "SANSMove"
+        move_options = {"SANSState": state_serialized,
+                        "Workspace": workspace,
+                        "MoveType": "SetToZero",
+                        "Component": ""}
+        move_alg = create_child_algorithm(self, move_name, **move_options)
+        move_alg.execute()
+        workspace = move_alg.getProperty("Workspace").value
+
+        # Do the initial move
+        move_alg.setProperty("MoveType", "InitialMove")
+        move_alg.setProperty("Component", component)
+        move_alg.setProperty("Workspace", workspace)
+        move_alg.setProperty("IsTransmissionWorkspace", is_transmission)
+        move_alg.execute()
+        return move_alg.getProperty("Workspace").value
+
+    def _mask(self, state_serialized, workspace, component):
+        mask_name = "SANSMaskWorkspace"
+        mask_options = {"SANSState": state_serialized,
+                        "Workspace": workspace,
+                        "Component": component}
+        mask_alg = create_child_algorithm(self, mask_name, **mask_options)
+        mask_alg.execute()
+        return mask_alg.getProperty("Workspace").value
+
+    def _convert_to_wavelength(self, state_serialized, workspace):
+        wavelength_name = "SANSConvertToWavelength"
+        wavelength_options = {"SANSState": state_serialized,
+                              "InputWorkspace": workspace}
+        wavelength_alg = create_child_algorithm(self, wavelength_name, **wavelength_options)
+        wavelength_alg.setPropertyValue("OutputWorkspace", EMPTY_NAME)
+        wavelength_alg.setProperty("OutputWorkspace", workspace)
+        wavelength_alg.execute()
+        return wavelength_alg.getProperty("OutputWorkspace").value
+
+    def _scale(self, state_serialized, workspace):
+        scale_name = "SANSScale"
+        scale_options = {"SANSState": state_serialized,
+                         "InputWorkspace": workspace,
+                         "OutputWorkspace": EMPTY_NAME}
+        scale_alg = create_child_algorithm(self, scale_name, **scale_options)
+        scale_alg.execute()
+        return scale_alg.getProperty("OutputWorkspace").value
+
+    def _adjustment(self, state_serialized, workspace, monitor_workspace, component_as_string, data_type):
+        transmission_workspace = self._get_transmission_workspace()
+        direct_workspace = self._get_direct_workspace()
+
+        adjustment_name = "SANSCreateAdjustmentWorkspaces"
+        adjustment_options = {"SANSState": state_serialized,
+                              "Component": component_as_string,
+                              "DataType": data_type,
+                              "MonitorWorkspace": monitor_workspace,
+                              "SampleData": workspace,
+                              "OutputWorkspaceWavelengthAdjustment": EMPTY_NAME,
+                              "OutputWorkspacePixelAdjustment": EMPTY_NAME,
+                              "OutputWorkspaceWavelengthAndPixelAdjustment": EMPTY_NAME}
+        if transmission_workspace:
+            transmission_workspace = self._move(state_serialized, transmission_workspace, component_as_string,
+                                                is_transmission=True)
+            adjustment_options.update({"TransmissionWorkspace": transmission_workspace})
+
+        if direct_workspace:
+            direct_workspace = self._move(state_serialized, direct_workspace, component_as_string, is_transmission=True)
+            adjustment_options.update({"DirectWorkspace": direct_workspace})
+
+        adjustment_alg = create_child_algorithm(self, adjustment_name, **adjustment_options)
+        adjustment_alg.execute()
+
+        wavelength_adjustment = adjustment_alg.getProperty("OutputWorkspaceWavelengthAdjustment").value
+        pixel_adjustment = adjustment_alg.getProperty("OutputWorkspacePixelAdjustment").value
+        wavelength_and_pixel_adjustment = adjustment_alg.getProperty(
+                                           "OutputWorkspaceWavelengthAndPixelAdjustment").value
+        calculated_transmission_workspace = adjustment_alg.getProperty("CalculatedTransmissionWorkspace").value
+        unfitted_transmission_workspace = adjustment_alg.getProperty("UnfittedTransmissionWorkspace").value
+        return wavelength_adjustment, pixel_adjustment, wavelength_and_pixel_adjustment, \
+            calculated_transmission_workspace, unfitted_transmission_workspace
+
+    def _copy_bin_masks(self, workspace, dummy_workspace):
+        mask_options = {"InputWorkspace": workspace,
+                        "MaskedWorkspace": dummy_workspace,
+                        "OutputWorkspace": EMPTY_NAME}
+        mask_alg = create_child_algorithm(self, "MaskBinsFromWorkspace", **mask_options)
+        mask_alg.execute()
+        return mask_alg.getProperty("OutputWorkspace").value
+
+    def _convert_to_histogram(self, workspace):
+        if isinstance(workspace, IEventWorkspace):
+            convert_name = "RebinToWorkspace"
+            convert_options = {"WorkspaceToRebin": workspace,
+                               "WorkspaceToMatch": workspace,
+                               "OutputWorkspace": "OutputWorkspace",
+                               "PreserveEvents": False}
+            convert_alg = create_child_algorithm(self, convert_name, **convert_options)
+            convert_alg.execute()
+            workspace = convert_alg.getProperty("OutputWorkspace").value
+            append_to_sans_file_tag(workspace, "_histogram")
+
+        return workspace
+
+    def _convert_to_q(self, state_serialized, workspace, wavelength_adjustment_workspace, pixel_adjustment_workspace,
+                      wavelength_and_pixel_adjustment_workspace):
+        """
+        A conversion to momentum transfer is performed in this step.
+
+        The conversion can be either to the modulus of Q in which case the output is a 1D workspace, or it can
+        be a 2D reduction where the y axis is Qy, ie it is a numeric axis.
+        @param state_serialized: a serialized SANSState (property manager).
+        @param workspace: the workspace to convert to momentum transfer.
+        @param wavelength_adjustment_workspace: the wavelength adjustment workspace.
+        @param pixel_adjustment_workspace: the pixel adjustment workspace.
+        @param wavelength_and_pixel_adjustment_workspace: the wavelength and pixel adjustment workspace.
+        @return: a reduced workspace
+        """
+        convert_name = "SANSConvertToQ"
+        convert_options = {"InputWorkspace": workspace,
+                           "OutputWorkspace": EMPTY_NAME,
+                           "SANSState": state_serialized,
+                           "OutputParts": True}
+        if wavelength_adjustment_workspace:
+            convert_options.update({"InputWorkspaceWavelengthAdjustment": wavelength_adjustment_workspace})
+        if pixel_adjustment_workspace:
+            convert_options.update({"InputWorkspacePixelAdjustment": pixel_adjustment_workspace})
+        if wavelength_and_pixel_adjustment_workspace:
+            convert_options.update({"InputWorkspaceWavelengthAndPixelAdjustment":
+                                    wavelength_and_pixel_adjustment_workspace})
+        convert_alg = create_child_algorithm(self, convert_name, **convert_options)
+        convert_alg.execute()
+        data_workspace = convert_alg.getProperty("OutputWorkspace").value
+        sum_of_counts = convert_alg.getProperty("SumOfCounts").value
+        sum_of_norms = convert_alg.getProperty("SumOfNormFactors").value
+        return data_workspace, sum_of_counts, sum_of_norms
+
+    def _get_state(self):
+        state_property_manager = self.getProperty("SANSState").value
+        state = create_deserialized_sans_state_from_property_manager(state_property_manager)
+        state.property_manager = state_property_manager
+        return state
+
+    def _get_transmission_workspace(self):
+        transmission_workspace = self.getProperty("TransmissionWorkspace").value
+        return self._get_cloned_workspace(transmission_workspace) if transmission_workspace else None
+
+    def _get_direct_workspace(self):
+        direct_workspace = self.getProperty("DirectWorkspace").value
+        return self._get_cloned_workspace(direct_workspace) if direct_workspace else None
+
+    def _get_monitor_workspace(self):
+        monitor_workspace = self.getProperty("ScatterMonitorWorkspace").value
+        return self._get_cloned_workspace(monitor_workspace)
+
+    def _get_cloned_workspace(self, workspace):
+        clone_name = "CloneWorkspace"
+        clone_options = {"InputWorkspace": workspace,
+                         "OutputWorkspace": EMPTY_NAME}
+        clone_alg = create_child_algorithm(self, clone_name, **clone_options)
+        clone_alg.execute()
+        return clone_alg.getProperty("OutputWorkspace").value
+
+    def _check_compatibility_mode(self, workspace, monitor_workspace, compatibility):
+        is_event_workspace = isinstance(workspace, IEventWorkspace)
+        use_dummy_workspace = False
+        dummy_mask_workspace = None
+        if is_event_workspace:
+            if compatibility.use_compatibility_mode:
+                # We convert the workspace here to a histogram workspace, since we cannot otherwise
+                # compare the results between the old and the new reduction workspace in a meaningful manner.
+                # The old one is histogram and the new one is event.
+                # Rebin to monitor workspace
+                if compatibility.time_rebin_string:
+                    rebin_name = "Rebin"
+                    rebin_option = {"InputWorkspace": workspace,
+                                    "Params": compatibility.time_rebin_string,
+                                    "OutputWorkspace": EMPTY_NAME,
+                                    "PreserveEvents": False}
+                    rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
+                    rebin_alg.execute()
+                    workspace = rebin_alg.getProperty("OutputWorkspace").value
+                else:
+                    rebin_name = "RebinToWorkspace"
+                    rebin_option = {"WorkspaceToRebin": workspace,
+                                    "WorkspaceToMatch": monitor_workspace,
+                                    "OutputWorkspace": EMPTY_NAME,
+                                    "PreserveEvents": False}
+                    rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
+                    rebin_alg.execute()
+                    workspace = rebin_alg.getProperty("OutputWorkspace").value
+            else:
+                # If not using compatibility mode, we create a histogram from the workspace, which will store
+                # the bin masking.
+                # Extract a single spectrum to make operations as quick as possible.
+                # We only need the mask flags, not the y data.
+                use_dummy_workspace = True
+
+                # Extract only a single spectrum so the dummy workspace which contains bin masks is as small as possible
+                # (cheaper operations).
+                # This is fine because we only care about the mask flags in this workspace, not the y data.
+                extract_spectrum_name = "ExtractSingleSpectrum"
+                extract_spectrum_option = {"InputWorkspace": workspace,
+                                           "OutputWorkspace": "dummy_mask_workspace",
+                                           "WorkspaceIndex": 0}
+                extract_spectrum_alg = create_child_algorithm(self, extract_spectrum_name, **extract_spectrum_option)
+                extract_spectrum_alg.execute()
+                dummy_mask_workspace = extract_spectrum_alg.getProperty("OutputWorkspace").value
+
+                rebin_name = "RebinToWorkspace"
+                rebin_option = {"WorkspaceToRebin": dummy_mask_workspace,
+                                "WorkspaceToMatch": monitor_workspace,
+                                "OutputWorkspace": "dummy_mask_workspace",
+                                "PreserveEvents": False}
+                rebin_alg = create_child_algorithm(self, rebin_name, **rebin_option)
+                rebin_alg.execute()
+                dummy_mask_workspace = rebin_alg.getProperty("OutputWorkspace").value
+        return workspace, dummy_mask_workspace, use_dummy_workspace
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreEventSlice.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreEventSlice.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ce2136e441eed5bc07160c6e0c1adb99b9ce62c
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCoreEventSlice.py
@@ -0,0 +1,171 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
+
+""" SANSReductionCoreEventSlice algorithm runs the sequence of reduction steps which are necessary to reduce a data set,
+for which data must be event sliced. These steps are: slicing, adjustment, convert to q."""
+
+from __future__ import (absolute_import, division, print_function)
+
+from mantid.api import (MatrixWorkspaceProperty, AlgorithmFactory, PropertyMode,
+                        Progress)
+from mantid.kernel import (Direction, PropertyManagerProperty, StringListValidator)
+from sans.common.enums import (DetectorType, DataType)
+
+from SANSReductionCoreBase import SANSReductionCoreBase
+
+
+class SANSReductionCoreEventSlice(SANSReductionCoreBase):
+    def category(self):
+        return 'SANS\\Reduction'
+
+    def summary(self):
+        return 'Runs the core reduction elements which need to be carried out ' \
+               'on individual event slices.'
+
+    def PyInit(self):
+        # ----------
+        # INPUT
+        # ----------
+        self.declareProperty(PropertyManagerProperty('SANSState'),
+                             doc='A property manager which fulfills the SANSState contract.')
+
+        # WORKSPACES
+        # Scatter Workspaces
+        self.declareProperty(MatrixWorkspaceProperty('ScatterWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The scatter workspace. This workspace does not contain monitors.')
+        self.declareProperty(MatrixWorkspaceProperty('DummyMaskWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The histogram workspace containing mask bins for the event workspace, to be copied '
+                                 'over after event slicing.')
+        self.declareProperty(MatrixWorkspaceProperty('ScatterMonitorWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The scatter monitor workspace. This workspace only contains monitors.')
+        # Direct Workspace
+        self.declareProperty(MatrixWorkspaceProperty('DirectWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The direct workspace.')
+        # Transmission Workspace
+        self.declareProperty(MatrixWorkspaceProperty('TransmissionWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The transmission workspace')
+
+        self.setPropertyGroup("ScatterWorkspace", 'Data')
+        self.setPropertyGroup("ScatterMonitorWorkspace", 'Data')
+        self.setPropertyGroup("DummyMaskWorkspace", 'Data')
+        self.setPropertyGroup("DirectWorkspace", 'Data')
+        self.setPropertyGroup("TransmissionWorkspace", 'Data')
+
+        # The component
+        allowed_detectors = StringListValidator([DetectorType.to_string(DetectorType.LAB),
+                                                 DetectorType.to_string(DetectorType.HAB)])
+        self.declareProperty("Component", DetectorType.to_string(DetectorType.LAB),
+                             validator=allowed_detectors, direction=Direction.Input,
+                             doc="The component of the instrument which is to be reduced.")
+
+        # The data type
+        allowed_data = StringListValidator([DataType.to_string(DataType.Sample),
+                                            DataType.to_string(DataType.Can)])
+        self.declareProperty("DataType", DataType.to_string(DataType.Sample),
+                             validator=allowed_data, direction=Direction.Input,
+                             doc="The data type, i.e. whether the reduction is for sample or can data.")
+
+        # ----------
+        # OUTPUT
+        # ----------
+        # SANSReductionCoreEventSlice has the same outputs as SANSReductionCore
+        self._pyinit_output()
+
+    def PyExec(self):
+        # Get the input
+        state = self._get_state()
+        state_serialized = state.property_manager
+        progress = self._get_progress()
+
+        workspace = self.getProperty("ScatterWorkspace").value
+        # --------------------------------------------------------------------------------------------------------------
+        # 1. Create event slice
+        #    This will cut out a time-based (user-defined) slice.
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Event slicing ...")
+        data_type_as_string = self.getProperty("DataType").value
+        monitor_workspace = self._get_monitor_workspace()
+        workspace, monitor_workspace, slice_event_factor = self._slice(state_serialized, workspace, monitor_workspace,
+                                                                       data_type_as_string)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # 2. Create adjustment workspaces, those are
+        #     1. pixel-based adjustments
+        #     2. wavelength-based adjustments
+        #     3. pixel-and-wavelength-based adjustments
+        # --------------------------------------------------------------------------------------------------------------
+        component_as_string = self.getProperty("Component").value
+        data_type_as_string = self.getProperty("DataType").value
+        progress.report("Creating adjustment workspaces ...")
+        wavelength_adjustment_workspace, pixel_adjustment_workspace, wavelength_and_pixel_adjustment_workspace, \
+            calculated_transmission_workspace, unfitted_transmission_workspace = \
+            self._adjustment(state_serialized, workspace, monitor_workspace, component_as_string, data_type_as_string)
+
+        # ------------------------------------------------------------
+        # 3. Convert event workspaces to histogram workspaces
+        # ------------------------------------------------------------
+        progress.report("Converting to histogram mode ...")
+        workspace = self._convert_to_histogram(workspace)
+
+        # ------------------------------------------------------------
+        # 4. Re-mask. We need to bin mask in histogram mode in order
+        #    to have knowledge of masked regions: masking
+        #    EventWorkspaces simply removes their events
+        # ------------------------------------------------------------
+        dummy_mask_workspace = self.getProperty("DummyMaskWorkspace").value
+        workspace = self._copy_bin_masks(workspace, dummy_mask_workspace)
+
+        # ------------------------------------------------------------
+        # 5. Convert to Q
+        # -----------------------------------------------------------
+        progress.report("Converting to q ...")
+        workspace, sum_of_counts, sum_of_norms = self._convert_to_q(state_serialized,
+                                                                    workspace,
+                                                                    wavelength_adjustment_workspace,
+                                                                    pixel_adjustment_workspace,
+                                                                    wavelength_and_pixel_adjustment_workspace)
+
+        progress.report("Completed SANSReductionCoreEventSlice...")
+
+        # ------------------------------------------------------------
+        # Populate the output
+        # ------------------------------------------------------------
+        self.setProperty("OutputWorkspace", workspace)
+
+        # ------------------------------------------------------------
+        # Diagnostic output
+        # ------------------------------------------------------------
+        if sum_of_counts:
+            self.setProperty("SumOfCounts", sum_of_counts)
+        if sum_of_norms:
+            self.setProperty("SumOfNormFactors", sum_of_norms)
+
+        self.setProperty("CalculatedTransmissionWorkspace", calculated_transmission_workspace)
+        self.setProperty("UnfittedTransmissionWorkspace", unfitted_transmission_workspace)
+
+    def validateInputs(self):
+        errors = dict()
+        # Check that the input can be converted into the right state object
+        try:
+            state = self._get_state()
+            state.validate()
+        except ValueError as err:
+            errors.update({"SANSReductionCoreEventSlice": str(err)})
+        return errors
+
+    def _get_progress(self):
+        return Progress(self, start=0.0, end=1.0, nreports=5)
+
+
+# Register algorithm with Mantid
+AlgorithmFactory.subscribe(SANSReductionCoreEventSlice)
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCorePreprocess.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCorePreprocess.py
new file mode 100644
index 0000000000000000000000000000000000000000..f006f22215e02a98fda9c8a0b7af353e99e84b36
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSReductionCorePreprocess.py
@@ -0,0 +1,136 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
+
+""" SANSReductionCorePreprocess algorithm runs the sequence of reduction steps which are necessary to reduce a data set,
+which can be performed before event slicing."""
+
+from __future__ import (absolute_import, division, print_function)
+
+from mantid.api import MatrixWorkspaceProperty, AlgorithmFactory, Progress
+from mantid.kernel import Direction
+from sans.algorithm_detail.mask_workspace import mask_bins
+from sans.common.enums import DetectorType
+
+from SANSReductionCoreBase import SANSReductionCoreBase
+
+
+class SANSReductionCorePreprocess(SANSReductionCoreBase):
+    def category(self):
+        return 'SANS\\Reduction'
+
+    def summary(self):
+        return 'Runs the initial core reduction elements. These are the steps which ' \
+               'can be carried out before event slicing.'
+
+    def PyInit(self):
+        # ----------
+        # INPUT
+        # ----------
+        # SANSReductionCorePreprocess has the same inputs as SANSReductionCore
+        self._pyinit_input()
+
+        # ----------
+        # OUTPUT
+        # ----------
+        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", '', direction=Direction.Output),
+                             doc='The output workspace.')
+        self.declareProperty(MatrixWorkspaceProperty("DummyMaskWorkspace", '', direction=Direction.Output),
+                             doc='The histogram workspace which contains bin masks for non-compatibility mode.')
+
+        self.declareProperty(MatrixWorkspaceProperty("OutputMonitorWorkspace", '', direction=Direction.Output),
+                             doc='The output monitor workspace.')
+
+    def PyExec(self):
+        # Get the input
+        state = self._get_state()
+        state_serialized = state.property_manager
+        component_as_string = self.getProperty("Component").value
+        progress = self._get_progress()
+
+        # --------------------------------------------------------------------------------------------------------------
+        # 1. Crop workspace by detector name
+        #    This will create a reduced copy of the original workspace with only those spectra which are relevant
+        #    for this particular reduction.
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Cropping ...")
+        workspace = self._get_cropped_workspace(component_as_string)
+
+        # --------------------------------------------------------------------------------------------
+        # 2. Perform dark run subtraction
+        #    This will subtract a dark background from the scatter workspace. Note that dark background subtraction
+        #    will also affect the transmission calculation later on.
+        # --------------------------------------------------------------------------------------------------------------
+
+        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+        # COMPATIBILITY BEGIN
+        # IMPORTANT: This section of the code should only be temporary. It allows us to convert to histogram
+        # early on and hence compare the new reduction results with the output of the old reduction chain.
+        # Once the new reduction chain is established, we should remove the compatibility feature.
+        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+        monitor_workspace = self._get_monitor_workspace()
+        workspace, dummy_mask_workspace, \
+            use_dummy_workspace = self._check_compatibility_mode(workspace, monitor_workspace, state.compatibility)
+
+        # ------------------------------------------------------------
+        # 3. Move the workspace into the correct position
+        #    The detectors in the workspaces are set such that the beam centre is at (0,0). The position is
+        #    a user-specified value which can be obtained with the help of the beam centre finder.
+        # ------------------------------------------------------------
+        progress.report("Moving ...")
+        workspace = self._move(state_serialized, workspace, component_as_string)
+        monitor_workspace = self._move(state_serialized, monitor_workspace, component_as_string)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # 4. Apply masking (pixel masking and time masking)
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Masking ...")
+        workspace = self._mask(state_serialized, workspace, component_as_string)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # 5. Convert to Wavelength
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Converting to wavelength ...")
+        workspace = self._convert_to_wavelength(state_serialized, workspace)
+        # Convert and rebin the dummy workspace to get correct bin flags
+        if use_dummy_workspace:
+            dummy_mask_workspace = mask_bins(state.mask, dummy_mask_workspace,
+                                             DetectorType.from_string(component_as_string))
+            dummy_mask_workspace = self._convert_to_wavelength(state_serialized, dummy_mask_workspace)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # 6. Multiply by volume and absolute scale
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Multiplying by volume and absolute scale ...")
+        workspace = self._scale(state_serialized, workspace)
+
+        progress.report("Completed SANSReductionCorePreprocess ...")
+
+        # ------------------------------------------------------------
+        # Populate the output
+        # ------------------------------------------------------------
+        self.setProperty("OutputWorkspace", workspace)
+        if use_dummy_workspace:
+            self.setProperty("DummyMaskWorkspace", dummy_mask_workspace)
+        self.setProperty("OutputMonitorWorkspace", monitor_workspace)
+
+    def validateInputs(self):
+        errors = dict()
+        # Check that the input can be converted into the right state object
+        try:
+            state = self._get_state()
+            state.validate()
+        except ValueError as err:
+            errors.update({"SANSReductionCorePreprocess": str(err)})
+        return errors
+
+    def _get_progress(self):
+        return Progress(self, start=0.0, end=1.0, nreports=6)
+
+
+# Register algorithm with Mantid
+AlgorithmFactory.subscribe(SANSReductionCorePreprocess)
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction.py
index b3d1d01b19bdc1288697e3b47129c7641ad390df..2a8a680f654f791af065023ce27e0c265a2cbef3 100644
--- a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction.py
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction.py
@@ -9,89 +9,28 @@
 """ SANSSingleReduction algorithm performs a single reduction."""
 
 from __future__ import (absolute_import, division, print_function)
-from mantid.kernel import (Direction, PropertyManagerProperty, Property)
-from mantid.api import (DistributedDataProcessorAlgorithm, MatrixWorkspaceProperty, AlgorithmFactory, PropertyMode, Progress)
+
+from mantid.api import (MatrixWorkspaceProperty, AlgorithmFactory, PropertyMode)
+from mantid.kernel import (Direction, Property)
 from mantid.simpleapi import CloneWorkspace
-from sans.state.state_base import create_deserialized_sans_state_from_property_manager
+from sans.algorithm_detail.single_execution import (run_core_reduction, run_optimized_for_can)
 from sans.common.enums import (ReductionMode, DataType, ISISReductionMode, FitType)
-from sans.common.general_functions import (create_child_algorithm, does_can_workspace_exist_on_ads)
-from sans.algorithm_detail.single_execution import (run_core_reduction, get_final_output_workspaces,
-                                                    get_merge_bundle_for_merge_request, run_optimized_for_can)
-from sans.algorithm_detail.bundles import ReductionSettingBundle
-from sans.algorithm_detail.strip_end_nans_and_infs import strip_end_nans
+from sans.common.general_functions import does_can_workspace_exist_on_ads
+
+from SANSSingleReductionBase import SANSSingleReductionBase
 
 
-class SANSSingleReduction(DistributedDataProcessorAlgorithm):
+class SANSSingleReduction(SANSSingleReductionBase):
     def category(self):
         return 'SANS\\Reduction'
 
+    def version(self):
+        return 1
+
     def summary(self):
         return 'Performs a single reduction of SANS data.'
 
-    def PyInit(self):
-        # ----------
-        # INPUT
-        # ----------
-        self.declareProperty(PropertyManagerProperty('SANSState'),
-                             doc='A property manager which fulfills the SANSState contract.')
-
-        self.declareProperty("UseOptimizations", True, direction=Direction.Input,
-                             doc="When enabled the ADS is being searched for already loaded and reduced workspaces. "
-                                 "Depending on your concrete reduction, this could provide a significant"
-                                 " performance boost")
-
-        self.declareProperty("SaveCan", False, direction=Direction.Input,
-                             doc="When enabled, the unsubtracted can and sam workspaces are added to the ADS.")
-
-        # Sample Scatter Workspaces
-        self.declareProperty(MatrixWorkspaceProperty('SampleScatterWorkspace', '',
-                                                     optional=PropertyMode.Mandatory, direction=Direction.Input),
-                             doc='The sample scatter workspace. This workspace does not contain monitors.')
-        self.declareProperty(MatrixWorkspaceProperty('SampleScatterMonitorWorkspace', '',
-                                                     optional=PropertyMode.Mandatory, direction=Direction.Input),
-                             doc='The sample scatter monitor workspace. This workspace only contains monitors.')
-
-        # Sample Transmission Workspace
-        self.declareProperty(MatrixWorkspaceProperty('SampleTransmissionWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The sample transmission workspace.')
-
-        # Sample Direct Workspace
-        self.declareProperty(MatrixWorkspaceProperty('SampleDirectWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The sample scatter direct workspace.')
-
-        self.setPropertyGroup("SampleScatterWorkspace", 'Sample')
-        self.setPropertyGroup("SampleScatterMonitorWorkspace", 'Sample')
-        self.setPropertyGroup("SampleTransmissionWorkspace", 'Sample')
-        self.setPropertyGroup("SampleDirectWorkspace", 'Sample')
-
-        # Can Scatter Workspaces
-        self.declareProperty(MatrixWorkspaceProperty('CanScatterWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The can scatter workspace. This workspace does not contain monitors.')
-        self.declareProperty(MatrixWorkspaceProperty('CanScatterMonitorWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The can scatter monitor workspace. This workspace only contains monitors.')
-
-        # Sample Transmission Workspace
-        self.declareProperty(MatrixWorkspaceProperty('CanTransmissionWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The can transmission workspace.')
-
-        # Sample Direct Workspace
-        self.declareProperty(MatrixWorkspaceProperty('CanDirectWorkspace', '',
-                                                     optional=PropertyMode.Optional, direction=Direction.Input),
-                             doc='The sample scatter direct workspace.')
-
-        self.setPropertyGroup("CanScatterWorkspace", 'Can')
-        self.setPropertyGroup("CanScatterMonitorWorkspace", 'Can')
-        self.setPropertyGroup("CanTransmissionWorkspace", 'Can')
-        self.setPropertyGroup("CanDirectWorkspace", 'Can')
-
-        # ----------
-        # OUTPUT
-        # ----------
+    def _declare_output_properties(self):
         self.declareProperty('OutScaleFactor', defaultValue=Property.EMPTY_DBL, direction=Direction.Output,
                              doc='Applied scale factor.')
 
@@ -164,33 +103,38 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
         self.setPropertyGroup("OutputWorkspaceHABCanCount", 'Opt Output')
         self.setPropertyGroup("OutputWorkspaceHABCanNorm", 'Opt Output')
 
-    def PyExec(self):
-        # Get state
-        state = self._get_state()
-
-        # Get reduction mode
-        overall_reduction_mode = self._get_reduction_mode(state)
-
-        # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
-        # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
-        # might have more detectors though (or different types)
-        reduction_setting_bundles = self._get_reduction_setting_bundles(state, overall_reduction_mode)
+    def PyInit(self):
+        self._pyinit()
 
-        # Run core reductions
-        use_optimizations = self.getProperty("UseOptimizations").value
-        save_can = self.getProperty("SaveCan").value
+    def PyExec(self):
+        self._pyexec()
 
-        # Create the reduction core algorithm
-        reduction_name = "SANSReductionCore"
-        reduction_options = {}
-        reduction_alg = create_child_algorithm(self, reduction_name, **reduction_options)
+    @staticmethod
+    def _reduction_name():
+        return "SANSReductionCore"
 
-        # Set up progress
-        progress = self._get_progress(len(reduction_setting_bundles), overall_reduction_mode)
+    def do_initial_reduction(self, state, overall_reduction_mode):
+        """
+        Version 1 does not have an initial reduction.
+        This method is required for compatibility with version 2.
+        This method creates bundles for the main reduction.
+        """
+        return [self._get_reduction_setting_bundles(state, overall_reduction_mode)]
 
-        # --------------------------------------------------------------------------------------------------------------
-        # Reduction
-        # --------------------------------------------------------------------------------------------------------------
+    def do_reduction(self, reduction_alg, reduction_setting_bundles, use_optimizations, progress):
+        """
+        Perform the main reduction.
+        :param reduction_alg: SANSReductionCore algorithm
+        :param reduction_setting_bundles: a list of lists containing workspaces to be reduced.
+                                          The outer list is for compatibility with version 2
+                                          and only contains one inner list
+        :param use_optimizations: bool. If true, use can optimizations
+        :param progress: a progress bar
+        :return: output_bundles: a list containing a single list of output workspaces
+                 output_parts_bundles: a list containing a single list of output workspaces
+                 output_transmission_bundles: a list containing transmission workspaces
+        """
+        reduction_setting_bundles = reduction_setting_bundles[0]
         output_bundles = []
         output_parts_bundles = []
         output_transmission_bundles = []
@@ -199,161 +143,33 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
             # We want to make use of optimizations here. If a can workspace has already been reduced with the same can
             # settings and is stored in the ADS, then we should use it (provided the user has optimizations enabled).
             if use_optimizations and reduction_setting_bundle.data_type is DataType.Can:
-                output_bundle, output_parts_bundle, output_transmission_bundle = run_optimized_for_can(reduction_alg,
-                                                                                                       reduction_setting_bundle)
+                output_bundle, output_parts_bundle, \
+                    output_transmission_bundle = run_optimized_for_can(reduction_alg, reduction_setting_bundle)
             else:
-                output_bundle, output_parts_bundle, output_transmission_bundle = run_core_reduction(reduction_alg,
-                                                                                                    reduction_setting_bundle)
+                output_bundle, output_parts_bundle, \
+                    output_transmission_bundle = run_core_reduction(reduction_alg, reduction_setting_bundle)
             output_bundles.append(output_bundle)
             output_parts_bundles.append(output_parts_bundle)
             output_transmission_bundles.append(output_transmission_bundle)
+        return [output_bundles], [output_parts_bundles], output_transmission_bundles
+
+    def set_shift_and_scale_output(self, scale_factors, shift_factors):
+        self.setProperty("OutScaleFactor", scale_factors[0])
+        self.setProperty("OutShiftFactor", shift_factors[0])
 
-        reduction_mode_vs_output_workspaces = {}
-
-        # --------------------------------------------------------------------------------------------------------------
-        # Deal with non-merged
-        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
-        # --------------------------------------------------------------------------------------------------------------
-        progress.report("Final clean up...")
-        output_workspaces_non_merged = get_final_output_workspaces(output_bundles, self)
-        reduction_mode_vs_output_workspaces.update(output_workspaces_non_merged)
-
-        # --------------------------------------------------------------------------------------------------------------
-        # Deal with merging
-        # --------------------------------------------------------------------------------------------------------------
-        # Merge if required with stitching etc.
-        if overall_reduction_mode is ReductionMode.Merged:
-            progress.report("Merging reductions ...")
-            merge_bundle = get_merge_bundle_for_merge_request(output_parts_bundles, self)
-            self.set_shift_and_scale_output(merge_bundle)
-            reduction_mode_vs_output_workspaces.update({ReductionMode.Merged: merge_bundle.merged_workspace})
-            scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace, self)
-            reduction_mode_vs_output_workspaces.update({ISISReductionMode.HAB: scaled_HAB})
-
-        # --------------------------------------------------------------------------------------------------------------
-        # Set the output workspaces
-        # --------------------------------------------------------------------------------------------------------------
-        # Set sample logs
-        # Todo: Set sample log -> unfitted transmission workspace. Should probably set on
-        # higher level (SANSBatch)
-        # Set the output workspaces
-        self.set_output_workspaces(reduction_mode_vs_output_workspaces)
-
-        # --------------------------------------------------------------------------------------------------------------
-        # Set the reduced can workspaces on the output if optimizations are
-        # enabled. This will allow SANSBatchReduction to add them to the ADS.
-        # --------------------------------------------------------------------------------------------------------------
-        if use_optimizations:
-            self.set_reduced_can_workspace_on_output(output_bundles, output_parts_bundles)
-
-        if save_can:
-            self.set_can_and_sam_on_output(output_bundles)
-
-        self.set_transmission_workspaces_on_output(output_transmission_bundles,
-                                                   state.adjustment.calculate_transmission.fit)
-
-    def validateInputs(self):
-        errors = dict()
-        # Check that the input can be converted into the right state object
-        try:
-            state = self._get_state()
-            state.validate()
-        except ValueError as err:
-            errors.update({"SANSSingleReduction": str(err)})
-        return errors
-
-    def _get_state(self):
-        state_property_manager = self.getProperty("SANSState").value
-        state = create_deserialized_sans_state_from_property_manager(state_property_manager)
-        state.property_manager = state_property_manager
-        return state
-
-    def _get_reduction_mode(self, state):
-        reduction_info = state.reduction
-        reduction_mode = reduction_info.reduction_mode
-        return reduction_mode
-
-    def _get_reduction_setting_bundles(self, state, reduction_mode):
-        # We need to output the parts if we request a merged reduction mode. This is necessary for stitching later on.
-        output_parts = reduction_mode is ReductionMode.Merged
-
-        # If the reduction mode is MERGED, then we need to make sure that all reductions for that selection
-        # are executed, i.e. we need to split it up
-        if reduction_mode is ReductionMode.Merged:
-            # If we are dealing with a merged reduction we need to know which detectors should be merged.
-            reduction_info = state.reduction
-            reduction_modes = reduction_info.get_merge_strategy()
-        elif reduction_mode is ReductionMode.All:
-            reduction_info = state.reduction
-            reduction_modes = reduction_info.get_all_reduction_modes()
-        else:
-            reduction_modes = [reduction_mode]
-
-        # Create the Scatter information
-        sample_info = self._create_reduction_bundles_for_data_type(state=state,
-                                                                   data_type=DataType.Sample,
-                                                                   reduction_modes=reduction_modes,
-                                                                   output_parts=output_parts,
-                                                                   scatter_name="SampleScatterWorkspace",
-                                                                   scatter_monitor_name="SampleScatterMonitorWorkspace",
-                                                                   transmission_name="SampleTransmissionWorkspace",
-                                                                   direct_name="SampleDirectWorkspace")
-
-        # Create the Can information
-        can_info = self._create_reduction_bundles_for_data_type(state=state,
-                                                                data_type=DataType.Can,
-                                                                reduction_modes=reduction_modes,
-                                                                output_parts=output_parts,
-                                                                scatter_name="CanScatterWorkspace",
-                                                                scatter_monitor_name="CanScatterMonitorWorkspace",
-                                                                transmission_name="CanTransmissionWorkspace",
-                                                                direct_name="CanDirectWorkspace")
-        reduction_setting_bundles = sample_info
-
-        # Make sure that the can information has at least a scatter and a monitor workspace
-        for can_bundle in can_info:
-            if can_bundle.scatter_workspace is not None and can_bundle.scatter_monitor_workspace is not None:
-                reduction_setting_bundles.append(can_bundle)
-        return reduction_setting_bundles
-
-    def _create_reduction_bundles_for_data_type(self, state, data_type, reduction_modes, output_parts,
-                                                scatter_name, scatter_monitor_name, transmission_name, direct_name):
-        # Get workspaces
-        scatter_workspace = self.getProperty(scatter_name).value
-
-        scatter_monitor_workspace = self.getProperty(scatter_monitor_name).value
-        transmission_workspace = self.getProperty(transmission_name).value
-        direct_workspace = self.getProperty(direct_name).value
-
-        # Iterate over all requested reduction types, i.e. LAB, HAB, ..
-        reduction_setting_bundles = []
-        for reduction_mode in reduction_modes:
-            reduction_setting_bundle = ReductionSettingBundle(state=state,
-                                                              data_type=data_type,
-                                                              reduction_mode=reduction_mode,
-                                                              output_parts=output_parts,
-                                                              scatter_workspace=scatter_workspace,
-                                                              scatter_monitor_workspace=scatter_monitor_workspace,
-                                                              transmission_workspace=transmission_workspace,
-                                                              direct_workspace=direct_workspace)
-            reduction_setting_bundles.append(reduction_setting_bundle)
-        return reduction_setting_bundles
-
-    def set_shift_and_scale_output(self, merge_bundle):
-        self.setProperty("OutScaleFactor", merge_bundle.scale)
-        self.setProperty("OutShiftFactor", merge_bundle.shift)
-
-    def set_output_workspaces(self, reduction_mode_vs_output_workspaces):
+    def set_output_workspaces(self, reduction_mode_vs_output_workspaces, reduction_mode_vs_workspace_names):
         """
         Sets the output workspaces which can be HAB, LAB or Merged.
 
         At this step we also provide a workspace name to the sample logs which can be used later on for saving
         :param reduction_mode_vs_output_workspaces:  map from reduction mode to output workspace
+        :param reduction_mode_vs_workspace_names: an unused dict. Required for version 2 compatibility
         """
         # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
         # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
         # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit
-        for reduction_mode, output_workspace in list(reduction_mode_vs_output_workspaces.items()):
+        for reduction_mode, output_workspaces in list(reduction_mode_vs_output_workspaces.items()):
+            output_workspace = output_workspaces[0]
             # In an MPI reduction output_workspace is produced on the master rank, skip others.
             if output_workspace is None:
                 continue
@@ -367,21 +183,17 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
                 raise RuntimeError("SANSSingleReduction: Cannot set the output workspace. The selected reduction "
                                    "mode {0} is unknown.".format(reduction_mode))
 
-    def set_reduced_can_workspace_on_output(self, output_bundles, output_bundles_part):
+    def set_reduced_can_workspace_on_output(self, output_bundles):
         """
         Sets the reduced can workspaces on the output properties.
 
         The reduced can workspaces can be:
         1. LAB Can
-        2. LAB Can Count
-        3. LAB Can Norm
         4. HAB Can
-        5. HAB Can Count
-        6. HAB Can Norm
-        :param output_bundles: a list of output bundles
-        :param output_bundles_part: a list of partial output bundles
+        :param output_bundles: a list containing a single list of output bundles
         """
         # Find the LAB Can and HAB Can entries if they exist
+        output_bundles = output_bundles[0]
         for output_bundle in output_bundles:
             if output_bundle.data_type is DataType.Can:
                 reduction_mode = output_bundle.reduction_mode
@@ -397,7 +209,19 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
                         raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
                                            " be set with a can.".format(reduction_mode))
 
+    def set_reduced_can_count_and_norm_on_output(self, output_bundles_parts):
+        """
+        Sets the reduced can count and norm group workspaces on the output properties.
+        The reduced can workspaces can be:
+        1. LAB Can Count
+        2. LAB Can Norm
+        3. HAB Can Count
+        4. HAB Can Norm
+
+        :param output_bundles_parts: a list containing a single list of output bundle parts
+        """
         # Find the partial output bundles fo LAB Can and HAB Can if they exist
+        output_bundles_part = output_bundles_parts[0]
         for output_bundle_part in output_bundles_part:
             if output_bundle_part.data_type is DataType.Can:
                 reduction_mode = output_bundle_part.reduction_mode
@@ -419,7 +243,7 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
                                            " be set with a partial can.".format(reduction_mode))
 
     def set_can_and_sam_on_output(self, output_bundles):
-        '''
+        """
         Sets the reduced can and sam workspaces.
         These can be:
         1. LAB Can
@@ -427,9 +251,9 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
         3. LAB Sample
         4. HAB Sample
         Cans are also output for optimization, so check for double output.
-        :param output_bundles: a list of output_bundles
-        '''
-
+        :param output_bundles: a list containing a single list of output_bundles
+        """
+        output_bundles = output_bundles[0]
         for output_bundle in output_bundles:
             if output_bundle.data_type is DataType.Can:
                 reduction_mode = output_bundle.reduction_mode
@@ -480,10 +304,23 @@ class SANSSingleReduction(DistributedDataProcessorAlgorithm):
                 raise RuntimeError("SANSSingleReduction: The data type {0} should be"
                                    " sample or can.".format(transmission_bundle.data_type))
 
-    def _get_progress(self, number_of_reductions, overall_reduction_mode):
-        number_from_merge = 1 if overall_reduction_mode is ReductionMode.Merged else 0
-        number_of_progress_reports = number_of_reductions + number_from_merge + 1
-        return Progress(self, start=0.0, end=1.0, nreports=number_of_progress_reports)
+    def _get_workspace_names(self, reduction_mode_vs_workspace_names, event_slice_bundle):
+        """
+        This method is for compatibility with version 2. It is not required for version 1
+        """
+        return reduction_mode_vs_workspace_names
+
+    def _get_merged_workspace_name(self, event_slice_part_bundle):
+        """
+        This method is for compatibility with version 2. It is not required for version 1
+        """
+        return ""
+
+    def _get_output_workspace_name(self, *args, **kwargs):
+        """
+        This method is for compatibility with version 2. It is not required for version 1
+        """
+        return ""
 
 
 # Register algorithm with Mantid
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction2.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction2.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b566a12fcd7a869567cb92c682ecc0fea0d8c6f
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReduction2.py
@@ -0,0 +1,530 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
+
+""" SANSSingleReduction version 2 algorithm performs a single reduction on event sliced data."""
+
+from __future__ import (absolute_import, division, print_function)
+
+from copy import deepcopy
+
+from mantid.api import (AlgorithmFactory, AnalysisDataService,
+                        MatrixWorkspaceProperty, PropertyMode,
+                        WorkspaceGroup, WorkspaceGroupProperty)
+from mantid.simpleapi import CloneWorkspace
+from mantid.kernel import Direction
+from sans.algorithm_detail.bundles import EventSliceSettingBundle
+from sans.algorithm_detail.single_execution import (run_initial_event_slice_reduction, run_core_event_slice_reduction,
+                                                    get_reduction_mode_vs_output_bundles, run_optimized_for_can)
+from sans.common.enums import (ReductionMode, DataType, ISISReductionMode, FitType)
+from sans.common.general_functions import (create_child_algorithm, does_can_workspace_exist_on_ads,
+                                           get_transmission_output_name, get_output_name)
+
+from SANSSingleReductionBase import SANSSingleReductionBase
+
+
+class SANSSingleReduction(SANSSingleReductionBase):
+    def category(self):
+        return 'SANS\\Reduction'
+
+    def version(self):
+        return 2
+
+    def summary(self):
+        return 'Performs a single reduction of SANS data, optimised for event slices.'
+
+    def _declare_output_properties(self):
+        self.declareProperty(MatrixWorkspaceProperty('OutShiftAndScaleFactor', '', optional=PropertyMode.Optional,
+                                                     direction=Direction.Output),
+                             doc='A workspace containing the applied shift factor as X data and applied scale factor '
+                                 'as Y data.')
+
+        # This breaks our flexibility with the reduction mode. We need to check if we can populate this based on
+        # the available reduction modes for the state input. TODO: check if this is possible
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceLAB', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The output workspace for the low-angle bank.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceHAB', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The output workspace for the high-angle bank.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceMerged', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The output workspace for the merged reduction.')
+        self.setPropertyGroup("OutShiftAndScaleFactor", 'Output')
+        self.setPropertyGroup("OutputWorkspaceLAB", 'Output')
+        self.setPropertyGroup("OutputWorkspaceHAB", 'Output')
+        self.setPropertyGroup("OutputWorkspaceMerged", 'Output')
+
+        # CAN output
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceLABCan', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can output workspace group for the low-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceHABCan', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can output workspace group for the high-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceLABSample', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The sample output workspace group for the low-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceHABSample', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The sample output workspace group for the high-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(MatrixWorkspaceProperty('OutputWorkspaceCalculatedTransmission', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The calculated transmission workspace.')
+        self.declareProperty(MatrixWorkspaceProperty('OutputWorkspaceUnfittedTransmission', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The unfitted transmission workspace.')
+        self.declareProperty(MatrixWorkspaceProperty('OutputWorkspaceCalculatedTransmissionCan', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The calculated transmission workspace for the can.')
+        self.declareProperty(MatrixWorkspaceProperty('OutputWorkspaceUnfittedTransmissionCan', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The unfitted transmission workspace for the can.')
+        self.setPropertyGroup("OutputWorkspaceLABCan", 'Can Output')
+        self.setPropertyGroup("OutputWorkspaceHABCan", 'Can Output')
+        self.setPropertyGroup("OutputWorkspaceLABSample", 'Can Output')
+        self.setPropertyGroup("OutputWorkspaceHABSample", 'Can Output')
+
+        # Output CAN Count and Norm for optimizations
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceLABCanNorm', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can norm output workspace group for the low-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceLABCanCount', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can count output workspace group for the low-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceHABCanCount', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can count output workspace group for the high-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+        self.declareProperty(WorkspaceGroupProperty('OutputWorkspaceHABCanNorm', '',
+                                                    optional=PropertyMode.Optional, direction=Direction.Output),
+                             doc='The can norm output workspace group for the high-angle bank, provided there is one. '
+                                 'Each workspace in the group is one event slice.')
+
+        self.setPropertyGroup("OutputWorkspaceLABCanCount", 'Opt Output')
+        self.setPropertyGroup("OutputWorkspaceLABCanNorm", 'Opt Output')
+        self.setPropertyGroup("OutputWorkspaceHABCanCount", 'Opt Output')
+        self.setPropertyGroup("OutputWorkspaceHABCanNorm", 'Opt Output')
+
+    def PyInit(self):
+        self._pyinit()
+        self.declareProperty("Period", False)
+        self.declareProperty("WavelengthRange", False)
+
+    def PyExec(self):
+        self._pyexec()
+
+    def do_initial_reduction(self, state, overall_reduction_mode):
+        # --------------------------------------------------------------------------------------------------------------
+        # Setup initial reduction
+        # --------------------------------------------------------------------------------------------------------------
+        initial_reduction_alg = create_child_algorithm(self, "SANSReductionCorePreprocess", **{})
+        # Decide which core reduction information to run, i.e. HAB, LAB, ALL, MERGED. In the case of ALL and MERGED,
+        # the required simple reduction modes need to be run. Normally this is HAB and LAB, future implementations
+        # might have more detectors though (or different types)
+        reduction_setting_bundles = self._get_reduction_setting_bundles(state, overall_reduction_mode)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Initial Reduction - steps which can be carried out before event slicing
+        # --------------------------------------------------------------------------------------------------------------
+        intermediate_bundles = []
+        for reduction_setting_bundle in reduction_setting_bundles:
+            intermediate_bundles.append(run_initial_event_slice_reduction(initial_reduction_alg,
+                                                                          reduction_setting_bundle))
+        return self._get_slice_reduction_setting_bundles(intermediate_bundles)
+
+    def do_reduction(self, reduction_alg, reduction_setting_bundles, use_optimizations, progress):
+        """
+        Perform the main reduction
+        :param reduction_alg: SANSReductionCoreEventSlice algorithm
+        :param reduction_setting_bundles: a list of lists containing workspaces to be reduced.
+        :param use_optimizations: bool. If true, use can optimizations in reduction
+        :param progress: a progress bar
+        :return: output_bundles: a list of lists containing output workspaces
+                 output_parts_bundles: a list of lists containing output workspaces
+                 output_transmission_bundles: a list containing transmission workspaces
+        """
+        output_bundles = []
+        output_parts_bundles = []
+        output_transmission_bundles = []
+        for event_slice_bundles in reduction_setting_bundles:
+            # Output bundles and parts bundles need to be separated by event slices, but grouped by component
+            # e.g. [[workspaces for slice1], [workspaces for slice2]]
+            slice_bundles = []
+            slice_parts_bundles = []
+            for slice_bundle in event_slice_bundles:
+                progress.report("Running a single reduction ...")
+                # We want to make use of optimizations here.
+                # If a can workspace has already been reduced with the same can
+                # settings and is stored in the ADS, then we should use it
+                # (provided the user has optimizations enabled).
+                if use_optimizations and slice_bundle.data_type is DataType.Can:
+                    output_bundle, output_parts_bundle, \
+                        output_transmission_bundle = run_optimized_for_can(reduction_alg,
+                                                                           slice_bundle,
+                                                                           event_slice_optimisation=True)
+                else:
+                    output_bundle, output_parts_bundle, \
+                        output_transmission_bundle = run_core_event_slice_reduction(reduction_alg, slice_bundle)
+                slice_bundles.append(output_bundle)
+                slice_parts_bundles.append(output_parts_bundle)
+                output_transmission_bundles.append(output_transmission_bundle)
+            output_bundles.append(slice_bundles)
+            output_parts_bundles.append(slice_parts_bundles)
+
+        return output_bundles, output_parts_bundles, output_transmission_bundles
+
+    @staticmethod
+    def _reduction_name():
+        """Name of the child algorithm that performs the core reduction of each event slice."""
+        return "SANSReductionCoreEventSlice"
+
+    @staticmethod
+    def _get_slice_bundles(bundle):
+        """
+            Splits a reduction package object into several reduction package objects if it
+            contains several event slice settings
+
+            :param bundle: a EventSliceSettingBundle tuple
+            :return: a list of EventSliceSettingBundle tuples where each tuple contains only one event slice.
+        """
+        slice_bundles = []
+        state = bundle.state
+        slice_event_info = state.slice
+        start_time = slice_event_info.start_time
+        end_time = slice_event_info.end_time
+
+        states = []
+        for start, end in zip(start_time, end_time):
+            state_copy = deepcopy(state)
+            slice_event_info = state_copy.slice
+            slice_event_info.start_time = [start]
+            slice_event_info.end_time = [end]
+            states.append(state_copy)
+
+        for state in states:
+            new_state = deepcopy(state)
+            slice_bundles.append(EventSliceSettingBundle(state=new_state,
+                                                         data_type=bundle.data_type,
+                                                         reduction_mode=bundle.reduction_mode,
+                                                         output_parts=bundle.output_parts,
+                                                         scatter_workspace=bundle.scatter_workspace,
+                                                         dummy_mask_workspace=bundle.dummy_mask_workspace,
+                                                         scatter_monitor_workspace=bundle.scatter_monitor_workspace,
+                                                         direct_workspace=bundle.direct_workspace,
+                                                         transmission_workspace=bundle.transmission_workspace))
+        return slice_bundles
+
+    def _get_slice_reduction_setting_bundles(self, intermediate_bundles):
+        """
+        For each workspace bundle we have from the initial reduction (one for each component),
+        create a separate bundle for each event slice.
+        We group these as a list of lists, with the structure:
+        [[component1 for event slice1, c2 for es1,..], [c1 for es2, c2 for es2, ..], ..]
+
+        :param intermediate_bundles: a list of EventSliceSettingBundle objects,
+                                     the output from the initial reduction.
+        :return: a list of lists of EventSliceSettingBundle objects, one for each component and event slice.
+        """
+        sliced_bundles = []
+        for bundle in intermediate_bundles:
+            sliced_bundles.append(self._get_slice_bundles(bundle))
+
+        # We currently have a list containing a list for each component. Each component list contains workspaces
+        # split into event slices. We want the inner list to be component-wise splits so we must transpose this.
+        return list(map(list, zip(*sliced_bundles)))
+
+    def set_shift_and_scale_output(self, scale_factors, shift_factors):
+        """Pack the merge scale/shift factors into a single workspace output.
+
+        The scale factors become the X data and the shift factors the Y data of a new
+        workspace, so one output property can carry both lists (one entry per event slice).
+        :param scale_factors: list of merge scale factors
+        :param shift_factors: list of merge shift factors
+        """
+        create_workspace_alg = create_child_algorithm(self, "CreateWorkspace", **{"DataX": scale_factors,
+                                                                                  "DataY": shift_factors})
+        create_workspace_alg.execute()
+        self.setProperty("OutShiftAndScaleFactor", create_workspace_alg.getProperty("OutputWorkspace").value)
+
+    def set_output_workspaces(self, reduction_mode_vs_output_workspaces, reduction_mode_vs_workspace_names):
+        """
+        Sets the output workspaces which can be HAB, LAB or Merged.
+
+        At this step we also provide a workspace name to the sample logs which can be used later on for saving
+        :param reduction_mode_vs_output_workspaces:  map from reduction mode to output workspace
+        :param reduction_mode_vs_workspace_names: map from reduction mode to output workspace name
+        """
+        workspace_group_merged = WorkspaceGroup()
+        workspace_group_lab = WorkspaceGroup()
+        workspace_group_hab = WorkspaceGroup()
+        # Note that this breaks the flexibility that we have established with the reduction mode. We have not hardcoded
+        # HAB or LAB anywhere which means that in the future there could be other detectors of relevance. Here we
+        # reference HAB and LAB directly since we currently don't want to rely on dynamic properties. See also in PyInit
+        for reduction_mode, output_workspaces in list(reduction_mode_vs_output_workspaces.items()):
+            # Workspaces and names are expected to be parallel lists (one entry per event slice).
+            workspace_names = reduction_mode_vs_workspace_names[reduction_mode]
+            for output_workspace, output_name in zip(output_workspaces, workspace_names):
+                # In an MPI reduction output_workspace is produced on the master rank, skip others.
+                if output_workspace is None:
+                    continue
+                else:
+                    # Publish under its final name so later steps can retrieve it from the ADS.
+                    AnalysisDataService.addOrReplace(output_name, output_workspace)
+                if reduction_mode is ReductionMode.Merged:
+                    workspace_group_merged.addWorkspace(output_workspace)
+                elif reduction_mode is ISISReductionMode.LAB:
+                    workspace_group_lab.addWorkspace(output_workspace)
+                elif reduction_mode is ISISReductionMode.HAB:
+                    workspace_group_hab.addWorkspace(output_workspace)
+                else:
+                    raise RuntimeError("SANSSingleReduction: Cannot set the output workspace. "
+                                       "The selected reduction mode {0} is unknown.".format(reduction_mode))
+        # Only expose groups that actually contain workspaces.
+        if workspace_group_merged.size() > 0:
+            self.setProperty("OutputWorkspaceMerged", workspace_group_merged)
+        if workspace_group_lab.size() > 0:
+            self.setProperty("OutputWorkspaceLAB", workspace_group_lab)
+        if workspace_group_hab.size() > 0:
+            self.setProperty("OutputWorkspaceHAB", workspace_group_hab)
+
+    def set_reduced_can_workspace_on_output(self, output_bundles):
+        """
+        Sets the reduced can group workspaces on the output properties.
+        The reduced can workspaces can be:
+        LAB Can or
+        HAB Can
+
+        :param output_bundles: a list of output bundles
+        """
+        workspace_group_lab_can = WorkspaceGroup()
+        workspace_group_hab_can = WorkspaceGroup()
+        # Find the LAB Can and HAB Can entries if they exist
+        for component_bundle in output_bundles:
+            for output_bundle in component_bundle:
+                if output_bundle.data_type is DataType.Can:
+                    reduction_mode = output_bundle.reduction_mode
+                    output_workspace = output_bundle.output_workspace
+                    # Make sure that the output workspace is not None which can be the case if there has never been a
+                    # can set for the reduction.
+                    # Workspaces already on the ADS are skipped — presumably published by an
+                    # earlier optimized reduction (see UseOptimizations); verify against callers.
+                    if output_workspace is not None and not does_can_workspace_exist_on_ads(output_workspace):
+                        name = self._get_output_workspace_name(output_bundle.state, output_bundle.reduction_mode,
+                                                               can=True)
+                        AnalysisDataService.addOrReplace(name, output_workspace)
+                        if reduction_mode is ISISReductionMode.LAB:
+                            workspace_group_lab_can.addWorkspace(output_workspace)
+                        elif reduction_mode is ISISReductionMode.HAB:
+                            workspace_group_hab_can.addWorkspace(output_workspace)
+                        else:
+                            raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
+                                               " be set with a can.".format(reduction_mode))
+        if workspace_group_lab_can.size() > 0:
+            # LAB group workspace is non-empty, so we want to set it as output
+            self.setProperty("OutputWorkspaceLABCan", workspace_group_lab_can)
+        if workspace_group_hab_can.size() > 0:
+            self.setProperty("OutputWorkspaceHABCan", workspace_group_hab_can)
+
+    def set_reduced_can_count_and_norm_on_output(self, output_bundles_parts):
+        """
+        Sets the reduced can count and norm group workspaces on the output properties.
+        The reduced can workspaces can be:
+        1. LAB Can Count
+        2. LAB Can Norm
+        3. HAB Can Count
+        4. HAB Can Norm
+
+        :param output_bundles_parts: a list of output bundle parts
+        """
+        workspace_group_lab_can_count = WorkspaceGroup()
+        workspace_group_lab_can_norm = WorkspaceGroup()
+        workspace_group_hab_can_count = WorkspaceGroup()
+        workspace_group_hab_can_norm = WorkspaceGroup()
+        # Find the partial output bundles fo LAB Can and HAB Can if they exist
+        for event_slice_bundles in output_bundles_parts:
+            for output_bundle_part in event_slice_bundles:
+                if output_bundle_part.data_type is DataType.Can:
+                    reduction_mode = output_bundle_part.reduction_mode
+                    output_workspace_count = output_bundle_part.output_workspace_count
+                    output_workspace_norm = output_bundle_part.output_workspace_norm
+                    # Make sure that the output workspace is not None which can be the case if there has never been a
+                    # can set for the reduction.
+                    if output_workspace_norm is not None and output_workspace_count is not None and \
+                            not does_can_workspace_exist_on_ads(output_workspace_norm) and \
+                            not does_can_workspace_exist_on_ads(output_workspace_count):
+                        name = self._get_output_workspace_name(output_bundle_part.state, output_bundle_part.reduction_mode)
+                        count_name = name + "_hab_can_count"
+                        norm_name = name + "_hab_can_norm"
+                        AnalysisDataService.addOrReplace(count_name, output_workspace_count)
+                        AnalysisDataService.addOrReplace(norm_name, output_workspace_norm)
+                        if reduction_mode is ISISReductionMode.LAB:
+                            workspace_group_lab_can_count.addWorkspace(output_workspace_count)
+                            workspace_group_lab_can_norm.addWorkspace(output_workspace_norm)
+                        elif reduction_mode is ISISReductionMode.HAB:
+                            workspace_group_hab_can_count.addWorkspace(output_workspace_count)
+                            workspace_group_hab_can_norm.addWorkspace(output_workspace_norm)
+                        else:
+                            raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
+                                               " be set with a partial can.".format(reduction_mode))
+        if workspace_group_lab_can_count.size() > 0:
+            self.setProperty("OutputWorkspaceLABCanCount", workspace_group_lab_can_count)
+        if workspace_group_lab_can_norm.size() > 0:
+            self.setProperty("OutputWorkspaceLABCanNorm", workspace_group_lab_can_norm)
+        if workspace_group_hab_can_count.size() > 0:
+            self.setProperty("OutputWorkspaceHABCanCount", workspace_group_hab_can_count)
+        if workspace_group_hab_can_norm.size() > 0:
+            self.setProperty("OutputWorkspaceHABCanNorm", workspace_group_hab_can_norm)
+
+    def set_can_and_sam_on_output(self, output_bundles):
+        """
+        Sets the reduced can and sam workspaces.
+        These can be:
+        1. LAB Can
+        2. HAB Can
+        3. LAB Sample
+        4. HAB Sample
+        Cans are also output for optimization, so check for double output.
+        :param output_bundles: a list of output_bundles
+        """
+        workspace_group_lab_can = WorkspaceGroup()
+        workspace_group_hab_can = WorkspaceGroup()
+        workspace_group_lab_sample = WorkspaceGroup()
+        workspace_group_hab_sample = WorkspaceGroup()
+
+        for component_bundle in output_bundles:
+            for output_bundle in component_bundle:
+                if output_bundle.data_type is DataType.Can:
+                    reduction_mode = output_bundle.reduction_mode
+                    output_workspace = output_bundle.output_workspace
+
+                    # Skip cans already on the ADS to avoid the double output mentioned above.
+                    if output_workspace is not None and not does_can_workspace_exist_on_ads(output_workspace):
+                        can_name = self._get_output_workspace_name(output_bundle.state, output_bundle.reduction_mode,
+                                                                   can=True)
+                        AnalysisDataService.addOrReplace(can_name, output_workspace)
+                        if reduction_mode is ISISReductionMode.LAB:
+                            workspace_group_lab_can.addWorkspace(output_workspace)
+                        elif reduction_mode is ISISReductionMode.HAB:
+                            workspace_group_hab_can.addWorkspace(output_workspace)
+                        else:
+                            raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
+                                               " be set with a can.".format(reduction_mode))
+                elif output_bundle.data_type is DataType.Sample:
+                    reduction_mode = output_bundle.reduction_mode
+                    output_workspace = output_bundle.output_workspace
+
+                    # Samples are never cached on the ADS, so only a None check is needed here.
+                    if output_workspace is not None:
+                        sample_name = self._get_output_workspace_name(output_bundle.state, output_bundle.reduction_mode,
+                                                                      sample=True)
+                        AnalysisDataService.addOrReplace(sample_name, output_workspace)
+                        if reduction_mode is ISISReductionMode.LAB:
+                            workspace_group_lab_sample.addWorkspace(output_workspace)
+                        elif reduction_mode is ISISReductionMode.HAB:
+                            workspace_group_hab_sample.addWorkspace(output_workspace)
+                        else:
+                            raise RuntimeError("SANSSingleReduction: The reduction mode {0} should not"
+                                               " be set with a sample.".format(reduction_mode))
+
+        # Only expose groups that actually contain workspaces.
+        if workspace_group_hab_can.size() > 0:
+            self.setProperty("OutputWorkspaceHABCan", workspace_group_hab_can)
+        if workspace_group_hab_sample.size() > 0:
+            self.setProperty("OutputWorkspaceHABSample", workspace_group_hab_sample)
+        if workspace_group_lab_can.size() > 0:
+            self.setProperty("OutputWorkspaceLABCan", workspace_group_lab_can)
+        if workspace_group_lab_sample.size() > 0:
+            self.setProperty("OutputWorkspaceLABSample", workspace_group_lab_sample)
+
+    def set_transmission_workspaces_on_output(self, transmission_bundles, fit_state):
+        """Sets the calculated and unfitted transmission workspaces on the output properties.
+
+        :param transmission_bundles: a list of transmission bundles (sample and/or can)
+        :param fit_state: map from data-type string to the transmission fit state; a calculated
+                          (fitted) transmission output is only set when a fit was performed
+        """
+        for transmission_bundle in transmission_bundles:
+            fit_performed = fit_state[DataType.to_string(transmission_bundle.data_type)].fit_type != FitType.NoFit
+            calculated_transmission_workspace = transmission_bundle.calculated_transmission_workspace
+            unfitted_transmission_workspace = transmission_bundle.unfitted_transmission_workspace
+            if transmission_bundle.data_type is DataType.Can:
+                if does_can_workspace_exist_on_ads(calculated_transmission_workspace):
+                    # The workspace is cloned here because the transmission runs are diagnostic output so even though
+                    # the values already exist they need to be labelled separately for each reduction.
+                    calculated_transmission_workspace = CloneWorkspace(calculated_transmission_workspace,
+                                                                       StoreInADS=False)
+                if does_can_workspace_exist_on_ads(unfitted_transmission_workspace):
+                    unfitted_transmission_workspace = CloneWorkspace(unfitted_transmission_workspace, StoreInADS=False)
+                if fit_performed:
+                    self.setProperty("OutputWorkspaceCalculatedTransmissionCan", calculated_transmission_workspace)
+                self.setProperty("OutputWorkspaceUnfittedTransmissionCan", unfitted_transmission_workspace)
+            elif transmission_bundle.data_type is DataType.Sample:
+                if fit_performed:
+                    self.setProperty("OutputWorkspaceCalculatedTransmission", calculated_transmission_workspace)
+                self.setProperty("OutputWorkspaceUnfittedTransmission", unfitted_transmission_workspace)
+            else:
+                raise RuntimeError("SANSSingleReduction: The data type {0} should be"
+                                   " sample or can.".format(transmission_bundle.data_type))
+
+    def _get_workspace_names(self, reduction_mode_vs_workspace_names, output_bundle):
+        output_workspace_names = self._get_final_workspace_names(output_bundle)
+        for reduction_mode, name in output_workspace_names.items():
+            reduction_mode_vs_workspace_names[reduction_mode].append(name)
+        return reduction_mode_vs_workspace_names
+
+    def _get_final_workspace_names(self, output_bundles):
+        """This method retrieves the workspace names for event sliced final output workspaces.
+        :param output_bundles: A set of outputBundles
+        :return: a map of ReductionMode vs final output workspace names"""
+        reduction_mode_vs_output_bundles = get_reduction_mode_vs_output_bundles(output_bundles)
+
+        # For each reduction mode, we must find the output name of the workspace
+        final_output_workspace_names = {}
+        for reduction_mode, output_bundles in reduction_mode_vs_output_bundles.items():
+            # Find the sample in the data collection
+            state, reduction_mode = next(((output_bundle.state, output_bundle.reduction_mode)
+                                          for output_bundle in output_bundles
+                                          if output_bundle.data_type == DataType.Sample), None)
+
+            # Get the workspace name
+            name = self._get_output_workspace_name(state, reduction_mode=reduction_mode)
+            final_output_workspace_names.update({reduction_mode: name})
+
+        return final_output_workspace_names
+
+    def _get_merged_workspace_name(self, output_parts_bundle):
+        """This method gets the output workspace names for a merged bundle. This only occurs
+         if the reduction mode is Merged.
+         :param output_parts_bundle: a list of OutputBundles containing workspaces for a single event slice.
+         :return: a workspace name
+         """
+        state = output_parts_bundle[0].state
+        return self._get_output_workspace_name(state, reduction_mode=ReductionMode.Merged)
+
+    def _get_output_workspace_name(self, state, reduction_mode=None, data_type=None,
+                                   can=False, sample=False, transmission=False, fitted=False):
+        """
+        Get the output names for the sliced workspaces (within the group workspaces, which are already named).
+
+        :param state: a SANS state object
+        :param reduction_mode: an optional ISISReductionMode enum: "HAB", "LAB", "Merged", or "All"
+        :param data_type: an optional DataType enum: "Sample" or "Can"
+        :param can: optional bool. If true then creating name for a can workspace
+        :param sample: optional bool. If true then creating name for a sample workspace. Sample and can cannot both be
+                       true
+        :param transmission: optional bool. If true then creating name for a transmission workspace
+        :param fitted: optional bool. If true then workspace is a fitted transmission workspace, otherwise unfitted
+        :return: name of the workspace
+        """
+        _multi = {"event_slice": True,
+                  "period": self.getProperty("Period").value,
+                  "wavelength_range": self.getProperty("WavelengthRange").value}
+
+        if not transmission:
+            _suffix = ""
+            if can:
+                if reduction_mode == ISISReductionMode.HAB:
+                    _suffix = "_hab_can"
+                elif reduction_mode == ISISReductionMode.LAB:
+                    _suffix = "_lab_can"
+            elif sample:
+                if reduction_mode == ISISReductionMode.HAB:
+                    _suffix = "_hab_sample"
+                elif reduction_mode == ISISReductionMode.LAB:
+                    _suffix = "_lab_sample"
+            return get_output_name(state, reduction_mode, True, suffix=_suffix, multi_reduction_type=_multi)[0]
+        else:
+            return get_transmission_output_name(state, data_type, _multi, fitted)[0]
+
+
+# Register the algorithm with Mantid's algorithm factory so it can be instantiated by name
+AlgorithmFactory.subscribe(SANSSingleReduction)
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReductionBase.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReductionBase.py
new file mode 100644
index 0000000000000000000000000000000000000000..1867b034e48b6d408291440014dc371ece5c6e45
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/SANSSingleReductionBase.py
@@ -0,0 +1,317 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
+
+"""A base class to share functionality between SANSSingleReduction versions."""
+
+from __future__ import (absolute_import, division, print_function)
+
+from collections import defaultdict
+
+from mantid.api import (DistributedDataProcessorAlgorithm,
+                        MatrixWorkspaceProperty, Progress, PropertyMode)
+from mantid.kernel import (Direction, PropertyManagerProperty)
+from sans.algorithm_detail.bundles import ReductionSettingBundle
+from sans.algorithm_detail.single_execution import (get_final_output_workspaces,
+                                                    get_merge_bundle_for_merge_request)
+from sans.algorithm_detail.strip_end_nans_and_infs import strip_end_nans
+from sans.common.enums import (ReductionMode, DataType, ISISReductionMode)
+from sans.common.general_functions import create_child_algorithm
+from sans.state.state_base import create_deserialized_sans_state_from_property_manager
+
+
+class SANSSingleReductionBase(DistributedDataProcessorAlgorithm):
+    def _pyinit(self):
+        # ----------
+        # INPUT
+        # ----------
+        self.declareProperty(PropertyManagerProperty('SANSState'),
+                             doc='A property manager which fulfills the SANSState contract.')
+
+        self.declareProperty("UseOptimizations", True, direction=Direction.Input,
+                             doc="When enabled the ADS is being searched for already loaded and reduced workspaces. "
+                                 "Depending on your concrete reduction, this could provide a significant"
+                                 " performance boost")
+
+        self.declareProperty("SaveCan", False, direction=Direction.Input,
+                             doc="When enabled, the unsubtracted can and sam workspaces are added to the ADS.")
+
+        # Sample Scatter Workspaces
+        self.declareProperty(MatrixWorkspaceProperty('SampleScatterWorkspace', '',
+                                                     optional=PropertyMode.Mandatory, direction=Direction.Input),
+                             doc='The sample scatter workspace. This workspace does not contain monitors.')
+        self.declareProperty(MatrixWorkspaceProperty('SampleScatterMonitorWorkspace', '',
+                                                     optional=PropertyMode.Mandatory, direction=Direction.Input),
+                             doc='The sample scatter monitor workspace. This workspace only contains monitors.')
+
+        # Sample Transmission Workspace
+        self.declareProperty(MatrixWorkspaceProperty('SampleTransmissionWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The sample transmission workspace.')
+
+        # Sample Direct Workspace
+        self.declareProperty(MatrixWorkspaceProperty('SampleDirectWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The sample scatter direct workspace.')
+
+        self.setPropertyGroup("SampleScatterWorkspace", 'Sample')
+        self.setPropertyGroup("SampleScatterMonitorWorkspace", 'Sample')
+        self.setPropertyGroup("SampleTransmissionWorkspace", 'Sample')
+        self.setPropertyGroup("SampleDirectWorkspace", 'Sample')
+
+        # Can Scatter Workspaces
+        self.declareProperty(MatrixWorkspaceProperty('CanScatterWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The can scatter workspace. This workspace does not contain monitors.')
+        self.declareProperty(MatrixWorkspaceProperty('CanScatterMonitorWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The can scatter monitor workspace. This workspace only contains monitors.')
+
+        # Sample Transmission Workspace
+        self.declareProperty(MatrixWorkspaceProperty('CanTransmissionWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The can transmission workspace.')
+
+        # Sample Direct Workspace
+        self.declareProperty(MatrixWorkspaceProperty('CanDirectWorkspace', '',
+                                                     optional=PropertyMode.Optional, direction=Direction.Input),
+                             doc='The sample scatter direct workspace.')
+
+        self.setPropertyGroup("CanScatterWorkspace", 'Can')
+        self.setPropertyGroup("CanScatterMonitorWorkspace", 'Can')
+        self.setPropertyGroup("CanTransmissionWorkspace", 'Can')
+        self.setPropertyGroup("CanDirectWorkspace", 'Can')
+
+        # ----------
+        # OUTPUT
+        # ----------
+        self._declare_output_properties()
+
+    def _pyexec(self):
+        # Get state
+        state = self._get_state()
+
+        # Get reduction mode
+        overall_reduction_mode = self._get_reduction_mode(state)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Perform the initial reduction. Version 1 does not have an initial reduction.
+        # --------------------------------------------------------------------------------------------------------------
+        reduction_setting_bundles = self.do_initial_reduction(state, overall_reduction_mode)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Setup main reduction
+        # --------------------------------------------------------------------------------------------------------------
+
+        # Run core reductions
+        use_optimizations = self.getProperty("UseOptimizations").value
+        save_can = self.getProperty("SaveCan").value
+
+        # Create the reduction core algorithm
+        reduction_alg = create_child_algorithm(self, self._reduction_name(), **{})
+
+        # Set up progress
+        progress = self._get_progress(sum([len(event_list) for event_list in reduction_setting_bundles]),
+                                      overall_reduction_mode)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Reduction - here we slice the workspaces and perform the steps which must be carried out after slicing
+        # --------------------------------------------------------------------------------------------------------------
+        output_bundles, output_parts_bundles, \
+            output_transmission_bundles = self.do_reduction(reduction_alg, reduction_setting_bundles, use_optimizations,
+                                                            progress)
+
+        reduction_mode_vs_output_workspaces = defaultdict(list)
+        reduction_mode_vs_workspace_names = defaultdict(list)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Deal with non-merged
+        # Note that we have non-merged workspaces even in the case of a merged reduction, ie LAB and HAB results
+        # --------------------------------------------------------------------------------------------------------------
+        progress.report("Final clean up...")
+        for event_slice_bundle in output_bundles:
+            output_workspaces_non_merged = get_final_output_workspaces(event_slice_bundle, self)
+            for reduction_mode, workspace in output_workspaces_non_merged.items():
+                reduction_mode_vs_output_workspaces[reduction_mode].append(workspace)
+
+            reduction_mode_vs_workspace_names = self._get_workspace_names(reduction_mode_vs_workspace_names,
+                                                                          event_slice_bundle)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Deal with merging
+        # --------------------------------------------------------------------------------------------------------------
+        # Merge if required with stitching etc.
+        scale_factors = []
+        shift_factors = []
+        if overall_reduction_mode is ReductionMode.Merged:
+            progress.report("Merging reductions ...")
+            for i, event_slice_part_bundle in enumerate(output_parts_bundles):
+                merge_bundle = get_merge_bundle_for_merge_request(event_slice_part_bundle, self)
+                scale_factors.append(merge_bundle.scale)
+                shift_factors.append(merge_bundle.shift)
+                reduction_mode_vs_output_workspaces[ReductionMode.Merged].append(merge_bundle.merged_workspace)
+                merged_name = self._get_merged_workspace_name(event_slice_part_bundle)
+                reduction_mode_vs_workspace_names[ReductionMode.Merged].append(merged_name)
+
+                scaled_HAB = strip_end_nans(merge_bundle.scaled_hab_workspace, self)
+                reduction_mode_vs_output_workspaces[ISISReductionMode.HAB].append(scaled_HAB)
+                # Get HAB workspace name
+                state = event_slice_part_bundle[0].state
+                hab_name = self._get_output_workspace_name(state, reduction_mode=ISISReductionMode.HAB)
+                reduction_mode_vs_workspace_names[ISISReductionMode.HAB].append(hab_name)
+
+            self.set_shift_and_scale_output(scale_factors, shift_factors)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Set the output workspaces
+        # --------------------------------------------------------------------------------------------------------------
+        self.set_output_workspaces(reduction_mode_vs_output_workspaces, reduction_mode_vs_workspace_names)
+
+        # --------------------------------------------------------------------------------------------------------------
+        # Set the reduced can workspaces on the output if optimizations are
+        # enabled. This will allow SANSBatchReduction to add them to the ADS.
+        # --------------------------------------------------------------------------------------------------------------
+        if use_optimizations:
+            if not save_can:
+                self.set_reduced_can_workspace_on_output(output_bundles)
+            self.set_reduced_can_count_and_norm_on_output(output_parts_bundles)
+
+        if save_can:
+            self.set_can_and_sam_on_output(output_bundles)
+
+        self.set_transmission_workspaces_on_output(output_transmission_bundles,
+                                                   state.adjustment.calculate_transmission.fit)
+
+    def do_initial_reduction(self, state, overall_reduction_mode):
+        raise NotImplementedError("do_initial_reduction must be implemented.")
+
+    def _get_workspace_names(self, reduction_mode_vs_workspace_names,
+                             event_slice_bundle):
+        raise NotImplementedError("_get_workspace_names must be implemented.")
+
+    def _get_merged_workspace_name(self, event_slice_part_bundle):
+        raise NotImplementedError("_get_merged_workspace_name must be implemented.")
+
+    def _get_output_workspace_name(self, state, reduction_mode=None, data_type=None,
+                                   can=False, sample=False, transmission=False, fitted=False):
+        raise NotImplementedError("_get_output_workspace_name must be implemented.")
+
+    def set_reduced_can_workspace_on_output(self, output_bundles):
+        raise NotImplementedError("set_reduced_can_workspace_on_output must be implemented.")
+
+    def set_reduced_can_count_and_norm_on_output(self, output_parts_bundles):
+        raise NotImplementedError("set_reduced_can_count_and_norm_on_output must be implemented.")
+
+    def set_can_and_sam_on_output(self, output_bundles):
+        raise NotImplementedError("set_can_and_sam_on_output must be implemented.")
+
+    def set_transmission_workspaces_on_output(self, output_transmission_bundles,
+                                              fit_state):
+        raise NotImplementedError("set_transmission_workspaces_on_output must be implemented.")
+
+    def set_shift_and_scale_output(self, scale_factors, shift_factors):
+        raise NotImplementedError("set_shift_and_scale_output must be implemented.")
+
+    def set_output_workspaces(self, reduction_mode_vs_output_workspaces, reduction_mode_vs_workspace_names):
+        raise NotImplementedError("set_output_workspaces must be implemented.")
+
+    def do_reduction(self, reduction_alg, reduction_setting_bundles, use_optimizations, progress):
+        raise NotImplementedError("do_reduction must be implemented.")
+
+    def validateInputs(self):
+        errors = dict()
+        # Check that the input can be converted into the right state object
+        try:
+            state = self._get_state()
+            state.validate()
+        except ValueError as err:
+            errors.update({"SANSSingleReduction": str(err)})
+        return errors
+
+    def _get_state(self):
+        state_property_manager = self.getProperty("SANSState").value
+        state = create_deserialized_sans_state_from_property_manager(state_property_manager)
+        state.property_manager = state_property_manager
+        return state
+
+    @staticmethod
+    def _get_reduction_mode(state):
+        reduction_info = state.reduction
+        reduction_mode = reduction_info.reduction_mode
+        return reduction_mode
+
+    def _get_reduction_setting_bundles(self, state, reduction_mode):
+        # We need to output the parts if we request a merged reduction mode. This is necessary for stitching later on.
+        output_parts = reduction_mode is ReductionMode.Merged
+
+        # If the reduction mode is MERGED, then we need to make sure that all reductions for that selection
+        # are executed, i.e. we need to split it up
+        if reduction_mode is ReductionMode.Merged:
+            # If we are dealing with a merged reduction we need to know which detectors should be merged.
+            reduction_info = state.reduction
+            reduction_modes = reduction_info.get_merge_strategy()
+        elif reduction_mode is ReductionMode.All:
+            reduction_info = state.reduction
+            reduction_modes = reduction_info.get_all_reduction_modes()
+        else:
+            reduction_modes = [reduction_mode]
+
+        # Create the Scatter information
+        sample_info = self._create_reduction_bundles_for_data_type(state=state,
+                                                                   data_type=DataType.Sample,
+                                                                   reduction_modes=reduction_modes,
+                                                                   output_parts=output_parts,
+                                                                   scatter_name="SampleScatterWorkspace",
+                                                                   scatter_monitor_name="SampleScatterMonitorWorkspace",
+                                                                   transmission_name="SampleTransmissionWorkspace",
+                                                                   direct_name="SampleDirectWorkspace")
+
+        # Create the Can information
+        can_info = self._create_reduction_bundles_for_data_type(state=state,
+                                                                data_type=DataType.Can,
+                                                                reduction_modes=reduction_modes,
+                                                                output_parts=output_parts,
+                                                                scatter_name="CanScatterWorkspace",
+                                                                scatter_monitor_name="CanScatterMonitorWorkspace",
+                                                                transmission_name="CanTransmissionWorkspace",
+                                                                direct_name="CanDirectWorkspace")
+        reduction_setting_bundles = sample_info
+
+        # Make sure that the can information has at least a scatter and a monitor workspace
+        for can_bundle in can_info:
+            if can_bundle.scatter_workspace is not None and can_bundle.scatter_monitor_workspace is not None:
+                reduction_setting_bundles.append(can_bundle)
+        return reduction_setting_bundles
+
+    def _create_reduction_bundles_for_data_type(self, state, data_type, reduction_modes, output_parts,
+                                                scatter_name, scatter_monitor_name,
+                                                transmission_name, direct_name):
+        # Get workspaces
+        scatter_workspace = self.getProperty(scatter_name).value
+
+        scatter_monitor_workspace = self.getProperty(scatter_monitor_name).value
+        transmission_workspace = self.getProperty(transmission_name).value
+        direct_workspace = self.getProperty(direct_name).value
+
+        # Iterate over all requested reduction types, i.e. LAB, HAB, ..
+        reduction_setting_bundles = []
+        for reduction_mode in reduction_modes:
+            reduction_setting_bundle = ReductionSettingBundle(state=state,
+                                                              data_type=data_type,
+                                                              reduction_mode=reduction_mode,
+                                                              output_parts=output_parts,
+                                                              scatter_workspace=scatter_workspace,
+                                                              scatter_monitor_workspace=scatter_monitor_workspace,
+                                                              transmission_workspace=transmission_workspace,
+                                                              direct_workspace=direct_workspace)
+            reduction_setting_bundles.append(reduction_setting_bundle)
+        return reduction_setting_bundles
+
+    def _get_progress(self, number_of_reductions, overall_reduction_mode):
+        number_from_merge = 1 if overall_reduction_mode is ReductionMode.Merged else 0
+        number_of_progress_reports = number_of_reductions + number_from_merge + 1
+        return Progress(self, start=0.0, end=1.0, nreports=number_of_progress_reports)
diff --git a/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/__init__.py b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..680d818636ca0c71215d00e62846d9ac7b9aec63
--- /dev/null
+++ b/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANS/__init__.py
@@ -0,0 +1,7 @@
+# Mantid Repository : https://github.com/mantidproject/mantid
+#
+# Copyright &copy; 2019 ISIS Rutherford Appleton Laboratory UKRI,
+#     NScD Oak Ridge National Laboratory, European Spallation Source
+#     & Institut Laue - Langevin
+# SPDX - License - Identifier: GPL - 3.0 +
+# pylint: disable=invalid-name
diff --git a/Testing/SystemTests/tests/analysis/SANSSingleReductionTest.py b/Testing/SystemTests/tests/analysis/SANSSingleReductionTest.py
index 24ef74afb3823144c6add95f35b9cb381f0cdd5d..f2629efc89dcd40ab42c754e90674869bb3245a2 100644
--- a/Testing/SystemTests/tests/analysis/SANSSingleReductionTest.py
+++ b/Testing/SystemTests/tests/analysis/SANSSingleReductionTest.py
@@ -7,8 +7,10 @@
 # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments
 
 from __future__ import (absolute_import, division, print_function)
-import unittest
+
+import time
 import systemtesting
+import unittest
 
 import mantid  # noqa
 from mantid.api import AlgorithmManager
@@ -20,10 +22,10 @@ from sans.common.general_functions import create_unmanaged_algorithm
 from sans.common.file_information import SANSFileInformationFactory
 
 
-# -----------------------------------------------
-# Tests for the SANSSingleReduction algorithm
-# -----------------------------------------------
-class SANSSingleReductionTest(unittest.TestCase):
+# ----------------------------------------------------------------------------------------------------------------------
+# Base class containing useful functions for the tests
+# ----------------------------------------------------------------------------------------------------------------------
+class SingleReductionTest(unittest.TestCase):
     def _load_workspace(self, state):
         load_alg = AlgorithmManager.createUnmanaged("SANSLoad")
         load_alg.setChild(True)
@@ -60,16 +62,53 @@ class SANSSingleReductionTest(unittest.TestCase):
         return sample_scatter, sample_scatter_monitor_workspace, transmission_workspace, direct_workspace, \
                can_scatter_workspace, can_scatter_monitor_workspace, can_transmission_workspace, can_direct_workspace  # noqa
 
+    def _compare_to_reference(self, workspace, reference_file_name, check_spectra_map=True):
+        # Load the reference file
+        load_name = "LoadNexusProcessed"
+        load_options = {"Filename": reference_file_name,
+                        "OutputWorkspace": EMPTY_NAME}
+        load_alg = create_unmanaged_algorithm(load_name, **load_options)
+        load_alg.execute()
+        reference_workspace = load_alg.getProperty("OutputWorkspace").value
+
+        # Compare reference file with the output_workspace
+        self._compare_workspace(workspace, reference_workspace, check_spectra_map=check_spectra_map)
+
+    def _compare_workspace(self, workspace1, workspace2, check_spectra_map=True,
+                           tolerance=1e-6):
+        # We need to disable the instrument comparison, it takes way too long
+        # We need to disable the sample comparison -- it is not yet clear why
+        # the number of entries found in the sample logs varies between operations
+        compare_name = "CompareWorkspaces"
+        compare_options = {"Workspace1": workspace1,
+                           "Workspace2": workspace2,
+                           "Tolerance": tolerance,
+                           "CheckInstrument": False,
+                           "CheckSample": False,
+                           "ToleranceRelErr": True,
+                           "CheckAllData": True,
+                           "CheckMasking": True,
+                           "CheckType": True,
+                           "CheckAxes": True,
+                           "CheckSpectraMap": check_spectra_map}
+        compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
+        compare_alg.setChild(False)
+        compare_alg.execute()
+        result = compare_alg.getProperty("Result").value
+        self.assertTrue(result)
+
     def _run_single_reduction(self, state, sample_scatter, sample_monitor, sample_transmission=None, sample_direct=None,
                               can_scatter=None, can_monitor=None, can_transmission=None, can_direct=None,
-                              output_settings=None):
+                              output_settings=None, event_slice_optimisation=False, save_can=False, use_optimizations=False):
         single_reduction_name = "SANSSingleReduction"
+        ver = 1 if not event_slice_optimisation else 2
         state_dict = state.property_manager
 
         single_reduction_options = {"SANSState": state_dict,
                                     "SampleScatterWorkspace": sample_scatter,
                                     "SampleScatterMonitorWorkspace": sample_monitor,
-                                    "UseOptimizations": False}
+                                    "UseOptimizations": use_optimizations,
+                                    "SaveCan": save_can}
         if sample_transmission:
             single_reduction_options.update({"SampleTransmissionWorkspace": sample_transmission})
 
@@ -91,44 +130,19 @@ class SANSSingleReductionTest(unittest.TestCase):
         if output_settings:
             single_reduction_options.update(output_settings)
 
-        single_reduction_alg = create_unmanaged_algorithm(single_reduction_name, **single_reduction_options)
+        single_reduction_alg = create_unmanaged_algorithm(single_reduction_name, version=ver,
+                                                          **single_reduction_options)
 
         # Act
         single_reduction_alg.execute()
         self.assertTrue(single_reduction_alg.isExecuted())
         return single_reduction_alg
 
-    def _compare_workspace(self, workspace, reference_file_name, check_spectra_map=True):
-        # Load the reference file
-        load_name = "LoadNexusProcessed"
-        load_options = {"Filename": reference_file_name,
-                        "OutputWorkspace": EMPTY_NAME}
-        load_alg = create_unmanaged_algorithm(load_name, **load_options)
-        load_alg.execute()
-        reference_workspace = load_alg.getProperty("OutputWorkspace").value
-
-        # Compare reference file with the output_workspace
-        # We need to disable the instrument comparison, it takes way too long
-        # We need to disable the sample -- Not clear why yet
-        # operation how many entries can be found in the sample logs
-        compare_name = "CompareWorkspaces"
-        compare_options = {"Workspace1": workspace,
-                           "Workspace2": reference_workspace,
-                           "Tolerance": 1e-6,
-                           "CheckInstrument": False,
-                           "CheckSample": False,
-                           "ToleranceRelErr": True,
-                           "CheckAllData": True,
-                           "CheckMasking": True,
-                           "CheckType": True,
-                           "CheckAxes": True,
-                           "CheckSpectraMap": check_spectra_map}
-        compare_alg = create_unmanaged_algorithm(compare_name, **compare_options)
-        compare_alg.setChild(False)
-        compare_alg.execute()
-        result = compare_alg.getProperty("Result").value
-        self.assertTrue(result)
 
+# ----------------------------------------------------------------------------------------------------------------------
+# Test version 1 of SANSSingleReduction
+# ----------------------------------------------------------------------------------------------------------------------
+class SANSSingleReductionTest(SingleReductionTest):
     def test_that_single_reduction_evaluates_LAB(self):
         # Arrange
         # Build the data information
@@ -185,17 +199,18 @@ class SANSSingleReductionTest(unittest.TestCase):
 
         # Compare the output of the reduction with the reference
         reference_file_name = "SANS2D_ws_D20_reference_LAB_1D.nxs"
-        self._compare_workspace(output_workspace, reference_file_name)
+        self._compare_to_reference(output_workspace, reference_file_name)
 
         calculated_transmission_reference_file = "SANS2D_ws_D20_calculated_transmission_reference_LAB.nxs"
         unfitted_transmission_reference_file = "SANS2D_ws_D20_unfitted_transmission_reference_LAB.nxs"
         calculated_transmission_reference_file_can = "SANS2D_ws_D20_calculated_transmission_reference_LAB_can.nxs"
         unfitted_transmission_reference_file_can = "SANS2D_ws_D20_unfitted_transmission_reference_LAB_can.nxs"
-        self._compare_workspace(calculated_transmission, calculated_transmission_reference_file, check_spectra_map=False)
-        self._compare_workspace(unfitted_transmission, unfitted_transmission_reference_file)
-        self._compare_workspace(calculated_transmission_can, calculated_transmission_reference_file_can,
-                                check_spectra_map=False)
-        self._compare_workspace(unfitted_transmission_can, unfitted_transmission_reference_file_can)
+        self._compare_to_reference(calculated_transmission, calculated_transmission_reference_file,
+                                   check_spectra_map=False)
+        self._compare_to_reference(unfitted_transmission, unfitted_transmission_reference_file)
+        self._compare_to_reference(calculated_transmission_can, calculated_transmission_reference_file_can,
+                                   check_spectra_map=False)
+        self._compare_to_reference(unfitted_transmission_can, unfitted_transmission_reference_file_can)
 
     def test_that_single_reduction_evaluates_HAB(self):
         # Arrange
@@ -249,7 +264,7 @@ class SANSSingleReductionTest(unittest.TestCase):
 
         # # Compare the output of the reduction with the reference
         reference_file_name = "SANS2D_ws_D20_reference_HAB_1D.nxs"
-        self._compare_workspace(output_workspace, reference_file_name)
+        self._compare_to_reference(output_workspace, reference_file_name)
 
     def test_that_single_reduction_evaluates_merged(self):
         # Arrange
@@ -314,7 +329,7 @@ class SANSSingleReductionTest(unittest.TestCase):
 
         # Compare the output of the reduction with the reference
         reference_file_name = "SANS2D_ws_D20_reference_Merged_1D.nxs"
-        self._compare_workspace(output_workspace, reference_file_name)
+        self._compare_to_reference(output_workspace, reference_file_name)
 
     def test_that_single_reduction_evaluates_LAB_for_2D_reduction(self):
         # Arrange
@@ -369,7 +384,292 @@ class SANSSingleReductionTest(unittest.TestCase):
 
         # Compare the output of the reduction with the reference
         reference_file_name = "SANS2D_ws_D20_reference_LAB_2D.nxs"
-        self._compare_workspace(output_workspace, reference_file_name)
+        self._compare_to_reference(output_workspace, reference_file_name)
+
+
+# ----------------------------------------------------------------------------------------------------------------------
+# Test version 2 of SANSSingleReduction, and compare it to version 1
+# ----------------------------------------------------------------------------------------------------------------------
+class SANSSingleReduction2Test(SingleReductionTest):
+    def _assert_group_workspace(self, workspace, n=2):
+        """
+        Check that a workspace is not None and that it contains n workspaces
+        """
+        self.assertNotEqual(workspace, None)
+        self.assertEqual(workspace.size(), n)
+
+    def test_that_single_reduction_evaluates_HAB(self):
+        # Arrange
+        # Build the data information
+        file_information_factory = SANSFileInformationFactory()
+        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")
+        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
+        data_builder.set_sample_scatter("SANS2D00034484")
+        data_builder.set_sample_transmission("SANS2D00034505")
+        data_builder.set_sample_direct("SANS2D00034461")
+        data_builder.set_can_scatter("SANS2D00034481")
+        data_builder.set_can_transmission("SANS2D00034502")
+        data_builder.set_can_direct("SANS2D00034461")
+
+        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
+        data_info = data_builder.build()
+
+        # Get the rest of the state from the user file
+        user_file_director = StateDirectorISIS(data_info, file_information)
+        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
+        # Set the reduction mode to HAB
+        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.HAB)
+        user_file_director.set_compatibility_builder_use_compatibility_mode(False)
+
+        # Add some event slices
+        user_file_director.set_slice_event_builder_start_time([0.00, 300.00])
+        user_file_director.set_slice_event_builder_end_time([300.00, 600.00])
+
+        # Construct
+        state = user_file_director.construct()
+
+        # Load the sample workspaces
+        sample, sample_monitor, transmission_workspace, direct_workspace, can, can_monitor,\
+        can_transmission, can_direct = self._load_workspace(state)  # noqa
+
+        # Act
+        output_settings = {"OutputWorkspaceHAB": EMPTY_NAME}
+        start_time = time.time()
+        single_reduction_v2_alg = self._run_single_reduction(state, sample_scatter=sample,
+                                                             sample_transmission=transmission_workspace,
+                                                             sample_direct=direct_workspace,
+                                                             sample_monitor=sample_monitor,
+                                                             can_scatter=can,
+                                                             can_monitor=can_monitor,
+                                                             can_transmission=can_transmission,
+                                                             can_direct=can_direct,
+                                                             output_settings=output_settings,
+                                                             event_slice_optimisation=True,
+                                                             save_can=True,
+                                                             use_optimizations=True)
+        version_2_execution_time = time.time() - start_time
+
+        # Check output workspaces
+        output_workspace = single_reduction_v2_alg.getProperty("OutputWorkspaceHAB").value
+        hab_can = single_reduction_v2_alg.getProperty("OutputWorkspaceHABCan").value
+        hab_sample = single_reduction_v2_alg.getProperty("OutputWorkspaceHABSample").value
+        hab_can_count = single_reduction_v2_alg.getProperty("OutputWorkspaceHABCanCount").value
+        hab_can_norm = single_reduction_v2_alg.getProperty("OutputWorkspaceHABCanNorm").value
+
+        self._assert_group_workspace(output_workspace)
+        self._assert_group_workspace(hab_can)
+        self._assert_group_workspace(hab_sample)
+        self._assert_group_workspace(hab_can_count)
+        self._assert_group_workspace(hab_can_norm)
+
+        # ---------------------------------------------------
+        # Comparison test with version 1
+        # This can be removed once version 2 has been adopted
+        # ---------------------------------------------------
+
+        # Run the first event slice
+        user_file_director.set_slice_event_builder_start_time([0.00])
+        user_file_director.set_slice_event_builder_end_time([300.00])
+        state = user_file_director.construct()
+
+        start_time = time.time()
+        single_reduction_alg_first_slice = self._run_single_reduction(state, sample_scatter=sample,
+                                                                      sample_transmission=transmission_workspace,
+                                                                      sample_direct=direct_workspace,
+                                                                      sample_monitor=sample_monitor,
+                                                                      can_scatter=can,
+                                                                      can_monitor=can_monitor,
+                                                                      can_transmission=can_transmission,
+                                                                      can_direct=can_direct,
+                                                                      output_settings=output_settings,
+                                                                      event_slice_optimisation=False,
+                                                                      save_can=True)
+        first_slice_execution_time = time.time() - start_time
+
+        # Run the second event slice
+        user_file_director.set_slice_event_builder_start_time([300.00])
+        user_file_director.set_slice_event_builder_end_time([600.00])
+        state = user_file_director.construct()
+
+        start_time = time.time()
+        single_reduction_alg_second_slice = self._run_single_reduction(state, sample_scatter=sample,
+                                                                       sample_transmission=transmission_workspace,
+                                                                       sample_direct=direct_workspace,
+                                                                       sample_monitor=sample_monitor,
+                                                                       can_scatter=can,
+                                                                       can_monitor=can_monitor,
+                                                                       can_transmission=can_transmission,
+                                                                       can_direct=can_direct,
+                                                                       output_settings=output_settings,
+                                                                       event_slice_optimisation=False,
+                                                                       save_can=True)
+        version_1_execution_time = time.time() - start_time + first_slice_execution_time
+
+        # Check that running version 2 once is quicker than running version 1 twice (once for each slice)
+        # version 2 has been so significantly quicker that multiple runs are not necessary to ensure this test
+        # does not sporadically fail. However, this check could be removed if this changes.
+        self.assertLess(version_2_execution_time, version_1_execution_time)
+
+        # Now compare output workspaces from the two versions
+        # Output HAB workspace
+        event_slice_output_workspace = single_reduction_v2_alg.getProperty("OutputWorkspaceHAB").value
+        first_slice_output_workspace = single_reduction_alg_first_slice.getProperty("OutputWorkspaceHAB").value
+        second_slice_output_workspace = single_reduction_alg_second_slice.getProperty("OutputWorkspaceHAB").value
+
+        self._compare_workspace(event_slice_output_workspace[0], first_slice_output_workspace, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_workspace[1], second_slice_output_workspace, tolerance=1e-6)
+
+        # HAB sample
+        event_slice_output_sample = single_reduction_v2_alg.getProperty("OutputWorkspaceHABSample").value
+        first_slice_output_sample = single_reduction_alg_first_slice.getProperty("OutputWorkspaceHABSample").value
+        second_slice_output_sample = single_reduction_alg_second_slice.getProperty("OutputWorkspaceHABSample").value
+
+        self._compare_workspace(event_slice_output_sample[0], first_slice_output_sample, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_sample[1], second_slice_output_sample, tolerance=1e-6)
+
+        # HAB can
+        event_slice_output_can = single_reduction_v2_alg.getProperty("OutputWorkspaceHABCan").value
+        first_slice_output_can = single_reduction_alg_first_slice.getProperty("OutputWorkspaceHABCan").value
+        second_slice_output_can = single_reduction_alg_second_slice.getProperty("OutputWorkspaceHABCan").value
+
+        self._compare_workspace(event_slice_output_can[0], first_slice_output_can, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_can[1], second_slice_output_can, tolerance=1e-6)
+
+    def test_that_single_reduction_evaluates_LAB(self):
+        # Arrange
+        # Build the data information
+        file_information_factory = SANSFileInformationFactory()
+        file_information = file_information_factory.create_sans_file_information("SANS2D00034484")
+        data_builder = get_data_builder(SANSFacility.ISIS, file_information)
+        data_builder.set_sample_scatter("SANS2D00034484")
+        data_builder.set_sample_transmission("SANS2D00034505")
+        data_builder.set_sample_direct("SANS2D00034461")
+        data_builder.set_can_scatter("SANS2D00034481")
+        data_builder.set_can_transmission("SANS2D00034502")
+        data_builder.set_can_direct("SANS2D00034461")
+
+        data_builder.set_calibration("TUBE_SANS2D_BOTH_31681_25Sept15.nxs")
+        data_info = data_builder.build()
+
+        # Get the rest of the state from the user file
+        user_file_director = StateDirectorISIS(data_info, file_information)
+        user_file_director.set_user_file("USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt")
+        # Set the reduction mode to LAB
+        user_file_director.set_reduction_builder_reduction_mode(ISISReductionMode.LAB)
+        user_file_director.set_compatibility_builder_use_compatibility_mode(False)
+
+        # Add some event slices
+        user_file_director.set_slice_event_builder_start_time([0.00, 300.00])
+        user_file_director.set_slice_event_builder_end_time([300.00, 600.00])
+
+        # Construct
+        state = user_file_director.construct()
+
+        # Load the sample workspaces
+        sample, sample_monitor, transmission_workspace, direct_workspace, can, can_monitor,\
+        can_transmission, can_direct = self._load_workspace(state)  # noqa
+
+        # Act
+        output_settings = {"OutputWorkspaceLAB": EMPTY_NAME}
+        start_time = time.time()
+        single_reduction_v2_alg = self._run_single_reduction(state, sample_scatter=sample,
+                                                             sample_transmission=transmission_workspace,
+                                                             sample_direct=direct_workspace,
+                                                             sample_monitor=sample_monitor,
+                                                             can_scatter=can,
+                                                             can_monitor=can_monitor,
+                                                             can_transmission=can_transmission,
+                                                             can_direct=can_direct,
+                                                             output_settings=output_settings,
+                                                             event_slice_optimisation=True,
+                                                             save_can=True,
+                                                             use_optimizations=True)
+        version_2_execution_time = time.time() - start_time
+
+        # Check output workspaces
+        output_workspace = single_reduction_v2_alg.getProperty("OutputWorkspaceLAB").value
+        lab_can = single_reduction_v2_alg.getProperty("OutputWorkspaceLABCan").value
+        lab_sample = single_reduction_v2_alg.getProperty("OutputWorkspaceLABSample").value
+        lab_can_count = single_reduction_v2_alg.getProperty("OutputWorkspaceLABCanCount").value
+        lab_can_norm = single_reduction_v2_alg.getProperty("OutputWorkspaceLABCanNorm").value
+
+        self._assert_group_workspace(output_workspace)
+        self._assert_group_workspace(lab_can)
+        self._assert_group_workspace(lab_sample)
+        self._assert_group_workspace(lab_can_count)
+        self._assert_group_workspace(lab_can_norm)
+
+        # ---------------------------------------------------
+        # Comparison test with version 1
+        # This can be removed once version 2 has been adopted
+        # ---------------------------------------------------
+        # Run the first event slice
+        user_file_director.set_slice_event_builder_start_time([0.00])
+        user_file_director.set_slice_event_builder_end_time([300.00])
+        state = user_file_director.construct()
+
+        start_time = time.time()
+        single_reduction_alg_first_slice = self._run_single_reduction(state, sample_scatter=sample,
+                                                                      sample_transmission=transmission_workspace,
+                                                                      sample_direct=direct_workspace,
+                                                                      sample_monitor=sample_monitor,
+                                                                      can_scatter=can,
+                                                                      can_monitor=can_monitor,
+                                                                      can_transmission=can_transmission,
+                                                                      can_direct=can_direct,
+                                                                      output_settings=output_settings,
+                                                                      event_slice_optimisation=False,
+                                                                      save_can=True)
+        first_slice_execution_time = time.time() - start_time
+
+        # Run the second event slice
+        user_file_director.set_slice_event_builder_start_time([300.00])
+        user_file_director.set_slice_event_builder_end_time([600.00])
+        state = user_file_director.construct()
+
+        start_time = time.time()
+        single_reduction_alg_second_slice = self._run_single_reduction(state, sample_scatter=sample,
+                                                                       sample_transmission=transmission_workspace,
+                                                                       sample_direct=direct_workspace,
+                                                                       sample_monitor=sample_monitor,
+                                                                       can_scatter=can,
+                                                                       can_monitor=can_monitor,
+                                                                       can_transmission=can_transmission,
+                                                                       can_direct=can_direct,
+                                                                       output_settings=output_settings,
+                                                                       event_slice_optimisation=False,
+                                                                       save_can=True)
+        version_1_execution_time = time.time() - start_time + first_slice_execution_time
+
+        # Check that running version 2 once is quicker than running version 1 twice (once for each slice)
+        # version 2 has been sufficiently quicker that multiple runs are not necessary to ensure this test
+        # does not sporadically fail. However, this check could be removed if this changes.
+        self.assertLess(version_2_execution_time, version_1_execution_time)
+
+        # Now compare output workspaces from the two versions
+        # Output LAB workspace
+        event_slice_output_workspace = single_reduction_v2_alg.getProperty("OutputWorkspaceLAB").value
+        first_slice_output_workspace = single_reduction_alg_first_slice.getProperty("OutputWorkspaceLAB").value
+        second_slice_output_workspace = single_reduction_alg_second_slice.getProperty("OutputWorkspaceLAB").value
+
+        self._compare_workspace(event_slice_output_workspace[0], first_slice_output_workspace, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_workspace[1], second_slice_output_workspace, tolerance=1e-6)
+
+        # LAB sample
+        event_slice_output_sample = single_reduction_v2_alg.getProperty("OutputWorkspaceLABSample").value
+        first_slice_output_sample = single_reduction_alg_first_slice.getProperty("OutputWorkspaceLABSample").value
+        second_slice_output_sample = single_reduction_alg_second_slice.getProperty("OutputWorkspaceLABSample").value
+
+        self._compare_workspace(event_slice_output_sample[0], first_slice_output_sample, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_sample[1], second_slice_output_sample, tolerance=1e-6)
+
+        # LAB can
+        event_slice_output_can = single_reduction_v2_alg.getProperty("OutputWorkspaceLABCan").value
+        first_slice_output_can = single_reduction_alg_first_slice.getProperty("OutputWorkspaceLABCan").value
+        second_slice_output_can = single_reduction_alg_second_slice.getProperty("OutputWorkspaceLABCan").value
+
+        self._compare_workspace(event_slice_output_can[0], first_slice_output_can, tolerance=1e-6)
+        self._compare_workspace(event_slice_output_can[1], second_slice_output_can, tolerance=1e-6)
 
 
 class SANSReductionRunnerTest(systemtesting.MantidSystemTest):
@@ -380,6 +680,7 @@ class SANSReductionRunnerTest(systemtesting.MantidSystemTest):
     def runTest(self):
         suite = unittest.TestSuite()
         suite.addTest(unittest.makeSuite(SANSSingleReductionTest, 'test'))
+        suite.addTest(unittest.makeSuite(SANSSingleReduction2Test, 'test'))
         runner = unittest.TextTestRunner()
         res = runner.run(suite)
         if res.wasSuccessful():
diff --git a/scripts/Interface/ui/sans_isis/sans_data_processor_gui.py b/scripts/Interface/ui/sans_isis/sans_data_processor_gui.py
index 147e5a7143e367e89f550dd67832f1f6c7fc7fb8..78f9b39bc5a7199906639e5e1ce451f016f96c7c 100644
--- a/scripts/Interface/ui/sans_isis/sans_data_processor_gui.py
+++ b/scripts/Interface/ui/sans_isis/sans_data_processor_gui.py
@@ -1084,11 +1084,14 @@ class SANSDataProcessorGui(QMainWindow,
     @compatibility_mode.setter
     def compatibility_mode(self, value):
         self.event_binning_group_box.setChecked(value)
-        if not value:
-            # If you uncheck it, post to logger, in
-            # case user didn't realise user file had
-            # turned it off
-            self.gui_logger.notice("Compatibility mode has been turned off.")
+
+    @property
+    def event_slice_optimisation(self):
+        return self.event_slice_optimisation_checkbox.isChecked()
+
+    @event_slice_optimisation.setter
+    def event_slice_optimisation(self, value):
+        self.event_slice_optimisation_checkbox.setChecked(value)
 
     @property
     def instrument(self):
diff --git a/scripts/Interface/ui/sans_isis/sans_data_processor_window.ui b/scripts/Interface/ui/sans_isis/sans_data_processor_window.ui
index 9456aa90b6c8b76631f179a8fa14c4507e9c725f..bb8f921f85dc5907a6b18ac1289ed04c75d45b92 100644
--- a/scripts/Interface/ui/sans_isis/sans_data_processor_window.ui
+++ b/scripts/Interface/ui/sans_isis/sans_data_processor_window.ui
@@ -854,20 +854,40 @@ QGroupBox::title {
                     <layout class="QGridLayout" name="gridLayout_2">
                      <item row="0" column="0">
                       <layout class="QGridLayout" name="slice_event_grid_layout">
-                       <item row="0" column="0">
+                       <item row="1" column="0">
                         <widget class="QLabel" name="slice_event_label">
                          <property name="text">
                           <string>Slices</string>
                          </property>
                         </widget>
                        </item>
-                       <item row="0" column="1">
+                       <item row="1" column="2">
                         <widget class="QLineEdit" name="slice_event_line_edit">
                          <property name="enabled">
                           <bool>true</bool>
                          </property>
                         </widget>
                        </item>
+                       <item row="0" column="0">
+                        <widget class="QLabel" name="event_slice_optimisation_label">
+                         <property name="toolTip">
+                          <string>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;If checked, the reduction workflow is changed to perform most operations on event workspaces before event slicing.  This speeds up reduction by not repeating operations for each event slice.&lt;/p&gt;&lt;p&gt;&lt;br/&gt;&lt;/p&gt;&lt;p&gt;Leaving this unchecked will perform reductions as before.&lt;/p&gt;&lt;p&gt;&lt;br/&gt;&lt;/p&gt;&lt;p&gt;Checking this, but also checking &lt;span style=&quot; font-weight:600;&quot;&gt;Use compatibility mode&lt;/span&gt;, will perform reductions as before.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</string>
+                         </property>
+                         <property name="text">
+                          <string>Optimize event slices</string>
+                         </property>
+                        </widget>
+                       </item>
+                       <item row="0" column="2">
+                        <widget class="QCheckBox" name="event_slice_optimisation_checkbox">
+                         <property name="toolTip">
+                          <string>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;If checked, the reduction workflow is changed to perform most operations on event workspaces before event slicing.  This speeds up reduction by not repeating operations for each event slice.&lt;/p&gt;&lt;p&gt;&lt;br/&gt;&lt;/p&gt;&lt;p&gt;Leaving this unchecked will perform reductions as before.&lt;/p&gt;&lt;p&gt;&lt;br/&gt;&lt;/p&gt;&lt;p&gt;Checking this, but also checking &lt;span style=&quot; font-weight:600;&quot;&gt;Use compatibility mode&lt;/span&gt;, will perform reductions as before.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</string>
+                         </property>
+                         <property name="text">
+                          <string/>
+                         </property>
+                        </widget>
+                       </item>
                       </layout>
                      </item>
                     </layout>
@@ -2381,7 +2401,6 @@ QGroupBox::title {
   <tabstop>q_resolution_delta_r_line_edit</tabstop>
   <tabstop>q_resolution_moderator_file_line_edit</tabstop>
   <tabstop>q_resolution_moderator_file_push_button</tabstop>
-  <tabstop>settings_tab_widget</tabstop>
   <tabstop>user_file_line_edit</tabstop>
   <tabstop>tab_choice_list</tabstop>
   <tabstop>batch_line_edit</tabstop>
diff --git a/scripts/SANS/sans/algorithm_detail/batch_execution.py b/scripts/SANS/sans/algorithm_detail/batch_execution.py
index cc33a240052ab2057b50f99bdfd21a7107023797..8a111699284bdb017a60fdc6453104a7895fcc53 100644
--- a/scripts/SANS/sans/algorithm_detail/batch_execution.py
+++ b/scripts/SANS/sans/algorithm_detail/batch_execution.py
@@ -5,10 +5,13 @@
 #     & Institut Laue - Langevin
 # SPDX - License - Identifier: GPL - 3.0 +
 from __future__ import (absolute_import, division, print_function)
+
 from copy import deepcopy
+
 from mantid.api import AnalysisDataService, WorkspaceGroup
-from sans.common.general_functions import (add_to_sample_log, create_managed_non_child_algorithm, create_unmanaged_algorithm,
-                                           get_output_name, get_base_name_from_multi_period_name, get_transmission_output_name)
+from sans.common.general_functions import (add_to_sample_log, create_managed_non_child_algorithm,
+                                           create_unmanaged_algorithm, get_output_name,
+                                           get_base_name_from_multi_period_name, get_transmission_output_name)
 from sans.common.enums import (SANSDataType, SaveType, OutputMode, ISISReductionMode, DataType)
 from sans.common.constants import (TRANS_SUFFIX, SANS_SUFFIX, ALL_PERIODS,
                                    LAB_CAN_SUFFIX, LAB_CAN_COUNT_SUFFIX, LAB_CAN_NORM_SUFFIX,
@@ -34,6 +37,34 @@ else:
 # ----------------------------------------------------------------------------------------------------------------------
 # Functions for the execution of a single batch iteration
 # ----------------------------------------------------------------------------------------------------------------------
+def select_reduction_alg(split_for_event_slices, use_compatibility_mode,
+                         event_slice_optimisation_selected, reduction_packages):
+    """
+    Select whether the data should be reduced via version 1 or 2 of SANSSingleReduction.
+    To use version 2, the reduction must be carried out with event slices, compatibility mode
+    must not have been switched on (via the sans_data_processor_gui), and event slice mode must have been switched on
+    (via the sans_data_processor_gui)
+    :param split_for_event_slices: bool. If true, event slicing will be carried out in the reduction
+    :param use_compatibility_mode: bool. If true, compatibility mode has been turned on
+    :param event_slice_optimisation_selected: bool. If true, event slice mode has been turned on
+    :param reduction_packages: a list of reduction package objects
+    :return: whether or not we're event slicing (bool); reduction packages
+    """
+    if (split_for_event_slices and not
+            use_compatibility_mode and
+            event_slice_optimisation_selected):
+        # If using compatibility mode we convert to histogram immediately after taking event slices,
+        # so would not be able to perform operations on event workspaces pre-slicing.
+        event_slice_optimisation = True
+    else:
+        event_slice_optimisation = False
+        if split_for_event_slices:
+            # Split into separate event slice workspaces here.
+            # For event_slice mode, this is done in SANSSingleReductionEventSlice
+            reduction_packages = split_reduction_packages_for_event_slice_packages(reduction_packages)
+    return event_slice_optimisation, reduction_packages
+
+
 def single_reduction_for_batch(state, use_optimizations, output_mode, plot_results, output_graph, save_can=False):
     """
     Runs a single reduction.
@@ -45,6 +76,9 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
     :param state: a SANSState object
     :param use_optimizations: if true then the optimizations of child algorithms are enabled.
     :param output_mode: the output mode
+    :param plot_results: bool. Whether or not workspaces should be plotted as they are reduced. Currently only works
+                         with event slice compatibility
+    :param output_graph: The graph object for plotting workspaces.
     :param save_can: bool. whether or not to save out can workspaces
     """
     # ------------------------------------------------------------------------------------------------------------------
@@ -64,17 +98,25 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
 
     # ------------------------------------------------------------------------------------------------------------------
     # Get reduction settings
-    # Split into individual bundles which can be reduced individually. We split here if we have multiple periods or
-    # sliced times for example.
+    # Split into individual bundles which can be reduced individually. We split here if we have multiple periods.
     # ------------------------------------------------------------------------------------------------------------------
     reduction_packages = get_reduction_packages(state, workspaces, monitors)
+    split_for_event_slices = reduction_packages_require_splitting_for_event_slices(reduction_packages)
+
+    event_slice_optimisation, reduction_packages = select_reduction_alg(split_for_event_slices,
+                                                                        state.compatibility.use_compatibility_mode,
+                                                                        state.compatibility.use_event_slice_optimisation,
+                                                                        reduction_packages)
+
     # ------------------------------------------------------------------------------------------------------------------
     # Run reductions (one at a time)
     # ------------------------------------------------------------------------------------------------------------------
     single_reduction_name = "SANSSingleReduction"
     single_reduction_options = {"UseOptimizations": use_optimizations,
                                 "SaveCan": save_can}
-    reduction_alg = create_managed_non_child_algorithm(single_reduction_name, **single_reduction_options)
+    alg_version = 2 if event_slice_optimisation else 1
+    reduction_alg = create_managed_non_child_algorithm(single_reduction_name, version=alg_version,
+                                                       **single_reduction_options)
     reduction_alg.setChild(False)
     # Perform the data reduction
     for reduction_package in reduction_packages:
@@ -82,7 +124,8 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
         # Set the properties on the algorithm
         # -----------------------------------
         set_properties_for_reduction_algorithm(reduction_alg, reduction_package,
-                                               workspace_to_name, workspace_to_monitor)
+                                               workspace_to_name, workspace_to_monitor,
+                                               event_slice_optimisation=event_slice_optimisation)
 
         # -----------------------------------
         #  Run the reduction
@@ -121,10 +164,14 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
         reduction_package.reduced_lab_sample = get_workspace_from_algorithm(reduction_alg, "OutputWorkspaceLABSample")
         reduction_package.reduced_hab_sample = get_workspace_from_algorithm(reduction_alg, "OutputWorkspaceHABSample")
 
-        reduction_package.out_scale_factor = reduction_alg.getProperty("OutScaleFactor").value
-        reduction_package.out_shift_factor = reduction_alg.getProperty("OutShiftFactor").value
+        out_scale_factor, out_shift_factor = get_shift_and_scale_factors_from_algorithm(reduction_alg,
+                                                                                        event_slice_optimisation)
+        reduction_package.out_scale_factor = out_scale_factor
+        reduction_package.out_shift_factor = out_shift_factor
 
-        if plot_results:
+        if not event_slice_optimisation and plot_results:
+            # Plot results is intended to show the result of each workspace/slice as it is reduced.
+            # As we reduce in bulk, it is not possible to plot live results while in event_slice mode.
             if PYQT4:
                 plot_workspace(reduction_package, output_graph)
             elif output_graph:
@@ -132,7 +179,8 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
         # -----------------------------------
         # The workspaces are already on the ADS, but should potentially be grouped
         # -----------------------------------
-        group_workspaces_if_required(reduction_package, output_mode, save_can)
+        group_workspaces_if_required(reduction_package, output_mode, save_can,
+                                     event_slice_optimisation=event_slice_optimisation)
 
     # --------------------------------
     # Perform output of all workspaces
@@ -147,10 +195,10 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
     #    * This means that we need to save out the reduced data
     #    * The data is already on the ADS, so do nothing
     if output_mode is OutputMode.SaveToFile:
-        save_to_file(reduction_packages, save_can)
+        save_to_file(reduction_packages, save_can, event_slice_optimisation=event_slice_optimisation)
         delete_reduced_workspaces(reduction_packages)
     elif output_mode is OutputMode.Both:
-        save_to_file(reduction_packages, save_can)
+        save_to_file(reduction_packages, save_can, event_slice_optimisation=event_slice_optimisation)
 
     # -----------------------------------------------------------------------
     # Clean up other workspaces if the optimizations have not been turned on.
@@ -158,8 +206,11 @@ def single_reduction_for_batch(state, use_optimizations, output_mode, plot_resul
     if not use_optimizations:
         delete_optimization_workspaces(reduction_packages, workspaces, monitors, save_can)
 
-    out_scale_factors = [reduction_package.out_scale_factor for reduction_package in reduction_packages]
-    out_shift_factors = [reduction_package.out_shift_factor for reduction_package in reduction_packages]
+    out_scale_factors = []
+    out_shift_factors = []
+    for reduction_package in reduction_packages:
+        out_scale_factors.extend(reduction_package.out_scale_factor)
+        out_shift_factors.extend(reduction_package.out_shift_factor)
 
     return out_scale_factors, out_shift_factors
 
@@ -188,7 +239,6 @@ def plot_workspace(reduction_package, output_graph):
 
     :param reduction_package: An object containing the reduced workspaces
     :param output_graph: Name to the plot window
-    :return: None
     """
     if reduction_package.reduction_mode == ISISReductionMode.All:
         graph_handle = plotSpectrum([reduction_package.reduced_hab, reduction_package.reduced_lab], 0,
@@ -213,7 +263,6 @@ def plot_workspace_matplotlib(reduction_package, output_graph):
 
     :param reduction_package: An object containing the reduced workspaces
     :param output_graph: A matplotlib fig
-    :return: None
     """
     plot_kwargs = {"scalex": True,
                    "scaley": True}
@@ -481,7 +530,9 @@ def get_reduction_packages(state, workspaces, monitors):
 
     There are several reasons why a state can (and should) split up:
     1. Multi-period files were loaded. This means that we need to perform one reduction per (loaded) period
-    2. Event slices were specified. This means that we need to perform one reduction per event slice.
+    2. Event slices were specified. We event slice after initial reduction has taken place, as some operations can
+            be performed before event slicing. We do this for more efficient reduction, as we are not performing the
+            same operations multiple times needlessly.
 
     :param state: A single state which potentially needs to be split up into several states
     :param workspaces: The workspaces contributing to the reduction
@@ -491,12 +542,6 @@ def get_reduction_packages(state, workspaces, monitors):
     # First: Split the state on a per-period basis
     reduction_packages = create_initial_reduction_packages(state, workspaces, monitors)
 
-    # Second: Split resulting reduction packages on a per-event-slice basis
-    # Note that at this point all reduction packages will have the same state information. They only differ in the
-    # workspaces that they use.
-    if reduction_packages_require_splitting_for_event_slices(reduction_packages):
-        reduction_packages = split_reduction_packages_for_event_slice_packages(reduction_packages)
-
     if reduction_packages_require_splitting_for_wavelength_range(reduction_packages):
         reduction_packages = split_reduction_packages_for_wavelength_range(reduction_packages)
     return reduction_packages
@@ -705,7 +750,8 @@ def get_workspace_for_index(index, workspace_list):
     return workspace
 
 
-def set_properties_for_reduction_algorithm(reduction_alg, reduction_package, workspace_to_name, workspace_to_monitor):
+def set_properties_for_reduction_algorithm(reduction_alg, reduction_package, workspace_to_name, workspace_to_monitor,
+                                           event_slice_optimisation=False):
     """
     Sets up everything necessary on the reduction algorithm.
 
@@ -713,12 +759,16 @@ def set_properties_for_reduction_algorithm(reduction_alg, reduction_package, wor
     :param reduction_package: a reduction package object
     :param workspace_to_name: the workspace to name map
     :param workspace_to_monitor: a workspace to monitor map
+    :param event_slice_optimisation: optional bool. If true then using SANSSingleReductionEventSlice algorithm.
+                        In this case, names and base names should not include time slice information.
     """
     def _set_output_name(_reduction_alg, _reduction_package, _is_group, _reduction_mode, _property_name,
                          _attr_out_name, _atrr_out_name_base, multi_reduction_type, _suffix=None, transmission=False):
         if not transmission:
+            # Use event_slice_optimisation from set_properties_for_reduction_algorithm scope
             _out_name, _out_name_base = get_output_name(_reduction_package.state, _reduction_mode, _is_group,
-                                                        multi_reduction_type=multi_reduction_type)
+                                                        multi_reduction_type=multi_reduction_type,
+                                                        event_slice_optimisation=event_slice_optimisation)
         else:
             _out_name, _out_name_base = get_transmission_output_name(_reduction_package.state, _reduction_mode
                                                                      , multi_reduction_type=multi_reduction_type)
@@ -800,14 +850,26 @@ def set_properties_for_reduction_algorithm(reduction_alg, reduction_package, wor
     is_part_of_multi_period_reduction = reduction_package.is_part_of_multi_period_reduction
     is_part_of_event_slice_reduction = reduction_package.is_part_of_event_slice_reduction
     is_part_of_wavelength_range_reduction = reduction_package.is_part_of_wavelength_range_reduction
-    is_group = is_part_of_multi_period_reduction or is_part_of_event_slice_reduction or is_part_of_wavelength_range_reduction
-    multi_reduction_type = {"period": is_part_of_multi_period_reduction, "event_slice": is_part_of_event_slice_reduction,
+    is_group = (is_part_of_multi_period_reduction or is_part_of_event_slice_reduction or
+                is_part_of_wavelength_range_reduction or event_slice_optimisation)
+    multi_reduction_type = {"period": is_part_of_multi_period_reduction,
+                            "event_slice": is_part_of_event_slice_reduction,
                             "wavelength_range": is_part_of_wavelength_range_reduction}
 
+    # SANSSingleReduction version 2 only properties
+    if event_slice_optimisation:
+        # In event slice mode, we can have multiple shift and scale factors for one reduction package
+        # there we output these as a workspace containing shifts as X data and scales as Y data.
+        reduction_alg.setProperty("OutShiftAndScaleFactor", "ShiftAndScaleFactors")
+        # Set properties used to generate names for workspaces within the output workspace groups
+        reduction_alg.setProperty("Period", is_part_of_multi_period_reduction)
+        reduction_alg.setProperty("WavelengthRange", is_part_of_wavelength_range_reduction)
+
     reduction_mode = reduction_package.reduction_mode
     if reduction_mode is ISISReductionMode.Merged:
         _set_output_name(reduction_alg, reduction_package, is_group, ISISReductionMode.Merged,
-                         "OutputWorkspaceMerged", "reduced_merged_name", "reduced_merged_base_name", multi_reduction_type)
+                         "OutputWorkspaceMerged", "reduced_merged_name", "reduced_merged_base_name",
+                         multi_reduction_type)
         _set_output_name(reduction_alg, reduction_package, is_group, ISISReductionMode.LAB,
                          "OutputWorkspaceLAB", "reduced_lab_name", "reduced_lab_base_name", multi_reduction_type)
         _set_output_name(reduction_alg, reduction_package, is_group, ISISReductionMode.HAB,
@@ -903,10 +965,32 @@ def get_workspace_from_algorithm(alg, output_property_name, add_logs=False, user
         return None
 
 
+def get_shift_and_scale_factors_from_algorithm(alg, event_slice_optimisation):
+    """
+    Retrieve the shift and scale factors from the algorithm. In event slice mode there can be multiple shift
+    and scale factors. These are output as a workspace containing scale and shift as X, Y data, respectively.
+    :param alg: The SingleReduction algorithm
+    :param event_slice_optimisation: bool. If true, then version 2 has been run, otherwise v1.
+    :return: a list of scale factors, a list of shift factors
+    """
+    if event_slice_optimisation:
+        factors_workspace = get_workspace_from_algorithm(alg, "OutShiftAndScaleFactor")
+        if factors_workspace is None:
+            return [], []
+        else:
+            scales = factors_workspace.readX(0)
+            shifts = factors_workspace.readY(0)
+            delete_alg = create_unmanaged_algorithm("DeleteWorkspace", **{"Workspace": "ShiftAndScaleFactors"})
+            delete_alg.execute()
+            return scales, shifts
+    else:
+        return [alg.getProperty("OutScaleFactor").value], [alg.getProperty("OutShiftFactor").value]
+
+
 # ----------------------------------------------------------------------------------------------------------------------
 # Functions for outputs to the ADS and saving the file
 # ----------------------------------------------------------------------------------------------------------------------
-def group_workspaces_if_required(reduction_package, output_mode, save_can):
+def group_workspaces_if_required(reduction_package, output_mode, save_can, event_slice_optimisation=False):
     """
     The output workspaces have already been published to the ADS by the algorithm. Now we might have to
     bundle them into a group if:
@@ -916,9 +1000,11 @@ def group_workspaces_if_required(reduction_package, output_mode, save_can):
     :param reduction_package: a list of reduction packages
     :param output_mode: one of OutputMode. SaveToFile, PublishToADS, Both.
     :param save_can: a bool. If true save out can and sample workspaces.
+    :param event_slice_optimisation: an optional bool. If true group_workspaces is being called on event sliced data, so the
+                        reduction_package contains grouped workspaces.
     """
     is_part_of_multi_period_reduction = reduction_package.is_part_of_multi_period_reduction
-    is_part_of_event_slice_reduction = reduction_package.is_part_of_event_slice_reduction
+    is_part_of_event_slice_reduction = reduction_package.is_part_of_event_slice_reduction or event_slice_optimisation
     is_part_of_wavelength_range_reduction = reduction_package.is_part_of_wavelength_range_reduction
     requires_grouping = is_part_of_multi_period_reduction or is_part_of_event_slice_reduction\
         or is_part_of_wavelength_range_reduction
@@ -977,25 +1063,35 @@ def add_to_group(workspace, name_of_group_workspace):
     """
     Creates a group workspace with the base name for the workspace
 
-    :param workspace: the workspace to add to the WorkspaceGroup
+    :param workspace: the workspace to add to the WorkspaceGroup. This can be a group workspace
     :param name_of_group_workspace: the name of the WorkspaceGroup
     """
     if workspace is None:
         return
     name_of_workspace = workspace.name()
     if AnalysisDataService.doesExist(name_of_group_workspace):
-        group_workspace = AnalysisDataService.retrieve(name_of_group_workspace)
-        if type(group_workspace) is WorkspaceGroup:
-            if not group_workspace.contains(name_of_workspace):
-                group_workspace.add(name_of_workspace)
+        add_to_group_when_group_workspace_exists_in_ADS(workspace, name_of_workspace, name_of_group_workspace)
+    else:
+        if type(workspace) is WorkspaceGroup:
+            if workspace.size() > 0:
+                rename_group_workspace(name_of_workspace, name_of_group_workspace)
         else:
-            group_name = "GroupWorkspaces"
-            group_options = {"InputWorkspaces": [name_of_workspace],
-                             "OutputWorkspace": name_of_group_workspace}
-            group_alg = create_unmanaged_algorithm(group_name, **group_options)
+            make_group_from_workspace(name_of_workspace, name_of_group_workspace)
+
 
-            group_alg.setAlwaysStoreInADS(True)
-            group_alg.execute()
+def add_to_group_when_group_workspace_exists_in_ADS(workspace, name_of_workspace, name_of_group_workspace):
+    """
+    Add a workspace to a group workspace, given that the group workspace already exists in the ADS.
+    :param workspace: A MatrixWorkspace or WorkspaceGroup object to add to the group
+    :param name_of_workspace: str. Name of workspace to add to group
+    :param name_of_group_workspace: str. Name of workspace group into which workspace should be added
+    """
+    group_workspace = AnalysisDataService.retrieve(name_of_group_workspace)
+    if type(group_workspace) is WorkspaceGroup:
+        if type(workspace) is WorkspaceGroup:
+            add_group_to_group(name_of_workspace, name_of_group_workspace)
+        elif not group_workspace.contains(name_of_workspace):
+            group_workspace.add(name_of_workspace)
     else:
         group_name = "GroupWorkspaces"
         group_options = {"InputWorkspaces": [name_of_workspace],
@@ -1006,14 +1102,67 @@ def add_to_group(workspace, name_of_group_workspace):
         group_alg.execute()
 
 
-def save_to_file(reduction_packages, save_can):
+def add_group_to_group(name_of_group_workspace, name_of_target_group_workspace):
+    """
+    Adds a group workspace to an existing group workspace.
+    This is used when using SANSSingleEventSlice algorithm, which returns group workspaces
+    containing the workspaces from each time slice.
+    :param name_of_group_workspace: str. The name of the group workspace
+    :param name_of_target_group_workspace: str. The name of the group workspace we want as output
+    :return:
+    """
+    if name_of_group_workspace == name_of_target_group_workspace:
+        # Do nothing as we already have the group workspace we want
+        return
+    group_name = "GroupWorkspaces"
+    group_options = {"InputWorkspaces": [name_of_target_group_workspace, name_of_group_workspace],
+                     "OutputWorkspace": name_of_target_group_workspace}
+    group_alg = create_unmanaged_algorithm(group_name, **group_options)
+    group_alg.setAlwaysStoreInADS(True)
+    group_alg.execute()
+
+
+def make_group_from_workspace(name_of_workspace, name_of_group_workspace):
+    """
+    Group a workspace into a group workspace which does not yet exist on the ADS
+    :param name_of_workspace: name of workspace to put into a group
+    :param name_of_group_workspace: name of group workspace to create
+    """
+    group_name = "GroupWorkspaces"
+    group_options = {"InputWorkspaces": [name_of_workspace],
+                     "OutputWorkspace": name_of_group_workspace}
+    group_alg = create_unmanaged_algorithm(group_name, **group_options)
+
+    group_alg.setAlwaysStoreInADS(True)
+    group_alg.execute()
+
+
+def rename_group_workspace(name_of_workspace, name_of_group_workspace):
+    """
+    Rename a group workspace
+    :param name_of_workspace: current name of group workspace
+    :param name_of_group_workspace: target name of group workspace
+    """
+    rename_name = "RenameWorkspace"
+    rename_options = {"InputWorkspace": name_of_workspace,
+                      "OutputWorkspace": name_of_group_workspace}
+    rename_alg = create_unmanaged_algorithm(rename_name, **rename_options)
+    rename_alg.setAlwaysStoreInADS(True)
+    rename_alg.execute()
+
+
+def save_to_file(reduction_packages, save_can, event_slice_optimisation=False):
     """
     Extracts all workspace names which need to be saved and saves them into a file.
 
-    @param reduction_packages: a list of reduction packages which contain all the relevant information for saving
-    @param save_can: a bool. When true save the unsubtracted can and sample workspaces
+    :param reduction_packages: a list of reduction packages which contain all the relevant information for saving
+    :param save_can: a bool. When true save the unsubtracted can and sample workspaces
+    :param event_slice_optimisation: an optional bool. If true then reduction packages contain event slice data
     """
-    workspaces_names_to_save = get_all_names_to_save(reduction_packages, save_can=save_can)
+    if not event_slice_optimisation:
+        workspaces_names_to_save = get_all_names_to_save(reduction_packages, save_can=save_can)
+    else:
+        workspaces_names_to_save = get_event_slice_names_to_save(reduction_packages, save_can=save_can)
 
     state = reduction_packages[0].state
     save_info = state.save
@@ -1032,7 +1181,8 @@ def delete_reduced_workspaces(reduction_packages, include_non_transmission=True)
     """
     Deletes all workspaces which would have been generated from a list of reduction packages.
 
-    @param reduction_packages: a list of reduction package
+    :param reduction_packages: a list of reduction package
+    :param include_non_transmission: an optional bool. If true then also delete reduced hab, lab, merged
     """
     def _delete_workspaces(_delete_alg, _workspaces):
         for _workspace in _workspaces:
@@ -1146,11 +1296,11 @@ def get_transmission_names_to_save(reduction_package, can):
 
 def get_all_names_to_save(reduction_packages, save_can):
     """
-    Extracts all the output names from a list of reduction packages. The main
+    Extracts all the output names from a list of reduction packages.
 
-    @param reduction_packages: a list of reduction packages
-    @param save_can: a bool, whether or not to save unsubtracted can workspace
-    @return: a list of workspace names to save.
+    :param reduction_packages: a list of reduction packages
+    :param save_can: a bool, whether or not to save unsubtracted can workspace
+    :return: a list of workspace names to save.
     """
     names_to_save = []
     for reduction_package in reduction_packages:
@@ -1194,6 +1344,57 @@ def get_all_names_to_save(reduction_packages, save_can):
     return set(names_to_save)
 
 
+def get_event_slice_names_to_save(reduction_packages, save_can):
+    """
+    Extracts all the output names from a list of reduction packages which contain event sliced data.
+    The workspaces in these reduction packages are group workspaces, except for transmissions.
+
+    :param reduction_packages: a list of reduction packages
+    :param save_can: a bool, whether or not to save unsubtracted can workspace
+    :return: a list of workspace names to save.
+    """
+    names_to_save = []
+    for reduction_package in reduction_packages:
+        reduced_lab = reduction_package.reduced_lab
+        reduced_hab = reduction_package.reduced_hab
+        reduced_merged = reduction_package.reduced_merged
+        reduced_lab_can = reduction_package.reduced_lab_can
+        reduced_hab_can = reduction_package.reduced_hab_can
+        reduced_lab_sample = reduction_package.reduced_lab_sample
+        reduced_hab_sample = reduction_package.reduced_hab_sample
+
+        reduced_lab_names = [] if reduced_lab is None else reduced_lab.getNames()
+        reduced_hab_names = [] if reduced_hab is None else reduced_hab.getNames()
+        reduced_merged_names = [] if reduced_merged is None else reduced_merged.getNames()
+        reduced_lab_can_names = [] if reduced_lab_can is None else reduced_lab_can.getNames()
+        reduced_hab_can_names = [] if reduced_hab_can is None else reduced_hab_can.getNames()
+        reduced_lab_sample_names = [] if reduced_lab_sample is None else reduced_lab_sample.getNames()
+        reduced_hab_sample_names = [] if reduced_hab_sample is None else reduced_hab_sample.getNames()
+
+        trans_name = get_transmission_names_to_save(reduction_package, False)
+        trans_can_name = get_transmission_names_to_save(reduction_package, True)
+
+        def _get_names_in_list(_list, _trans_name, _trans_can_name):
+            return ((name, _trans_name, _trans_can_name) for name in _list if name not in (None, ""))
+
+        if save_can:
+            names_to_save.extend(_get_names_in_list(reduced_merged_names, trans_name, trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_lab_names, trans_name, trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_hab_names, trans_name, trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_lab_can_names, '', trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_hab_can_names, '', trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_lab_sample_names, trans_name, ''))
+            names_to_save.extend(_get_names_in_list(reduced_hab_sample_names, trans_name, ''))
+        elif reduced_merged:
+            names_to_save.extend(_get_names_in_list(reduced_merged_names, trans_name, trans_can_name))
+        else:
+            names_to_save.extend(_get_names_in_list(reduced_lab_names, trans_name, trans_can_name))
+            names_to_save.extend(_get_names_in_list(reduced_hab_names, trans_name, trans_can_name))
+
+    # We might have some workspaces as duplicates (the group workspaces), so make them unique
+    return set(names_to_save)
+
+
 def save_workspace_to_file(workspace_name, file_formats, file_name,
                            transmission_name='', transmission_can_name=''):
     """
diff --git a/scripts/SANS/sans/algorithm_detail/bundles.py b/scripts/SANS/sans/algorithm_detail/bundles.py
index 705ac39184155ee647854e29a33afa0281f07ad7..eb54ea9c3c63d4850600a06a3afcda6e8bdf71a0 100644
--- a/scripts/SANS/sans/algorithm_detail/bundles.py
+++ b/scripts/SANS/sans/algorithm_detail/bundles.py
@@ -49,3 +49,11 @@ OutputPartsBundle = namedtuple('OutputPartsBundle', 'state, data_type, reduction
 
 OutputTransmissionBundle = namedtuple('OutputTransmissionBundle', 'state, data_type, calculated_transmission_workspace,'
                                                                   ' unfitted_transmission_workspace')
+
+# Bundles for event slice data
+EventSliceSettingBundle = namedtuple('EventSliceSettingBundle', 'state, data_type, reduction_mode, '
+                                                                'output_parts, scatter_workspace, '
+                                                                'dummy_mask_workspace, '
+                                                                'scatter_monitor_workspace, '
+                                                                'direct_workspace, '
+                                                                'transmission_workspace')
diff --git a/scripts/SANS/sans/algorithm_detail/single_execution.py b/scripts/SANS/sans/algorithm_detail/single_execution.py
index 1243a08b33d441f4c86b2385de89dafa92d7d5ca..4958a416d02992be4df44d4dd571e6ed16772d81 100644
--- a/scripts/SANS/sans/algorithm_detail/single_execution.py
+++ b/scripts/SANS/sans/algorithm_detail/single_execution.py
@@ -5,17 +5,121 @@
 #     & Institut Laue - Langevin
 # SPDX - License - Identifier: GPL - 3.0 +
 from __future__ import (absolute_import, division, print_function)
-from sans.common.constants import EMPTY_NAME
-from sans.common.general_functions import (create_child_algorithm,
-                                           write_hash_into_reduced_can_workspace,
-                                           get_reduced_can_workspace_from_ads, get_transmission_workspaces_from_ads)
-from sans.common.enums import (ISISReductionMode, DetectorType, DataType, OutputParts, TransmissionType)
-from sans.algorithm_detail.strip_end_nans_and_infs import strip_end_nans
-from sans.algorithm_detail.merge_reductions import (MergeFactory, is_sample, is_can)
-from sans.algorithm_detail.bundles import (OutputBundle, OutputPartsBundle, OutputTransmissionBundle)
-from mantid.kernel import mpisetup
+
 import sys
 
+from mantid.kernel import mpisetup
+from sans.algorithm_detail.bundles import (EventSliceSettingBundle, OutputBundle,
+                                           OutputPartsBundle, OutputTransmissionBundle)
+from sans.algorithm_detail.merge_reductions import (MergeFactory, is_sample, is_can)
+from sans.algorithm_detail.strip_end_nans_and_infs import strip_end_nans
+from sans.common.constants import EMPTY_NAME
+from sans.common.enums import (DataType, DetectorType, ISISReductionMode, OutputParts, TransmissionType)
+from sans.common.general_functions import (create_child_algorithm, get_reduced_can_workspace_from_ads,
+                                           get_transmission_workspaces_from_ads,
+                                           write_hash_into_reduced_can_workspace)
+
+
+def run_initial_event_slice_reduction(reduction_alg, reduction_setting_bundle):
+    """
+    This function runs the initial core reduction for event slice data. This is essentially half
+    a reduction (either sample or can), and is run before event slicing has been performed.
+
+    :param reduction_alg: a handle to the initial event slice reduction algorithm.
+    :param reduction_setting_bundle: a ReductionSettingBundle tuple
+    :return: a EventSliceReductionSettingBundle tuple
+    """
+    # Get component to reduce
+    component = get_component_to_reduce(reduction_setting_bundle)
+    # Set the properties on the reduction algorithms
+    serialized_state = reduction_setting_bundle.state.property_manager
+    reduction_alg.setProperty("SANSState", serialized_state)
+    reduction_alg.setProperty("Component", component)
+    reduction_alg.setProperty("ScatterWorkspace", reduction_setting_bundle.scatter_workspace)
+    reduction_alg.setProperty("ScatterMonitorWorkspace", reduction_setting_bundle.scatter_monitor_workspace)
+    reduction_alg.setProperty("DataType", DataType.to_string(reduction_setting_bundle.data_type))
+
+    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
+    reduction_alg.setProperty("OutputMonitorWorkspace", EMPTY_NAME)
+
+    # Run the reduction core
+    reduction_alg.execute()
+
+    # Get the results
+    output_workspace = reduction_alg.getProperty("OutputWorkspace").value
+    mask_workspace = reduction_alg.getProperty("DummyMaskWorkspace").value
+    output_monitor_workspace = reduction_alg.getProperty("OutputMonitorWorkspace").value
+
+    return EventSliceSettingBundle(state=reduction_setting_bundle.state,
+                                   data_type=reduction_setting_bundle.data_type,
+                                   reduction_mode=reduction_setting_bundle.reduction_mode,
+                                   output_parts=reduction_setting_bundle.output_parts,
+                                   scatter_workspace=output_workspace,
+                                   dummy_mask_workspace=mask_workspace,
+                                   scatter_monitor_workspace=output_monitor_workspace,
+                                   direct_workspace=reduction_setting_bundle.direct_workspace,
+                                   transmission_workspace=reduction_setting_bundle.transmission_workspace)
+
+
+def run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle):
+    """
+    This function runs a core reduction for event slice data. This reduction slices by event time and converts to q.
+    All other operations, such as moving and converting to histogram, have been performed before the event slicing.
+
+    :param reduction_alg: a handle to the reduction algorithm.
+    :param reduction_setting_bundle: a ReductionSettingBundle tuple
+    :return: an OutputBundle and an OutputPartsBundle
+    """
+
+    # Get component to reduce
+    component = get_component_to_reduce(reduction_setting_bundle)
+    # Set the properties on the reduction algorithms
+    serialized_state = reduction_setting_bundle.state.property_manager
+    reduction_alg.setProperty("SANSState", serialized_state)
+    reduction_alg.setProperty("Component", component)
+    reduction_alg.setProperty("ScatterWorkspace", reduction_setting_bundle.scatter_workspace)
+    reduction_alg.setProperty("DirectWorkspace", reduction_setting_bundle.direct_workspace)
+    reduction_alg.setProperty("TransmissionWorkspace", reduction_setting_bundle.transmission_workspace)
+    reduction_alg.setProperty("DummyMaskWorkspace", reduction_setting_bundle.dummy_mask_workspace)
+    reduction_alg.setProperty("ScatterMonitorWorkspace", reduction_setting_bundle.scatter_monitor_workspace)
+
+    reduction_alg.setProperty("DataType", DataType.to_string(reduction_setting_bundle.data_type))
+
+    reduction_alg.setProperty("OutputWorkspace", EMPTY_NAME)
+    reduction_alg.setProperty("SumOfCounts", EMPTY_NAME)
+    reduction_alg.setProperty("SumOfNormFactors", EMPTY_NAME)
+
+    # Run the reduction core
+    reduction_alg.execute()
+
+    # Get the results
+    output_workspace = reduction_alg.getProperty("OutputWorkspace").value
+    output_workspace_count = reduction_alg.getProperty("SumOfCounts").value
+    output_workspace_norm = reduction_alg.getProperty("SumOfNormFactors").value
+    output_calculated_transmission_workspace = reduction_alg.getProperty("CalculatedTransmissionWorkspace").value
+    output_unfitted_transmission_workspace = reduction_alg.getProperty("UnfittedTransmissionWorkspace").value
+
+    # Pull the result out of the workspace
+    output_bundle = OutputBundle(state=reduction_setting_bundle.state,
+                                 data_type=reduction_setting_bundle.data_type,
+                                 reduction_mode=reduction_setting_bundle.reduction_mode,
+                                 output_workspace=output_workspace)
+
+    output_parts_bundle = OutputPartsBundle(state=reduction_setting_bundle.state,
+                                            data_type=reduction_setting_bundle.data_type,
+                                            reduction_mode=reduction_setting_bundle.reduction_mode,
+                                            output_workspace_count=output_workspace_count,
+                                            output_workspace_norm=output_workspace_norm)
+
+    output_transmission_bundle = OutputTransmissionBundle(state=reduction_setting_bundle.state,
+                                                          data_type=reduction_setting_bundle.data_type,
+                                                          calculated_transmission_workspace=
+                                                          output_calculated_transmission_workspace,
+                                                          unfitted_transmission_workspace=
+                                                          output_unfitted_transmission_workspace,
+                                                          )
+    return output_bundle, output_parts_bundle, output_transmission_bundle
+
 
 def run_core_reduction(reduction_alg, reduction_setting_bundle):
     """
@@ -208,13 +312,14 @@ def get_component_to_reduce(reduction_setting_bundle):
     return reduction_mode_setting
 
 
-def run_optimized_for_can(reduction_alg, reduction_setting_bundle):
+def run_optimized_for_can(reduction_alg, reduction_setting_bundle, event_slice_optimisation=False):
     """
     Check if the state can reduction already exists, and if so, use it else reduce it and add it to the ADS.
 
-    @param reduction_alg: a handle to the SANSReductionCore algorithm
-    @param reduction_setting_bundle: a ReductionSettingBundle tuple.
-    @return: a reduced workspace, a partial output workspace for the counts, a partial workspace for the normalization.
+    :param reduction_alg: a handle to the SANSReductionCore algorithm
+    :param reduction_setting_bundle: a ReductionSettingBundle tuple.
+    :param event_slice_optimisation: An optional bool. If true then run run_core_event_slice_reduction, else run_core_reduction.
+    :return: a reduced workspace, a partial output workspace for the counts, a partial workspace for the normalization.
     """
     state = reduction_setting_bundle.state
     output_parts = reduction_setting_bundle.output_parts
@@ -262,8 +367,12 @@ def run_optimized_for_can(reduction_alg, reduction_setting_bundle):
 
     if must_reload:
         # if output_bundle.output_workspace is None or partial_output_require_reload:
-        output_bundle, output_parts_bundle, output_transmission_bundle = run_core_reduction(reduction_alg,
-                                                                                            reduction_setting_bundle)
+        if not event_slice_optimisation:
+            output_bundle, output_parts_bundle, \
+                output_transmission_bundle = run_core_reduction(reduction_alg, reduction_setting_bundle)
+        else:
+            output_bundle, output_parts_bundle, \
+                output_transmission_bundle = run_core_event_slice_reduction(reduction_alg, reduction_setting_bundle)
 
         # Now we need to tag the workspaces and add it to the ADS
         if output_bundle.output_workspace is not None:
diff --git a/scripts/SANS/sans/common/general_functions.py b/scripts/SANS/sans/common/general_functions.py
index 502464efe486b80ccfe3ac24b5609a1e7dba7376..a90f3da156091b352081f5a6570dc40bc5c06353 100644
--- a/scripts/SANS/sans/common/general_functions.py
+++ b/scripts/SANS/sans/common/general_functions.py
@@ -98,15 +98,19 @@ def get_single_valued_logs_from_workspace(workspace, log_names, log_types, conve
     return log_results
 
 
-def create_unmanaged_algorithm(name, **kwargs):
+def create_unmanaged_algorithm(name, version=None, **kwargs):
     """
     Creates an unmanaged child algorithm and initializes it.
 
     :param name: the name of the algorithm
+    :param version: optional int. The version of the algorithm to use.
     :param kwargs: settings for the algorithm
     :return: an initialized algorithm instance.
     """
-    alg = AlgorithmManager.createUnmanaged(name)
+    if version is not None:
+        alg = AlgorithmManager.createUnmanaged(name, version)
+    else:
+        alg = AlgorithmManager.createUnmanaged(name)
     alg.initialize()
     alg.setChild(True)
     alg.setRethrows(True)
@@ -115,15 +119,20 @@ def create_unmanaged_algorithm(name, **kwargs):
     return alg
 
 
-def create_managed_non_child_algorithm(name, **kwargs):
+def create_managed_non_child_algorithm(name, version=None, **kwargs):
     """
     Creates a managed child algorithm and initializes it.
 
     :param name: the name of the algorithm
+    :param version: optional int. The version of the algorithm to use.
     :param kwargs: settings for the algorithm
     :return: an initialized algorithm instance.
     """
-    alg = AlgorithmManager.create(name)
+    if version is not None:
+        alg = AlgorithmManager.create(name, version)
+    else:
+        # Let the algorithm pick the most recent version
+        alg = AlgorithmManager.create(name)
     alg.initialize()
     alg.setChild(False)
     alg.setRethrows(True)
@@ -609,7 +618,8 @@ def get_ranges_for_rebin_array(rebin_array):
 # ----------------------------------------------------------------------------------------------------------------------
 # Functions related to workspace names
 # ----------------------------------------------------------------------------------------------------------------------
-def get_standard_output_workspace_name(state, reduction_data_type, data_type = DataType.to_string(DataType.Sample)):
+def get_standard_output_workspace_name(state, reduction_data_type, data_type = DataType.to_string(DataType.Sample),
+                                       include_slice_limits=True):
     """
     Creates the name of the output workspace from a state object.
 
@@ -621,9 +631,11 @@ def get_standard_output_workspace_name(state, reduction_data_type, data_type = D
     5. A wavelength range: wavelength_low + "_" + wavelength_high
     6. In case of a 1D reduction, then add phi limits
     7. If we are dealing with an actual slice limit, then specify it: "_tXX_TYY" Note that the time set to
-       two decimals
+       two decimals. This is not included if creating a name for a workspace in "event_slice" mode, as the
+       slice limit information for names is calculated in SANSSingleReductionEventSlice
     :param state: a SANSState object
     :param reduction_data_type: which reduced data type is being looked at, ie HAB, LAB or Merged
+    :param include_slice_limits: optional bool. If True, add slice limits to the name.
     :return: the name of the reduced workspace, and the base name fo the reduced workspace
     """
     # 1. Short run number
@@ -678,7 +690,7 @@ def get_standard_output_workspace_name(state, reduction_data_type, data_type = D
     slice_state = state.slice
     start_time = slice_state.start_time
     end_time = slice_state.end_time
-    if start_time and end_time:
+    if start_time and end_time and include_slice_limits:
         start_time_as_string = '_t%.2f' % start_time[0]
         end_time_as_string = '_T%.2f' % end_time[0]
     else:
@@ -726,7 +738,8 @@ def get_transmission_output_name(state, data_type=DataType.Sample, multi_reducti
     return output_name, output_base_name
 
 
-def get_output_name(state, reduction_mode, is_group, suffix="", multi_reduction_type=None):
+def get_output_name(state, reduction_mode, is_group, suffix="", multi_reduction_type=None,
+                    event_slice_optimisation=False):
     # Get the external settings from the save state
     save_info = state.save
     user_specified_output_name = save_info.user_specified_output_name
@@ -740,7 +753,9 @@ def get_output_name(state, reduction_mode, is_group, suffix="", multi_reduction_
         use_reduction_mode_as_suffix = True
 
     # Get the standard workspace name
-    workspace_name, workspace_base_name = get_standard_output_workspace_name(state, reduction_mode)
+    workspace_name, \
+        workspace_base_name = get_standard_output_workspace_name(state, reduction_mode,
+                                                                 include_slice_limits=(not event_slice_optimisation))
 
     # If user specified output name is not none then we use it for the base name
     if user_specified_output_name and not is_group:
diff --git a/scripts/SANS/sans/gui_logic/models/state_gui_model.py b/scripts/SANS/sans/gui_logic/models/state_gui_model.py
index bcb4f8d4b4c029e8f7c6518acb0e8f3cac386830..794c1e4edfa08b68e807074102d6686ae9e0d41d 100644
--- a/scripts/SANS/sans/gui_logic/models/state_gui_model.py
+++ b/scripts/SANS/sans/gui_logic/models/state_gui_model.py
@@ -69,6 +69,14 @@ class StateGuiModel(object):
     def compatibility_mode(self, value):
         self.set_simple_element(element_id=OtherId.use_compatibility_mode, value=value)
 
+    @property
+    def event_slice_optimisation(self):
+        return self.get_simple_element(element_id=OtherId.use_event_slice_optimisation, default_value=False)
+
+    @event_slice_optimisation.setter
+    def event_slice_optimisation(self, value):
+        self.set_simple_element(element_id=OtherId.use_event_slice_optimisation, value=value)
+
     # ------------------------------------------------------------------------------------------------------------------
     # Save Options
     # ------------------------------------------------------------------------------------------------------------------
diff --git a/scripts/SANS/sans/gui_logic/presenter/run_tab_presenter.py b/scripts/SANS/sans/gui_logic/presenter/run_tab_presenter.py
index 0ec5aa2a16501a2c3298ea39c3ba9a8d8baab2cb..87852b0635bebde7df950124275cf11aa8d8888e 100644
--- a/scripts/SANS/sans/gui_logic/presenter/run_tab_presenter.py
+++ b/scripts/SANS/sans/gui_logic/presenter/run_tab_presenter.py
@@ -1171,6 +1171,7 @@ class RunTabPresenter(object):
         self._set_on_state_model("zero_error_free", state_model)
         self._set_on_state_model("save_types", state_model)
         self._set_on_state_model("compatibility_mode", state_model)
+        self._set_on_state_model("event_slice_optimisation", state_model)
         self._set_on_state_model("merge_scale", state_model)
         self._set_on_state_model("merge_shift", state_model)
         self._set_on_state_model("merge_scale_fit", state_model)
diff --git a/scripts/SANS/sans/state/compatibility.py b/scripts/SANS/sans/state/compatibility.py
index b0454e05b980ddc6aa1944584ecab7e631b9f331..6316b81bb76c7bda2d45248c7bd41c085c1b1f7c 100644
--- a/scripts/SANS/sans/state/compatibility.py
+++ b/scripts/SANS/sans/state/compatibility.py
@@ -26,10 +26,12 @@ from sans.common.enums import SANSFacility
 class StateCompatibility(StateBase):
     use_compatibility_mode = BoolParameter()
     time_rebin_string = StringParameter()
+    use_event_slice_optimisation = BoolParameter()
 
     def __init__(self):
         super(StateCompatibility, self).__init__()
         self.use_compatibility_mode = False
+        self.use_event_slice_optimisation = False
         self.time_rebin_string = ""
 
     def validate(self):
diff --git a/scripts/SANS/sans/test_helper/mock_objects.py b/scripts/SANS/sans/test_helper/mock_objects.py
index 8c68032c62475f5d5503657a9e64662e1b429919..72e334da3ded005474bdf4563fdbc85cef562610 100644
--- a/scripts/SANS/sans/test_helper/mock_objects.py
+++ b/scripts/SANS/sans/test_helper/mock_objects.py
@@ -211,6 +211,9 @@ def create_mock_view(user_file_path, batch_file_path=None, row_user_file_path=""
     _instrument = mock.PropertyMock(return_value=SANSInstrument.SANS2D)
     type(view).instrument = _instrument
 
+    _event_slice_optimisation = mock.PropertyMock(return_value=False)
+    type(view).event_slice_optimisation = _event_slice_optimisation
+
     return view, settings_diagnostic_tab, masking_table
 
 
diff --git a/scripts/SANS/sans/user_file/settings_tags.py b/scripts/SANS/sans/user_file/settings_tags.py
index b596877cf74bf328d467141a51388e9e08c36f1b..40037e2781186869f7f04533b545b5324e560407 100644
--- a/scripts/SANS/sans/user_file/settings_tags.py
+++ b/scripts/SANS/sans/user_file/settings_tags.py
@@ -143,6 +143,7 @@ class BackId(object):
 @serializable_enum("reduction_dimensionality", "use_full_wavelength_range", "event_slices",
                    "use_compatibility_mode", "save_types", "save_as_zero_error_free", "user_specified_output_name",
                    "user_specified_output_name_suffix", "use_reduction_mode_as_suffix", "sample_width", "sample_height",
-                   "sample_thickness", "sample_shape", "merge_mask", "merge_min", "merge_max", "wavelength_range")
+                   "sample_thickness", "sample_shape", "merge_mask", "merge_min", "merge_max", "wavelength_range",
+                   "use_event_slice_optimisation")
 class OtherId(object):
     pass
diff --git a/scripts/SANS/sans/user_file/state_director.py b/scripts/SANS/sans/user_file/state_director.py
index 1e7d2886368dabd8c1ba19fe3337d8abcde2dfd2..6d86bbba469c7bcae92b89fda36a80b1cc97d151 100644
--- a/scripts/SANS/sans/user_file/state_director.py
+++ b/scripts/SANS/sans/user_file/state_director.py
@@ -1281,6 +1281,12 @@ class StateDirectorISIS(object):
             use_compatibility_mode = use_compatibility_mode[-1]
             self._compatibility_builder.set_use_compatibility_mode(use_compatibility_mode)
 
+        if OtherId.use_event_slice_optimisation in user_file_items:
+            use_event_slice_optimisation = user_file_items[OtherId.use_event_slice_optimisation]
+            check_if_contains_only_one_element(use_event_slice_optimisation, OtherId.use_event_slice_optimisation)
+            use_event_slice_optimisation = use_event_slice_optimisation[-1]
+            self._compatibility_builder.set_use_event_slice_optimisation(use_event_slice_optimisation)
+
     def _set_up_save(self, user_file_items):
         if OtherId.save_types in user_file_items:
             save_types = user_file_items[OtherId.save_types]
diff --git a/scripts/test/SANS/algorithm_detail/batch_execution_test.py b/scripts/test/SANS/algorithm_detail/batch_execution_test.py
index 1ac5ad42e04133f485ec7fc453e5045de5050cf0..77117cc87cb2f743366bb44eb02649129f096525 100644
--- a/scripts/test/SANS/algorithm_detail/batch_execution_test.py
+++ b/scripts/test/SANS/algorithm_detail/batch_execution_test.py
@@ -10,7 +10,8 @@ import unittest
 
 from mantid.simpleapi import CreateSampleWorkspace
 from mantid.py3compat import mock
-from sans.algorithm_detail.batch_execution import get_all_names_to_save, get_transmission_names_to_save, ReductionPackage
+from sans.algorithm_detail.batch_execution import (get_all_names_to_save, get_transmission_names_to_save,
+                                                   ReductionPackage, select_reduction_alg)
 
 
 class ADSMock(object):
@@ -202,6 +203,44 @@ class GetAllNamesToSaveTest(unittest.TestCase):
 
         self.assertEqual(names_to_save, names_expected)
 
+    def test_does_not_use_event_slice_optimisation_when_not_requiring_event_slices(self):
+        require_event_slices = False
+        compatibility_mode = False
+        event_slice_optimisation_checkbox = True
+        actual_using_event_slice_optimisation, _ = select_reduction_alg(require_event_slices, compatibility_mode,
+                                                                        event_slice_optimisation_checkbox, [])
+        self.assertEqual(actual_using_event_slice_optimisation, False)
+
+    @mock.patch("sans.algorithm_detail.batch_execution.split_reduction_packages_for_event_slice_packages")
+    def test_does_not_use_event_slice_optimisation_when_compatibility_mode_turned_on(self, event_slice_splitter_mock):
+        require_event_slices = True
+        compatibility_mode = True
+        event_slice_optimisation_checkbox = True
+        actual_using_event_slice_optimisation, _ = select_reduction_alg(require_event_slices, compatibility_mode,
+                                                                        event_slice_optimisation_checkbox, [])
+        self.assertEqual(actual_using_event_slice_optimisation, False)
+        # Test that reduction packages have been split into event slices
+        event_slice_splitter_mock.assert_called_once_with([])
+
+    @mock.patch("sans.algorithm_detail.batch_execution.split_reduction_packages_for_event_slice_packages")
+    def test_does_not_use_event_slice_optimisation_when_optimisation_not_selected(self, event_slice_splitter_mock):
+        require_event_slices = True
+        compatibility_mode = False
+        event_slice_optimisation_checkbox = False
+        actual_using_event_slice_optimisation, _ = select_reduction_alg(require_event_slices, compatibility_mode,
+                                                                        event_slice_optimisation_checkbox, [])
+        self.assertEqual(actual_using_event_slice_optimisation, False)
+        # Test that reduction packages have been split into event slices
+        event_slice_splitter_mock.assert_called_once_with([])
+
+    def test_use_event_slice_optimisation_when_using_event_slice_optimisation_is_checked(self):
+        require_event_slices = True
+        compatibility_mode = False
+        event_slice_optimisation_checkbox = True
+        actual_using_event_slice_optimisation, _ = select_reduction_alg(require_event_slices, compatibility_mode,
+                                                                        event_slice_optimisation_checkbox, [])
+        self.assertEqual(actual_using_event_slice_optimisation, True)
+
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/scripts/test/SANS/common/general_functions_test.py b/scripts/test/SANS/common/general_functions_test.py
index fe36d6726e26e257981b41095661c5e24a50887d..b537f455e7b9b59ccd7877263423e5a69b0b5c47 100644
--- a/scripts/test/SANS/common/general_functions_test.py
+++ b/scripts/test/SANS/common/general_functions_test.py
@@ -5,21 +5,25 @@
 #     & Institut Laue - Langevin
 # SPDX - License - Identifier: GPL - 3.0 +
 from __future__ import (absolute_import, division, print_function)
+
 import unittest
-from mantid.kernel import (V3D, Quat)
+
 from mantid.api import AnalysisDataService, FrameworkManager
-from sans.common.general_functions import (quaternion_to_angle_and_axis, create_unmanaged_algorithm, add_to_sample_log,
+from mantid.kernel import (V3D, Quat)
+from mantid.py3compat import mock
+from sans.common.constants import (SANS2D, LOQ, LARMOR)
+from sans.common.enums import (ISISReductionMode, ReductionDimensionality, OutputParts,
+                               SANSInstrument, DetectorType, SANSFacility, DataType)
+from sans.common.general_functions import (quaternion_to_angle_and_axis, create_managed_non_child_algorithm,
+                                           create_unmanaged_algorithm, add_to_sample_log,
                                            get_standard_output_workspace_name, sanitise_instrument_name,
                                            get_reduced_can_workspace_from_ads, write_hash_into_reduced_can_workspace,
                                            convert_instrument_and_detector_type_to_bank_name,
                                            convert_bank_name_to_detector_type_isis,
                                            get_facility, parse_diagnostic_settings, get_transmission_output_name,
                                            get_output_name)
-from sans.common.constants import (SANS2D, LOQ, LARMOR)
-from sans.common.enums import (ISISReductionMode, ReductionDimensionality, OutputParts,
-                               SANSInstrument, DetectorType, SANSFacility, DataType)
-from sans.test_helper.test_director import TestDirector
 from sans.state.data import StateData
+from sans.test_helper.test_director import TestDirector
 
 
 class SANSFunctionsTest(unittest.TestCase):
@@ -179,6 +183,15 @@ class SANSFunctionsTest(unittest.TestCase):
         # Assert
         self.assertEqual("12345rear_1D_12.0_34.0Phi12.0_56.0_t4.57_T12.37",  output_workspace)
 
+    def test_that_can_switch_off_including_slice_limits_in_standard_output_workspace_name(self):
+        # Arrange
+        state = SANSFunctionsTest._get_state()
+        # Act
+        output_workspace, _ = get_standard_output_workspace_name(state, ISISReductionMode.LAB,
+                                                                 include_slice_limits=False)
+        # Assert
+        self.assertTrue("12345rear_1D_12.0_34.0Phi12.0_56.0" == output_workspace)
+
     def test_that_get_transmission_output_name_returns_correct_name_for_user_specified_workspace(self):
         # Arrange
         state = SANSFunctionsTest._get_state()
@@ -541,6 +554,14 @@ class SANSFunctionsTest(unittest.TestCase):
         self.assertEqual(output_name, '12345rear_1D_12.0_34.0Phi12.0_56.0_t4.57_T12.37')
         self.assertEqual(group_output_name, '12345rear_1DPhi12.0_56.0')
 
+    @mock.patch("sans.common.general_functions.AlgorithmManager")
+    def test_that_can_create_versioned_managed_non_child_algorithms(self, alg_manager_mock):
+        create_managed_non_child_algorithm("TestAlg", version=2, **{"test_val": 5})
+        alg_manager_mock.create.assert_called_once_with("TestAlg", 2)
+
+        alg_manager_mock.reset_mock()
+        create_managed_non_child_algorithm("TestAlg", **{"test_val": 5})
+        alg_manager_mock.create.assert_called_once_with("TestAlg")
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/scripts/test/SANS/gui_logic/run_tab_presenter_test.py b/scripts/test/SANS/gui_logic/run_tab_presenter_test.py
index a58c5f829a9ea2d019dabafb5ee98b1c4bc8a4b9..a3be599d4e336f48402bbeb836ce96fc70604bee 100644
--- a/scripts/test/SANS/gui_logic/run_tab_presenter_test.py
+++ b/scripts/test/SANS/gui_logic/run_tab_presenter_test.py
@@ -1140,7 +1140,7 @@ class RunTabPresenterTest(unittest.TestCase):
             if PropertyManagerDataService.doesExist(element):
                 PropertyManagerDataService.remove(element)
 
-    def _get_files_and_mock_presenter(self, content, is_multi_period=True, row_user_file_path = ""):
+    def _get_files_and_mock_presenter(self, content, is_multi_period=True, row_user_file_path=""):
         if row_user_file_path:
             content[1].update({BatchReductionEntry.UserFile : row_user_file_path})
 
diff --git a/scripts/test/SANS/gui_logic/state_gui_model_test.py b/scripts/test/SANS/gui_logic/state_gui_model_test.py
index ffa34f90a56548aaf6fb6e2c9042127cdd9fd525..94816f95f5c95fb3470f6fabaae7f801374d7857 100644
--- a/scripts/test/SANS/gui_logic/state_gui_model_test.py
+++ b/scripts/test/SANS/gui_logic/state_gui_model_test.py
@@ -32,8 +32,17 @@ class StateGuiModelTest(unittest.TestCase):
 
     def test_that_can_set_compatibility_mode(self):
         state_gui_model = StateGuiModel({"test": [1]})
-        state_gui_model.compatibility_mode = True
-        self.assertTrue(state_gui_model.compatibility_mode)
+        state_gui_model.compatibility_mode = False
+        self.assertFalse(state_gui_model.compatibility_mode)
+
+    def test_that_default_event_slice_optimisation_is_false(self):
+        state_gui_model = StateGuiModel({"test": [1]})
+        self.assertFalse(state_gui_model.event_slice_optimisation)
+
+    def test_that_can_set_event_slice_optimisation(self):
+        state_gui_model = StateGuiModel({"test": [1]})
+        state_gui_model.event_slice_optimisation = True
+        self.assertTrue(state_gui_model.event_slice_optimisation)
 
     # ------------------------------------------------------------------------------------------------------------------
     # Save options