diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3a4bc826f7138a0a5fb2230eb129f61debba476
--- /dev/null
+++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py
@@ -0,0 +1,561 @@
+#pylint: disable=no-init,invalid-name
+import time
+import math
+import os
+from mantid.api import *
+from mantid.simpleapi import *
+from mantid.kernel import *
+
+class LiquidsReflectometryReduction(PythonAlgorithm):
+
+    def category(self):
+        return "Reflectometry\\SNS"
+
+    def name(self):
+        return "LiquidsReflectometryReduction"
+
+    def version(self):
+        return 1
+
+    def summary(self):
+        return "Liquids Reflectometer (REFL) reduction"
+
+    def PyInit(self):
+        self.declareProperty(IntArrayProperty("RunNumbers"), "List of run numbers to process")
+        self.declareProperty("NormalizationRunNumber", 0, "Run number of the normalization run to use")
+        self.declareProperty(IntArrayProperty("SignalPeakPixelRange"), "Pixel range defining the data peak")
+        self.declareProperty("SubtractSignalBackground", True,
+                             doc='If true, the background will be subtracted from the data peak')
+        self.declareProperty(IntArrayProperty("SignalBackgroundPixelRange", [123, 137],
+                                              IntArrayLengthValidator(2), direction=Direction.Input),
+                                              "Pixelrange defining the background. Default:(123,137)")
+        self.declareProperty("NormFlag", True, doc="If true, the data will be normalized")
+        self.declareProperty(IntArrayProperty("NormPeakPixelRange", [127, 133],
+                                              IntArrayLengthValidator(2), direction=Direction.Input),
+                                              "Pixel range defining the normalization peak")
+        self.declareProperty("SubtractNormBackground", True,
+                             doc="If true, the background will be subtracted from the normalization peak")
+        self.declareProperty(IntArrayProperty("NormBackgroundPixelRange", [127, 137],
+                                              IntArrayLengthValidator(2), direction=Direction.Input),
+                                              "Pixel range defining the background for the normalization")
+        self.declareProperty("LowResDataAxisPixelRangeFlag", True,
+                             doc="If true, the low resolution direction of the data will be cropped according to the lowResDataAxisPixelRange property")
+        self.declareProperty(IntArrayProperty("LowResDataAxisPixelRange", [115, 210],
+                                              IntArrayLengthValidator(2), direction=Direction.Input),
+                                              "Pixel range to use in the low resolution direction of the data")
+        self.declareProperty("LowResNormAxisPixelRangeFlag", True,
+                             doc="If true, the low resolution direction of the normalization run will be cropped according to the LowResNormAxisPixelRange property")
+        self.declareProperty(IntArrayProperty("LowResNormAxisPixelRange", [115, 210],
+                                              IntArrayLengthValidator(2), direction=Direction.Input),
+                                              "Pixel range to use in the low resolution direction of the normalizaion run")
+        self.declareProperty(FloatArrayProperty("TOFRange", [9000., 23600.],
+                                                FloatArrayLengthValidator(2), direction=Direction.Input),
+                                                "TOF range to use")
+        self.declareProperty("TofRangeFlag", True,
+                             doc="If true, the TOF will be cropped according to the TOF range property")
+        self.declareProperty("QMin", 0.05, doc="Minimum Q-value")
+        self.declareProperty("QStep", 0.02, doc="Step size in Q. Enter a negative value to get a log scale")
+        self.declareProperty("AngleOffset", 0.0, doc="angle offset (degrees)")
+        self.declareProperty("AngleOffsetError", 0.0, doc="Angle offset error (degrees)")
+        self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", "", Direction.Output), "Output workspace")
+        self.declareProperty("ScalingFactorFile", "", doc="Scaling factor configuration file")
+        self.declareProperty("SlitsWidthFlag", True,
+                             doc="Looking for perfect match of slits width when using Scaling Factor file")
+        self.declareProperty("IncidentMediumSelected", "", doc="Incident medium used for those runs")
+        self.declareProperty("GeometryCorrectionFlag", False, doc="Use or not the geometry correction")
+        self.declareProperty("FrontSlitName", "S1", doc="Name of the front slit")
+        self.declareProperty("BackSlitName", "Si", doc="Name of the back slit")
+        self.declareProperty("TOFSteps", 40.0, doc="TOF step size")
+        self.declareProperty("CropFirstAndLastPoints", True, doc="If true, we crop the first and last points")
+
+    def PyExec(self):
+        # The old reflectivity reduction has an offset between the input
+        # pixel numbers and what it actually uses. Set the offset to zero
+        # to turn it off.
+        self.LEGACY_OFFSET = -1
+
+        # The old reduction code had a tolerance value for matching the
+        # slit parameters to get the scaling factors
+        self.TOLERANCE = 0.020
+
+        # DATA
+        dataRunNumbers = self.getProperty("RunNumbers").value
+        dataPeakRange = self.getProperty("SignalPeakPixelRange").value
+        dataBackRange = self.getProperty("SignalBackgroundPixelRange").value
+
+        # NORM
+        normalizationRunNumber = self.getProperty("NormalizationRunNumber").value
+        normBackRange = self.getProperty("NormBackgroundPixelRange").value
+        normPeakRange = self.getProperty("NormPeakPixelRange").value
+
+        # Get Q range
+        qMin = self.getProperty("QMin").value
+        qStep = self.getProperty("QStep").value
+        if qStep > 0:  #force logarithmic binning
+            qStep = -qStep
+
+        # If we have multiple files, add them
+        file_list = []
+        for item in dataRunNumbers:
+            data_file = FileFinder.findRuns("REF_L%d" % item)[0]
+            file_list.append(data_file)
+        runs = reduce((lambda x, y: '%s+%s' % (x, y)), file_list)
+        ws_event_data = Load(Filename=runs)
+
+        # Get the TOF range
+        TOFrangeFlag = self.getProperty("TofRangeFlag")
+        if TOFrangeFlag:
+            TOFrange = self.getProperty("TOFRange").value  #microS
+        else:
+            tof_max = ws_event_data.getTofMax()
+            TOFrange = [0, tof_max]
+
+        # Number of pixels in each direction
+        #TODO: revisit this when we update the IDF
+        self.number_of_pixels_x = int(ws_event_data.getInstrument().getNumberParameter("number-of-x-pixels")[0])
+        self.number_of_pixels_y = int(ws_event_data.getInstrument().getNumberParameter("number-of-y-pixels")[0])
+
+        # Get scattering angle theta
+        theta = self.calculate_scattering_angle(ws_event_data)
+
+        # ----- Process Sample Data -------------------------------------------
+        crop_request = self.getProperty("LowResDataAxisPixelRangeFlag")
+        low_res_range = self.getProperty("LowResDataAxisPixelRange").value
+        bck_request = self.getProperty("SubtractSignalBackground").value
+        data_cropped = self.process_data(ws_event_data, TOFrange,
+                                         crop_request, low_res_range,
+                                         dataPeakRange, bck_request, dataBackRange)
+
+        # ----- Normalization -------------------------------------------------
+        # Load normalization
+        ws_event_norm = LoadEventNexus("REF_L_%s" % normalizationRunNumber,
+                                       OutputWorkspace="REF_L_%s" % normalizationRunNumber)
+        crop_request = self.getProperty("LowResNormAxisPixelRangeFlag")
+        low_res_range = self.getProperty("LowResNormAxisPixelRange").value
+        bck_request = self.getProperty("SubtractNormBackground").value
+        norm_cropped = self.process_data(ws_event_norm, TOFrange,
+                                         crop_request, low_res_range,
+                                         normPeakRange, bck_request, normBackRange)
+        # Avoid leaving trash behind
+        AnalysisDataService.remove(str(ws_event_norm))
+
+        # Sum up the normalization peak
+        norm_summed = SumSpectra(InputWorkspace = norm_cropped)
+
+        # Normalize the data
+        normalized_data = data_cropped / norm_summed
+        # Avoid leaving trash behind
+        AnalysisDataService.remove(str(data_cropped))
+        AnalysisDataService.remove(str(norm_cropped))
+        AnalysisDataService.remove(str(norm_summed))
+
+        normalized_data = ConvertToPointData(InputWorkspace=normalized_data,
+                                             OutputWorkspace=str(normalized_data))
+
+        # Apply scaling factors
+        normalized_data = self.apply_scaling_factor(normalized_data)
+
+        q_workspace = SumSpectra(InputWorkspace = normalized_data)
+        q_workspace.getAxis(0).setUnit("MomentumTransfer")
+
+        # Geometry correction to convert To Q with correction
+        geometryCorrectionFlag = self.getProperty("GeometryCorrectionFlag").value
+        if geometryCorrectionFlag:
+            logger.error("The geometry correction for the Q conversion has not been implemented.")
+
+        # Get the distance fromthe moderator to the detector
+        sample = ws_event_data.getInstrument().getSample()
+        source = ws_event_data.getInstrument().getSource()
+        source_sample_distance = sample.getDistance(source)
+        detector = ws_event_data.getDetector(0)
+        sample_detector_distance = detector.getPos().getZ()
+        source_detector_distance = source_sample_distance + sample_detector_distance
+
+        # Convert to Q
+        # Use the TOF range to pick the maximum Q, and give it a little extra room.
+        h = 6.626e-34  # m^2 kg s^-1
+        m = 1.675e-27  # kg
+        constant = 4e-4 * math.pi * m * source_detector_distance / h * math.sin(theta)
+        q_range = [qMin, qStep, constant / TOFrange[0] * 1.2]
+
+        data_x = q_workspace.dataX(0)
+        for i in range(len(data_x)):
+            data_x[i] = constant / data_x[i]
+        q_workspace = SortXAxis(InputWorkspace=q_workspace, OutputWorkspace=str(q_workspace))
+
+        # Cook up a name compatible with the UI for backward compatibility
+        _time = int(time.time())
+        name_output_ws = self.getPropertyValue("OutputWorkspace")
+        name_output_ws = name_output_ws + '_#' + str(_time) + 'ts'
+
+        q_rebin = Rebin(InputWorkspace=q_workspace, Params=q_range,
+                        OutputWorkspace=name_output_ws)
+
+        # Crop to non-zero values
+        data_y = q_rebin.readY(0)
+        low_q = None
+        high_q = None
+        for i in range(len(data_y)):
+            if low_q is None and abs(data_y[i])>0:
+                low_q = i
+            if high_q is None and abs(data_y[len(data_y)-1-i])>0:
+                high_q = len(data_y)-1-i
+            if low_q is not None and high_q is not None:
+                break
+
+        crop = self.getProperty("CropFirstAndLastPoints").value
+        if low_q is not None and high_q is not None:
+            # Get rid of first and last Q points to avoid edge effects
+            if crop:
+                low_q += 1
+                high_q -= 1
+            data_x = q_rebin.readX(0)
+            q_rebin = CropWorkspace(InputWorkspace=q_rebin,
+                                    OutputWorkspace=str(q_rebin),
+                                    XMin=data_x[low_q], XMax=data_x[high_q])
+        else:
+            logger.error("Data is all zeros. Check your TOF ranges.")
+
+        # Clean up the workspace for backward compatibility
+        data_y = q_rebin.dataY(0)
+        data_e = q_rebin.dataE(0)
+        # Again for backward compatibility, the first and last points of the 
+        # raw output when not cropping was simply set to 0 += 1.
+        if crop is False:
+            data_y[0] = 0
+            data_e[0] = 1
+            data_y[len(data_y)-1] = 0
+            data_e[len(data_y)-1] = 1
+        # Values < 1e-12 and values where the error is greater than the value are replaced by 0+-1
+        for i in range(len(data_y)):
+            if data_y[i] < 1e-12 or data_e[i]>data_y[i]:
+                data_y[i]=0.0
+                data_e[i]=1.0
+
+        #TODO: remove this, which we use during development to make sure we don't leave trash
+        logger.information(str(AnalysisDataService.getObjectNames()))
+
+        # Avoid leaving trash behind
+        for ws in ['ws_event_data', 'normalized_data', 'q_workspace']:
+            if AnalysisDataService.doesExist(ws):
+                AnalysisDataService.remove(ws)
+
+        self.setProperty('OutputWorkspace', mtd[name_output_ws])
+
+
+    def calculate_scattering_angle(self, ws_event_data):
+        """
+            Compute the scattering angle
+            @param ws_event_data: data workspace
+        """
+        run_object = ws_event_data.getRun()
+        thi_value = run_object.getProperty('thi').value[0]
+        thi_units = run_object.getProperty('thi').units
+        tthd_value = run_object.getProperty('tthd').value[0]
+        tthd_units = run_object.getProperty('tthd').units
+
+        # Make sure we have radians
+        if thi_units == 'degree':
+            thi_value *= math.pi / 180.0
+        if tthd_units == 'degree':
+            tthd_value *= math.pi / 180.0
+
+        theta = math.fabs(tthd_value - thi_value) / 2.
+
+        # Add the offset
+        angle_offset_deg = self.getProperty("AngleOffset").value
+        return theta + angle_offset_deg * math.pi / 180.0
+
+    def clocking_correction(self, workspace, pixel_range, range_width=3):
+        """
+            Applies the "clocking correction". The pixel range is
+            the range that contains the reflectivity data. Compute the
+            average noise per pixel over two small bands on each side.
+            The subtract that noise pixel-wise from the data
+        """
+        pass
+
+    def subtract_background(self, workspace, peak_range, background_range,
+                            low_res_range, sum_peak=False, offset=None):
+        """
+            Subtract background in place
+            #TODO: RefRoi needs to do error weighting and deal with zeros
+            @param workspace: Mantid workspace
+            @param peak_range: range of pixels defining the peak [min, max]
+            @param background_range: range of pixels defining the background [min, max]
+            @param low_res_range: low resolution range to integrate over
+            @param sum_peak: if True, the resulting peak will be summed
+        """
+        if offset is None:
+            offset = self.LEGACY_OFFSET
+        peak_min = int(peak_range[0]) + offset
+        peak_max = int(peak_range[1]) + offset
+        bck_min = int(background_range[0]) + offset
+        bck_max = int(background_range[1]) + offset
+
+        # Get low-resolution range
+        x_min = int(low_res_range[0]) + offset
+        x_max = int(low_res_range[1]) + offset
+
+        left_bck = None
+        if peak_min > bck_min:
+            left_bck = RefRoi(InputWorkspace=workspace, IntegrateY=False,
+                              NXPixel=self.number_of_pixels_x,
+                              NYPixel=self.number_of_pixels_y,
+                              ConvertToQ=False,
+                              XPixelMin=x_min,
+                              XPixelMax=x_max,
+                              YPixelMin=bck_min,
+                              YPixelMax=peak_min - 1,
+                              ErrorWeighting = True,
+                              SumPixels=True, NormalizeSum=True)
+
+        right_bck = None
+        if peak_max < bck_max:
+            right_bck = RefRoi(InputWorkspace=workspace, IntegrateY=False,
+                               NXPixel=self.number_of_pixels_x,
+                               NYPixel=self.number_of_pixels_y,
+                               ConvertToQ=False,
+                               XPixelMin=x_min,
+                               XPixelMax=x_max,
+                               YPixelMin=peak_max + 1,
+                               YPixelMax=bck_max,
+                               ErrorWeighting = True,
+                               SumPixels=True, NormalizeSum=True)
+            
+        if right_bck is not None and left_bck is not None:
+            average = (left_bck + right_bck) / 2.0
+        elif right_bck is not None:
+            average = right_bck
+        elif left_bck is not None:
+            average = left_bck
+        else:
+            average = RefRoi(InputWorkspace=workspace, IntegrateY=False,
+                             NXPixel=self.number_of_pixels_x,
+                             NYPixel=self.number_of_pixels_y,
+                             ConvertToQ=False,
+                             XPixelMin=x_min,
+                             XPixelMax=x_max,
+                             YPixelMin=bck_min,
+                             YPixelMax=bck_max,
+                             ErrorWeighting = True,
+                             SumPixels=True, NormalizeSum=True)
+        # Integrate over the low-res direction
+        workspace = RefRoi(InputWorkspace=workspace, IntegrateY=False,
+                           NXPixel=self.number_of_pixels_x,
+                           NYPixel=self.number_of_pixels_y,
+                           XPixelMin=x_min,
+                           XPixelMax=x_max,
+                           ConvertToQ=False,
+                           SumPixels=sum_peak,
+                           OutputWorkspace=str(workspace))
+        #TODO Check whether we should multiply by the number of pixels
+        # in the low-res direction
+        workspace = Minus(LHSWorkspace=workspace, RHSWorkspace=average,
+                          OutputWorkspace=str(workspace))
+        # Avoid leaving trash behind
+        average_name = str(average)
+        if AnalysisDataService.doesExist(str(left_bck)):
+            AnalysisDataService.remove(str(left_bck))
+        if AnalysisDataService.doesExist(str(right_bck)):
+            AnalysisDataService.remove(str(right_bck))
+        if AnalysisDataService.doesExist(average_name):
+            AnalysisDataService.remove(average_name)
+        return workspace
+
+    def process_data(self, workspace, tof_range, crop_low_res, low_res_range,
+                     peak_range, subtract_background, background_range):
+        """
+            Common processing for both sample data and normalization.
+        """
+        # Rebin TOF axis
+        tof_max = workspace.getTofMax()
+        tof_step = self.getProperty("TOFSteps").value
+        workspace = Rebin(InputWorkspace=workspace, Params=[0, tof_step, tof_max], 
+                          PreserveEvents=False, OutputWorkspace="%s_histo" % str(workspace))
+
+        # Crop TOF range
+        workspace = CropWorkspace(InputWorkspace=workspace,
+                                  XMin=tof_range[0], XMax=tof_range[1],
+                                  OutputWorkspace=str(workspace))
+
+        # Integrate over low resolution range
+        x_min = 0
+        x_max = self.number_of_pixels_x
+        if crop_low_res:
+            x_min = int(low_res_range[0])
+            x_max = int(low_res_range[1])
+
+        # Subtract background
+        if subtract_background:
+            workspace = self.subtract_background(workspace,
+                                                 peak_range, background_range,
+                                                 [x_min, x_max])
+        else:
+            # If we don't subtract the background, we still have to integrate
+            # over the low resolution axis
+            workspace = RefRoi(InputWorkspace=workspace, IntegrateY=False,
+                               NXPixel=self.number_of_pixels_x,
+                               NYPixel=self.number_of_pixels_y,
+                               ConvertToQ=False, XPixelMin=x_min, XPixelMax=x_max,
+                               OutputWorkspace=str(workspace))
+
+        # Normalize by current proton charge
+        # Note that the background subtraction will use an error weighted mean
+        # and use 1 as the error on counts of zero. We normalize by the integrated
+        # current _after_ the background subtraction so that the 1 doesn't have
+        # to be changed to a 1/Charge.
+        workspace = NormaliseByCurrent(InputWorkspace=workspace, OutputWorkspace=str(workspace))
+
+        # Crop to only the selected peak region
+        cropped = CropWorkspace(InputWorkspace = workspace,
+                                StartWorkspaceIndex=int(peak_range[0]) + self.LEGACY_OFFSET,
+                                EndWorkspaceIndex=int(peak_range[1]) + self.LEGACY_OFFSET,
+                                OutputWorkspace="%s_cropped" % str(workspace))
+
+        # Avoid leaving trash behind
+        AnalysisDataService.remove(str(workspace))
+
+        return cropped
+
+    def apply_scaling_factor(self, workspace):
+        """
+            Apply scaling factor from reference scaling data
+            @param workspace: Mantid workspace
+        """
+        scaling_factor_file = self.getProperty("ScalingFactorFile").value
+        if not os.path.isfile(scaling_factor_file):
+            scaling_factor_files = FileFinder.findRuns(scaling_factor_file)
+            if len(scaling_factor_files)>0:
+                scaling_factor_file = scaling_factor_files[0]
+                if not os.path.isfile(scaling_factor_file):
+                    logger.error("Could not find scaling factor file %s" % scaling_factor_file)
+                    return
+            else:
+                logger.error("Could not find scaling factor file %s" % scaling_factor_file)
+                return
+
+        # Get the incident medium
+        incident_medium = self.getProperty("IncidentMediumSelected").value
+        
+        # Get the wavelength
+        lr = workspace.getRun().getProperty('LambdaRequest').value[0]
+        lr_value = float("{0:.2f}".format(lr))
+
+        # Get the slit information
+        front_slit = self.getProperty("FrontSlitName").value
+        back_slit = self.getProperty("BackSlitName").value
+
+        # Option to match slit widths or not
+        match_slit_width = self.getProperty("SlitsWidthFlag").value
+
+        s1h = abs(workspace.getRun().getProperty("%sVHeight" % front_slit).value[0])
+        s1w = abs(workspace.getRun().getProperty("%sHWidth" % front_slit).value[0])
+        try:
+            s2h = abs(workspace.getRun().getProperty("%sVHeight" % back_slit).value[0])
+            s2w = abs(workspace.getRun().getProperty("%sHWidth" % back_slit).value[0])
+        except:
+            # For backward compatibility with old code
+            logger.error("Specified slit could not be found: %s  Trying S2" % back_slit)
+            s2h = abs(workspace.getRun().getProperty("S2VHeight").value[0])
+            s2w = abs(workspace.getRun().getProperty("S2HWidth").value[0])
+
+        scaling_info = "Scaling settings: %s wl=%s S1H=%s S2H=%s" % (incident_medium,
+                                                                     lr_value, s1h, s2h)
+        if match_slit_width:
+            scaling_info += " S1W=%s S2W=%s" % (s1w, s2w)
+        logger.information(scaling_info)
+
+        def _reduce(accumulation, item):
+            """
+                Reduce function that accumulates values in a dictionary
+            """
+            toks_item = item.split('=')
+            if len(toks_item)!=2:
+                return accumulation
+            if type(accumulation)==dict:
+                accumulation[toks_item[0].strip()] = toks_item[1].strip()
+            else:
+                toks_accum = accumulation.split('=')
+                accumulation = {toks_item[0].strip(): toks_item[1].strip(),
+                                toks_accum[0].strip(): toks_accum[1].strip()}
+            return accumulation
+
+        def _value_check(key, data, reference):
+            """
+                Check an entry against a reference value
+            """
+            if key in data:
+                return abs(abs(float(data[key])) - abs(float(reference))) <= self.TOLERANCE
+            return False
+
+        scaling_data = open(scaling_factor_file, 'r')
+        file_content = scaling_data.read()
+        scaling_data.close()
+        
+        data_found = None
+        for line in file_content.split('\n'):
+            if line.startswith('#'):
+                continue
+
+            # Parse the line of data and produce a dict
+            toks = line.split()
+            data_dict = reduce(_reduce, toks, {})
+            
+            # Get ordered list of keys
+            keys = []
+            for token in toks:
+                key_value = token.split('=')
+                if len(key_value)==2:
+                    keys.append(key_value[0].strip())
+
+            # Sanity check
+            if keys[0] != 'IncidentMedium' and keys[1] != 'LambdaRequested' \
+                and keys[2] != 'S1H':
+                logger.error("The scaling factor file isn't standard: bad keywords")
+            # The S2H key has been changing in the earlier version of REFL reduction.
+            # Get the key from the data to make sure we are backward compatible.
+            s2h_key = keys[3]
+            if 'IncidentMedium' in data_dict \
+                and data_dict['IncidentMedium'] == incident_medium.strip() \
+                and _value_check('LambdaRequested', data_dict, lr_value) \
+                and _value_check('S1H', data_dict, s1h) \
+                and _value_check(s2h_key, data_dict, s2h):
+
+                if not match_slit_width or (_value_check('S1W', data_dict, s1w) \
+                        and _value_check('S2W', data_dict, s2w)):
+                    data_found = data_dict
+                    break
+
+        AddSampleLog(Workspace=workspace, LogName='isSFfound', LogText=str(data_found is not None))
+        if data_found is not None:
+            a = float(data_found['a'])
+            b = float(data_found['b'])
+            a_error = float(data_found['error_a'])
+            b_error = float(data_found['error_b'])
+
+            # Extract a single spectrum, just so we have the TOF axis
+            # to create a normalization workspace
+            normalization = ExtractSingleSpectrum(InputWorkspace=workspace,
+                                                  OutputWorkspace="normalization",
+                                                  WorkspaceIndex=0)
+            norm_tof = normalization.dataX(0)
+            norm_value = normalization.dataY(0)
+            norm_error = normalization.dataE(0)
+            #TODO: The following is done on the bin edges.
+            # Should it not be done for the center of the bin?
+            for i in range(len(norm_value)):
+                norm_value[i] = norm_tof[i] * b + a
+                norm_error[i] = math.sqrt(a_error*a_error + norm_tof[i] * norm_tof[i] * b_error * b_error)
+
+            workspace = Divide(LHSWorkspace=workspace,
+                               RHSWorkspace=normalization,
+                               OutputWorkspace=str(workspace))
+            # Avoid leaving trash behind
+            AnalysisDataService.remove(str(normalization))
+        else:
+            logger.error("Could not find scaling factor")
+        return workspace
+
+
+AlgorithmFactory.subscribe(LiquidsReflectometryReduction)
diff --git a/Code/Mantid/Framework/WorkflowAlgorithms/src/RefReduction.cpp b/Code/Mantid/Framework/WorkflowAlgorithms/src/RefReduction.cpp
index c4f1a9b4ccc9ba04a40190f297a759bb3b1ed778..a861fefdef291d4b5270482e8d423fd0e2e30be0 100644
--- a/Code/Mantid/Framework/WorkflowAlgorithms/src/RefReduction.cpp
+++ b/Code/Mantid/Framework/WorkflowAlgorithms/src/RefReduction.cpp
@@ -443,7 +443,7 @@ MatrixWorkspace_sptr RefReduction::processNormalization() {
   refAlg->setProperty("ConvertToQ", false);
   refAlg->setProperty("SumPixels", true);
   refAlg->setProperty("NormalizeSum", true);
-
+  refAlg->setProperty("AverageOverIntegratedAxis", integrateY);
   refAlg->setProperty("YPixelMin", ymin);
   refAlg->setProperty("YPixelMax", ymax);
   refAlg->setProperty("XPixelMin", xmin);
@@ -712,7 +712,7 @@ MatrixWorkspace_sptr RefReduction::subtractBackground(
     leftAlg->setProperty("ConvertToQ", false);
     leftAlg->setProperty("SumPixels", true);
     leftAlg->setProperty("NormalizeSum", true);
-
+    leftAlg->setProperty("AverageOverIntegratedAxis", integrateY);
     leftAlg->setProperty("YPixelMin", ymin);
     leftAlg->setProperty("YPixelMax", ymax);
     leftAlg->setProperty("XPixelMin", xmin);
@@ -737,7 +737,7 @@ MatrixWorkspace_sptr RefReduction::subtractBackground(
     rightAlg->setProperty("ConvertToQ", false);
     rightAlg->setProperty("SumPixels", true);
     rightAlg->setProperty("NormalizeSum", true);
-
+    rightAlg->setProperty("AverageOverIntegratedAxis", integrateY);
     rightAlg->setProperty("YPixelMin", ymin);
     rightAlg->setProperty("YPixelMax", ymax);
     rightAlg->setProperty("XPixelMin", xmin);
@@ -778,7 +778,7 @@ MatrixWorkspace_sptr RefReduction::subtractBackground(
     refAlg->setProperty("ConvertToQ", false);
     refAlg->setProperty("SumPixels", true);
     refAlg->setProperty("NormalizeSum", true);
-
+    refAlg->setProperty("AverageOverIntegratedAxis", integrateY);
     refAlg->setProperty("YPixelMin", ymin);
     refAlg->setProperty("YPixelMax", ymax);
     refAlg->setProperty("XPixelMin", xmin);
diff --git a/Code/Mantid/Framework/WorkflowAlgorithms/src/RefRoi.cpp b/Code/Mantid/Framework/WorkflowAlgorithms/src/RefRoi.cpp
index c45a2e81183536ecd0bc96a8ccc84eab96df6c4a..20dbd035fe1cb61257fdaee79f6bb7a826764163 100644
--- a/Code/Mantid/Framework/WorkflowAlgorithms/src/RefRoi.cpp
+++ b/Code/Mantid/Framework/WorkflowAlgorithms/src/RefRoi.cpp
@@ -46,7 +46,14 @@ void RefRoi::init() {
   declareProperty(
       "NormalizeSum", false,
       "If true, and SumPixels is true, the"
-      "resulting histogram will be divided by the number of pixels in the ROI");
+      " resulting histogram will be divided by the number of pixels in the ROI");
+  declareProperty(
+      "AverageOverIntegratedAxis", false,
+      "If true, and SumPixels and NormalizeSum are true, the"
+      " resulting histogram will also be divided by the number of pixels integrated over");
+  declareProperty(
+      "ErrorWeighting", false,
+      "If true, error weighting will be used when normalizing");
   declareProperty(
       "IntegrateY", true,
       "If true, the Y direction will be"
@@ -102,6 +109,7 @@ void RefRoi::extract2D() {
   bool integrate_y = getProperty("IntegrateY");
   bool sum_pixels = getProperty("SumPixels");
   bool normalize = getProperty("NormalizeSum");
+  bool error_weighting = getProperty("ErrorWeighting");
 
   int nHisto = integrate_y ? m_nXPixel : m_nYPixel;
   int xmin = integrate_y ? 0 : m_xMin;
@@ -149,44 +157,74 @@ void RefRoi::extract2D() {
     XOut0 = inputWS->readX(0);
   }
 
-  PARALLEL_FOR2(outputWS, inputWS)
-  for (int i = xmin; i <= xmax; i++) {
-    PARALLEL_START_INTERUPT_REGION
-    for (int j = ymin; j <= ymax; j++) {
-      int index = m_nYPixel * i + j;
+  // Make sure the inner loop is always the one we integrate over
+  int main_axis_min = integrate_y ? xmin : ymin;
+  int main_axis_max = integrate_y ? xmax : ymax;
+  int integrated_axis_min = integrate_y ? ymin : xmin;
+  int integrated_axis_max = integrate_y ? ymax : xmax;
+
+  for (int i = main_axis_min; i <= main_axis_max; i++) {
+    size_t output_index = i;
+    if (sum_pixels)
+      output_index = 0;
+
+    MantidVec &YOut = outputWS->dataY(output_index);
+    MantidVec &EOut = outputWS->dataE(output_index);
+    MantidVec signal_vector(YOut.size(), 0.0);
+    MantidVec error_vector(YOut.size(), 0.0);
+
+    for (int j = integrated_axis_min; j <= integrated_axis_max; j++) {
+      int index = integrate_y ? m_nYPixel * i + j : m_nYPixel * j + i;
       const MantidVec &YIn = inputWS->readY(index);
       const MantidVec &EIn = inputWS->readE(index);
 
-      size_t output_index = integrate_y ? i : j;
-      if (sum_pixels)
-        output_index = 0;
-
-      MantidVec &YOut = outputWS->dataY(output_index);
-      MantidVec &EOut = outputWS->dataE(output_index);
+      for (size_t t = 0; t < YOut.size(); t++) {
+        size_t t_index = convert_to_q ? YOut.size() - 1 - t : t;
+        if (sum_pixels && normalize && error_weighting) {
+          signal_vector[t] += YIn[t_index];
+          error_vector[t] += EIn[t_index] * EIn[t_index];
+        } else {
+          YOut[t] += YIn[t_index];
+          EOut[t] += EIn[t_index] * EIn[t_index];
+        }
+      }
+    }
 
+    if (sum_pixels && normalize && error_weighting) {
       for (size_t t = 0; t < YOut.size(); t++) {
         size_t t_index = convert_to_q ? YOut.size() - 1 - t : t;
-        YOut[t] += YIn[t_index];
-        EOut[t] += EIn[t_index] * EIn[t_index];
+        double error_squared = error_vector[t_index] == 0 ? 1 : error_vector[t_index];
+        YOut[t] += signal_vector[t_index] / error_squared;
+        EOut[t] += 1.0 / error_squared;
       }
     }
-    PARALLEL_END_INTERUPT_REGION
   }
-  PARALLEL_CHECK_INTERUPT_REGION
 
-  const int n_pixels = (xmax - xmin + 1) * (ymax - ymin + 1);
+  // Check whether we want to divide by the number of pixels along
+  // the axis we integrated over.
+  bool average_integrated = getProperty("AverageOverIntegratedAxis");
+  double n_integrated = 1.0;
+  if (sum_pixels && normalize && average_integrated) {
+    n_integrated = integrated_axis_max - integrated_axis_min + 1;
+  }
 
   for (int i = 0; i < nHisto; i++) {
     outputWS->dataX(i) = XOut0;
     MantidVec &YOut = outputWS->dataY(i);
     MantidVec &EOut = outputWS->dataE(i);
     for (size_t t = 0; t < EOut.size(); t++) {
-      EOut[t] = sqrt(EOut[t]);
-
       if (sum_pixels && normalize) {
-        YOut[t] = YOut[t] / n_pixels;
-        EOut[t] = EOut[t] / n_pixels;
-      }
+        if (error_weighting) {
+          YOut[t] = YOut[t] / EOut[t] / n_integrated;
+          EOut[t] = sqrt(1.0/EOut[t]) / n_integrated;
+        } else {
+          EOut[t] = sqrt(EOut[t]);
+          YOut[t] = YOut[t] / (main_axis_max - main_axis_min + 1) / n_integrated;
+          EOut[t] = EOut[t] / (main_axis_max - main_axis_min + 1) / n_integrated;
+        }
+      } else {
+        EOut[t] = sqrt(EOut[t]);
+      };
     }
   }
 
diff --git a/Code/Mantid/Testing/Data/SystemTest/REFL_119816.nxs.md5 b/Code/Mantid/Testing/Data/SystemTest/REFL_119816.nxs.md5
new file mode 100644
index 0000000000000000000000000000000000000000..36cf9a81dbb29dbcc164a9b4cd1b3b07b5c7e7a5
--- /dev/null
+++ b/Code/Mantid/Testing/Data/SystemTest/REFL_119816.nxs.md5
@@ -0,0 +1 @@
+fd214d4f9b4b60328862ce37420e58ec
diff --git a/Code/Mantid/Testing/Data/SystemTest/REF_L_119692_event.nxs.md5 b/Code/Mantid/Testing/Data/SystemTest/REF_L_119692_event.nxs.md5
new file mode 100644
index 0000000000000000000000000000000000000000..0e69a237886e3e4c10a36791d530ddcaba988438
--- /dev/null
+++ b/Code/Mantid/Testing/Data/SystemTest/REF_L_119692_event.nxs.md5
@@ -0,0 +1 @@
+f83ebc24532a5a25c1bf5b0f8710c7f5
diff --git a/Code/Mantid/Testing/Data/SystemTest/REF_L_119816_event.nxs.md5 b/Code/Mantid/Testing/Data/SystemTest/REF_L_119816_event.nxs.md5
new file mode 100644
index 0000000000000000000000000000000000000000..f969d529218c9646519b78d9ac4d5b0f0e98a4a7
--- /dev/null
+++ b/Code/Mantid/Testing/Data/SystemTest/REF_L_119816_event.nxs.md5
@@ -0,0 +1 @@
+a5571a7bb7950a4c8c16ef953011ae08
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionTest.py
new file mode 100644
index 0000000000000000000000000000000000000000..118f1a63c428ddc95043170ae6779182e4ceb76e
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionTest.py
@@ -0,0 +1,49 @@
+#pylint: disable=no-init
+import stresstesting
+from mantid import *
+
+from mantid.simpleapi import *
+
+class LiquidsReflectometryReductionTest(stresstesting.MantidStressTest):
+    def runTest(self):
+        #TODO: The reduction algorithm should not require an absolute path
+        scaling_factor_file = FileFinder.getFullPath("directBeamDatabaseFall2014_IPTS_11601_2.cfg")
+        
+        LiquidsReflectometryReduction(RunNumbers=[119814],
+                                      NormalizationRunNumber=119690,
+                                      SignalPeakPixelRange=[154, 166],
+                                      SubtractSignalBackground=True,
+                                      SignalBackgroundPixelRange=[151, 169],
+                                      NormFlag=True,
+                                      NormPeakPixelRange=[154, 160],
+                                      NormBackgroundPixelRange=[151, 163],
+                                      SubtractNormBackground=True,
+                                      LowResDataAxisPixelRangeFlag=True,
+                                      LowResDataAxisPixelRange=[99, 158],
+                                      LowResNormAxisPixelRangeFlag=True,
+                                      LowResNormAxisPixelRange=[98, 158],
+                                      TOFRange=[29623.0, 42438.0],
+                                      IncidentMediumSelected='2InDiamSi',
+                                      GeometryCorrectionFlag=False,
+                                      QMin=0.005,
+                                      QStep=0.01,
+                                      AngleOffset=0.009,
+                                      AngleOffsetError=0.001,
+                                      ScalingFactorFile=scaling_factor_file,
+                                      SlitsWidthFlag=True,
+                                      CropFirstAndLastPoints=False,
+                                      OutputWorkspace='reflectivity_119814')
+
+    def validate(self):
+        # Be more tolerant with the output.
+        self.tolerance = 0.01
+
+        # Skip the first point so we don't have to have a big tolerance
+        data_y = mtd["reflectivity_119814"].dataY(0)
+        data_y[1] = 0.631281639115562
+        
+        self.disableChecking.append('Instrument')
+        self.disableChecking.append('Sample')
+        self.disableChecking.append('SpectraMap')
+        self.disableChecking.append('Axes')
+        return "reflectivity_119814", 'REFL_119814_combined_data.nxs'
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionWithBackgroundTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionWithBackgroundTest.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d09d62bec2c46703814a59c927399e289099fa5
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LiquidsReflectometryReductionWithBackgroundTest.py
@@ -0,0 +1,49 @@
+#pylint: disable=no-init
+import stresstesting
+from mantid import *
+from mantid.simpleapi import *
+
+class LiquidsReflectometryReductionWithBackgroundTest(stresstesting.MantidStressTest):
+    def runTest(self):
+        #TODO: The reduction algorithm should not require an absolute path
+        scaling_factor_file = FileFinder.getFullPath("directBeamDatabaseFall2014_IPTS_11601_2.cfg")
+        
+        LiquidsReflectometryReduction(RunNumbers=[119816],
+                                      NormalizationRunNumber=119692,
+                                      SignalPeakPixelRange=[155, 165],
+                                      SubtractSignalBackground=True,
+                                      SignalBackgroundPixelRange=[146, 165],
+                                      NormFlag=True,
+                                      NormPeakPixelRange=[154, 162],
+                                      NormBackgroundPixelRange=[151, 165],
+                                      SubtractNormBackground=True,
+                                      LowResDataAxisPixelRangeFlag=True,
+                                      LowResDataAxisPixelRange=[99, 158],
+                                      LowResNormAxisPixelRangeFlag=True,
+                                      LowResNormAxisPixelRange=[118, 137],
+                                      TOFRange=[9610, 22425],
+                                      IncidentMediumSelected='2InDiamSi',
+                                      GeometryCorrectionFlag=False,
+                                      QMin=0.005,
+                                      QStep=0.01,
+                                      AngleOffset=0.009,
+                                      AngleOffsetError=0.001,
+                                      ScalingFactorFile=scaling_factor_file,
+                                      SlitsWidthFlag=True,
+                                      CropFirstAndLastPoints=False,
+                                      OutputWorkspace='reflectivity_119816')
+
+    def validate(self):
+        # Be more tolerant with the output.
+        self.tolerance = 0.0002
+
+        # Skip the first point so we don't have to have a big tolerance
+        data_y = mtd["reflectivity_119816"].dataY(0)
+        data_y[1] = 0.00499601750282373
+        
+        self.disableChecking.append('Instrument')
+        self.disableChecking.append('Sample')
+        self.disableChecking.append('SpectraMap')
+        self.disableChecking.append('Axes')
+        return "reflectivity_119816", 'REFL_119816.nxs'
+
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/REFLReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/REFLReduction.py
index 7c9e50ae3c20f77e6f58cbf38dd65f4eb0c5938e..57c9b9c7b02505aaa022e88ed3c9dfd830b9429d 100644
--- a/Code/Mantid/Testing/SystemTests/tests/analysis/REFLReduction.py
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/REFLReduction.py
@@ -1,7 +1,6 @@
 #pylint: disable=no-init
 import stresstesting
 from mantid import *
-
 from mantid.simpleapi import *
 
 class REFLReduction(stresstesting.MantidStressTest):
@@ -36,9 +35,9 @@ class REFLReduction(stresstesting.MantidStressTest):
     def validate(self):
         # Be more tolerant with the output.
         self.tolerance = 0.0001
+
         self.disableChecking.append('Instrument')
         self.disableChecking.append('Sample')
         self.disableChecking.append('SpectraMap')
         self.disableChecking.append('Axes')
         return "reflectivity_119814", 'REFL_119814_combined_data.nxs'
-
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/REFLWithBackground.py b/Code/Mantid/Testing/SystemTests/tests/analysis/REFLWithBackground.py
new file mode 100644
index 0000000000000000000000000000000000000000..17443e9c64b2ad225c6b43ca1906d84ffbe7eb42
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/REFLWithBackground.py
@@ -0,0 +1,44 @@
+#pylint: disable=no-init
+import stresstesting
+from mantid import *
+from mantid.simpleapi import *
+
+class REFLWithBackground(stresstesting.MantidStressTest):
+    def runTest(self):
+        #TODO: The reduction algorithm should not require an absolute path
+        scaling_factor_file = FileFinder.getFullPath("directBeamDatabaseFall2014_IPTS_11601_2.cfg")
+        
+        RefLReduction(RunNumbers=[119816],
+                      NormalizationRunNumber=119692,
+                      SignalPeakPixelRange=[155, 165],
+                      SubtractSignalBackground=True,
+                      SignalBackgroundPixelRange=[146, 165],
+                      NormFlag=True,
+                      NormPeakPixelRange=[154, 162],
+                      NormBackgroundPixelRange=[151, 165],
+                      SubtractNormBackground=True,
+                      LowResDataAxisPixelRangeFlag=True,
+                      LowResDataAxisPixelRange=[99, 158],
+                      LowResNormAxisPixelRangeFlag=True,
+                      LowResNormAxisPixelRange=[118, 137],
+                      TOFRange=[9610, 22425],
+                      IncidentMediumSelected='2InDiamSi',
+                      GeometryCorrectionFlag=False,
+                      QMin=0.005,
+                      QStep=0.01,
+                      AngleOffset=0.009,
+                      AngleOffsetError=0.001,
+                      ScalingFactorFile=scaling_factor_file,
+                      SlitsWidthFlag=True,
+                      OutputWorkspace='reflectivity_119816')
+
+    def validate(self):
+        # Be more tolerant with the output.
+        self.tolerance = 0.0001
+
+        self.disableChecking.append('Instrument')
+        self.disableChecking.append('Sample')
+        self.disableChecking.append('SpectraMap')
+        self.disableChecking.append('Axes')
+        return "reflectivity_119816", 'REFL_119816.nxs'
+
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/RefRoi.py b/Code/Mantid/Testing/SystemTests/tests/analysis/RefRoi.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8aee4424f5e8bac1694d01217740e048f523edc
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/RefRoi.py
@@ -0,0 +1,22 @@
+#pylint: disable=no-init
+import stresstesting
+from mantid import *
+from mantid.simpleapi import *
+
+class RefRoiTest(stresstesting.MantidStressTest):
+    def runTest(self):
+        ws=Load(Filename="REF_L_119814")
+        ws = Integration(InputWorkspace=ws)
+        roi = RefRoi(InputWorkspace=ws,
+                     NXPixel=256, NYPixel=304,
+                     IntegrateY=False, ConvertToQ=False)
+        roi = Transpose(InputWorkspace=roi)
+
+    def validate(self):
+        self.disableChecking.append('Instrument')
+        self.disableChecking.append('Sample')
+        self.disableChecking.append('SpectraMap')
+        self.disableChecking.append('Axes')
+        return "roi", 'REFL_119814_roi_peak.nxs'
+
+
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/reference/REFL_119814_roi_peak.nxs.md5 b/Code/Mantid/Testing/SystemTests/tests/analysis/reference/REFL_119814_roi_peak.nxs.md5
new file mode 100644
index 0000000000000000000000000000000000000000..65db618e778e31995fe12af61f66e08c5706e705
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/reference/REFL_119814_roi_peak.nxs.md5
@@ -0,0 +1 @@
+7329950c6f224e245f665ef8317f16bd