diff --git a/Code/Mantid/Framework/Algorithms/inc/MantidAlgorithms/Q1D.h b/Code/Mantid/Framework/Algorithms/inc/MantidAlgorithms/Q1D.h
index a4151a4839a1b435f409c2713c99162549d46753..19f150530c98542f1fdc1f930b9a2e8cad38c5f4 100644
--- a/Code/Mantid/Framework/Algorithms/inc/MantidAlgorithms/Q1D.h
+++ b/Code/Mantid/Framework/Algorithms/inc/MantidAlgorithms/Q1D.h
@@ -47,7 +47,7 @@ public:
   /// Algorithm's name
   virtual const std::string name() const { return "Q1D"; }
   /// Algorithm's version
-  virtual int version() const { return (1); }
+  virtual int version() const { return (3); }
   /// Algorithm's category for identification
   virtual const std::string category() const { return "SANS"; }
 
diff --git a/Code/Mantid/Framework/Algorithms/test/Q1DTest.h b/Code/Mantid/Framework/Algorithms/test/Q1DTest.h
index 28cf8be69eef421fedcb895fd6510e23749fb860..77f31ae0f0c1348702394314d50d195c287a6313 100644
--- a/Code/Mantid/Framework/Algorithms/test/Q1DTest.h
+++ b/Code/Mantid/Framework/Algorithms/test/Q1DTest.h
@@ -17,10 +17,11 @@ public:
     TS_ASSERT_EQUALS( Q1D.name(), "Q1D" )
   }
 
-  void testVersion()
+/*this algorithm must be reset to being old when the SANS group are happy with the new version
+void testVersion()
   {
     TS_ASSERT_EQUALS( Q1D.version(), 1 )
-  }
+  }*/
 
   void testCategory()
   {
diff --git a/Code/Mantid/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp b/Code/Mantid/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
index 7c33d9ba865a89053ac78da13db258e6d46f3230..f4407c8769a6c7fd0ffd18347b66cb20b807a23b 100644
--- a/Code/Mantid/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
+++ b/Code/Mantid/MantidQt/CustomInterfaces/src/SANSRunWindow.cpp
@@ -733,7 +733,7 @@ bool SANSRunWindow::loadUserFile()
     "print i.ReductionSingleton().instrument.detector_file('front')"));
 
   QString file = runReduceScriptFunction(
-    "print i.ReductionSingleton().prep_normalize.getPixelCorrFile()");
+    "print i.ReductionSingleton().flood_file.get_filename()");
   file = file.trimmed();
   //Check if the file name is set to Python's None object and then adjust the controls if there is an empty entry
   m_uiForm.floodFile->setFileText(file == "None" ? "" : file);
diff --git a/Code/Mantid/scripts/SANS/ISISCommandInterface.py b/Code/Mantid/scripts/SANS/ISISCommandInterface.py
index 6cb2a420fd6b25fed4623ee346d7631b1ea91dcb..6cf677446f39f3e372edfafab3fcceb47350be15 100644
--- a/Code/Mantid/scripts/SANS/ISISCommandInterface.py
+++ b/Code/Mantid/scripts/SANS/ISISCommandInterface.py
@@ -288,7 +288,7 @@ def SetFrontEfficiencyFile(filename):
     front_det.correction_file = filename
 
 def SetDetectorFloodFile(filename):
-    ReductionSingleton().prep_normalize.setPixelCorrFile(filename)
+    ReductionSingleton().flood_file.set_filename(filename)
 
 def displayUserFile():
     print '-- Mask file defaults --'
diff --git a/Code/Mantid/scripts/SANS/centre_finder.py b/Code/Mantid/scripts/SANS/centre_finder.py
index 669bbc63b4787ff257d32a91fdef6c71dc48c1da..ec098fbf36f57810c49124766bf36fb1f2332760 100644
--- a/Code/Mantid/scripts/SANS/centre_finder.py
+++ b/Code/Mantid/scripts/SANS/centre_finder.py
@@ -35,6 +35,7 @@ class CentreFinder(object):
     
         #phi masking will remove areas of the detector that we need 
         self.reducer.mask.mask_phi = False
+        self.reducer.keep_un_normalised(False)
         self.reducer.run_no_Q('centre')
     
         self._group_into_quadrants('centre', trial[0], trial[1], suffix='_tmp')
diff --git a/Code/Mantid/scripts/SANS/isis_reducer.py b/Code/Mantid/scripts/SANS/isis_reducer.py
index eaa5db222a3b343d07c76f23d4487dfe4d80e0c0..347c91cea389cd3af446fd54ff910e1ed2389623 100644
--- a/Code/Mantid/scripts/SANS/isis_reducer.py
+++ b/Code/Mantid/scripts/SANS/isis_reducer.py
@@ -58,7 +58,8 @@ class ISISReducer(SANSReducer):
         #---- creates a new workspace leaving the raw data behind 
         self._fork_ws = [self.out_name]
 
-        self._proc_TOF = [self.crop_detector]
+        self._proc_TOF = [self.flood_file]
+        self._proc_TOF.append(self.crop_detector)
         self._proc_TOF.append(self.mask)
         self._proc_TOF.append(self.to_wavelen)
 
@@ -83,13 +84,15 @@ class ISISReducer(SANSReducer):
     def _init_steps(self):
         """
             Initialises the steps that are not initialised by (ISIS)CommandInterface.
-        """       
+        """
+        
         self.data_loader =     None
         self.user_settings =   None
         self.place_det_sam =   isis_reduction_steps.MoveComponents()
         self.geometry =        sans_reduction_steps.GetSampleGeom()
         self.out_name =       isis_reduction_steps.GetOutputName()
-#this needs to change
+        self.flood_file =      isis_reduction_steps.CorrectToFileISIS(
+            '', 'SpectrumNumber','Divide', self.out_name.name_holder)
         self.crop_detector =   isis_reduction_steps.CropDetBank(
             self.out_name.name_holder)
         self.samp_trans_load = None
@@ -99,13 +102,9 @@ class ISISReducer(SANSReducer):
         self.norm_mon =        isis_reduction_steps.NormalizeToMonitor()
         self.transmission_calculator =\
                                isis_reduction_steps.TransmissionCalc(loader=None)
-        self._corr_and_scale = isis_reduction_steps.AbsoluteUnits()
-        
-        #this object isn't a step but is required by the ConvertToQ step below
-        self.prep_normalize = isis_reduction_steps.CalculateNormISIS(
-                [self.norm_mon, self.transmission_calculator])
-
-        self.to_Q =            sans_reduction_steps.ConvertToQ(self.prep_normalize)
+        self._corr_and_scale = isis_reduction_steps.ISISCorrections()
+        self.to_Q =            isis_reduction_steps.ConvertToQ(
+	                                         container=self._temporys)
         self.background_subtracter = None
         self._geo_corr =       sans_reduction_steps.SampleGeomCor(self.geometry)
         self._zero_error_flags=isis_reduction_steps.ReplaceErrors()
@@ -119,12 +118,12 @@ class ISISReducer(SANSReducer):
         self.sample_wksp = None
         self.full_trans_wav = False
         self._monitor_set = False
-        #workspaces that this reducer uses and will delete at the end
-        self._temporys = {}
-        #the output workspaces created by a data analysis
-        self._outputs = {}
-        #all workspaces created by this reducer
-        self._workspace = [self._temporys, self._outputs] 
+	#workspaces that this reducer uses and will delete at the end
+        self._temporys = {'Q1D errors' : None}
+	#the output workspaces created by a data analysis
+	self._outputs = {}
+	#all workspaces created by this reducer
+	self._workspace = [self._temporys, self._outputs] 
 
         self._init_steps()
 	
@@ -182,6 +181,8 @@ class ISISReducer(SANSReducer):
         new_reducer.output_wksp = new_wksp
 
         #give the name of the new workspace to the first algorithm that was run
+        new_reducer.flood_file.out_container[0] = new_wksp
+        #the line below is required if the step above is optional
         new_reducer.crop_detector.out_container[0] = new_wksp
         
         if new_reducer.transmission_calculator:
@@ -199,7 +200,9 @@ class ISISReducer(SANSReducer):
             new_reducer.run_no_Q(new_wksp)
 
     def name_outwksp(self, new_name):
-        #give the name of the new workspace to the first algorithm that is run
+        #give the name of the new workspace to the first algorithm that was run
+        self.flood_file.out_container = [new_name]
+        #the line below is required if the step above is optional
         self.crop_detector.out_container = [new_name]
 
     def run_from_raw(self):
@@ -258,7 +261,25 @@ class ISISReducer(SANSReducer):
         self.clean = False
         return self.output_wksp
 
+    def keep_un_normalised(self, keep):
+        """
+	        Use this function to keep the un-normalised workspace from the
+	        normalise to monitor step and use it for the Q1D error estimate.
+	        Call this function with keep = False to disable this
+	        @param keep: set tot True to keep the workspace, False to delete it
+        """
+        if keep:
+            self._temporys['Q1D errors'] = 'to_delete_prenormed'
+        else:
+            if self._temporys['Q1D errors']:
+               if mtd.workspaceExists(self._temporys['Q1D errors']):
+                   DeleteWorkspace(self._temporys['Q1D errors'])
+                   self._temporys['Q1D errors'] = None
+	    
+	self.norm_mon.save_original = self._temporys['Q1D errors']
+
     def set_Q_output_type(self, out_type):
+       self.keep_un_normalised(self.to_Q.output_type == '1D')
        self.to_Q.set_output_type(out_type)
 
     def post_process(self):
@@ -267,7 +288,7 @@ class ISISReducer(SANSReducer):
             user_file = 'None'
         else:
             user_file = self.user_settings.filename
-        AddSampleLog(self.output_wksp, "UserFile", LogText=user_file)
+        AddSampleLog(self.output_wksp, "UserFile", user_file)
 	
 	for role in self._temporys.keys():
 	    try:
@@ -360,16 +381,3 @@ class ISISReducer(SANSReducer):
 
     CENT_FIND_RMIN = None
     CENT_FIND_RMAX = None
-    
-def deleteWorkspaces(workspaces):
-    """
-        Deletes a list of workspaces if they exist but ignores any errors
-    """
-    for wk in workspaces:
-        try:
-            if wk and mantid.workspaceExists(wk):
-                DeleteWorkspace(wk)
-        except:
-            #if the workspace can't be deleted this function does nothing
-            pass
-
diff --git a/Code/Mantid/scripts/SANS/isis_reduction_steps.py b/Code/Mantid/scripts/SANS/isis_reduction_steps.py
index f52de71439b330902c749992e971ff66620aca41..b449ce003db8bed582dac0fb6047aadeedb716af 100644
--- a/Code/Mantid/scripts/SANS/isis_reduction_steps.py
+++ b/Code/Mantid/scripts/SANS/isis_reduction_steps.py
@@ -909,9 +909,75 @@ class CropDetBank(ReductionStep):
     def execute(self, reducer, workspace):
         if len(self.out_container) > 0:
             reducer.output_wksp = self.out_container[0]
-        
         # Get the detector bank that is to be used in this analysis leave the complete workspace
-        reducer.instrument.cur_detector().crop_to_detector(workspace, reducer.output_wksp)
+        CropWorkspace(workspace, reducer.output_wksp,
+            StartWorkspaceIndex = reducer.instrument.cur_detector().get_first_spec_num() - 1,
+            EndWorkspaceIndex = reducer.instrument.cur_detector().last_spec_num - 1)
+
+class ConvertToQ(ReductionStep):
+    _OUTPUT_TYPES = {'1D' : 'Q1D',
+                     '2D': 'Qxy'}
+    _DEFAULT_GRAV = False
+    
+    def __init__(self, container=None):
+        super(ConvertToQ, self).__init__()
+	#allows running the reducers keep_un_normalised(), if required
+	self._error_holder = container
+        
+        #this should be set to 1D or 2D
+        self._output_type = '1D'
+        #the algorithm that corrosponds to the above choice
+        self._Q_alg = self._OUTPUT_TYPES[self._output_type]
+        #if true gravity is taken into account in the Q1D calculation
+        self._use_gravity = self._DEFAULT_GRAV
+        self._grav_set = False
+    
+    def set_output_type(self, descript):
+        """
+	    Requests the given output from the Q conversion, either 1D or 2D. For
+	    the 1D calculation it asks the reducer to keep a workspace for error
+	    estimates
+	    @param descript: 1D or 2D
+	"""
+        self._Q_alg = self._OUTPUT_TYPES[descript]
+        self._output_type = descript
+	
+        if self._output_type == '1D':
+            if not self._error_holder['Q1D errors']:
+                raise RuntimeError('Could not find the un-normalised sample workspace needed for error estimates')
+	        	
+    def get_output_type(self):
+        return self._output_type
+
+    output_type = property(get_output_type, set_output_type, None, None)
+
+    def get_gravity(self):
+        return self._use_gravity
+
+    def set_gravity(self, flag, override=True):
+        if override:
+            self._grav_set = True
+            
+        if (not self._grav_set) or override:
+                self._use_gravity = bool(flag)
+        else:
+            _issueWarning("User file can't override previous gravity setting, do gravity correction remains " + str(self._use_gravity)) 
+
+    def execute(self, reducer, workspace):
+        if self._Q_alg == 'Q1D':
+	    #use a counts workspace for errors only if it exists, otherwise use the data workspace as a dummy
+            if self._error_holder['Q1D errors']:
+                errors = self._error_holder['Q1D errors']
+	    else:
+	        errors = workspace
+
+            Q1D(workspace, errors, workspace, reducer.Q_REBIN, AccountForGravity=self._use_gravity)
+
+        elif self._Q_alg == 'Qxy':
+            Qxy(workspace, workspace, reducer.QXY2, reducer.DQXY, AccountForGravity=self._use_gravity)
+            ReplaceSpecialValues(workspace, workspace, NaNValue="0", InfinityValue="0")
+        else:
+            raise NotImplementedError('The type of Q reduction hasn\'t been set, e.g. 1D or 2D')
 
 class NormalizeToMonitor(sans_reduction_steps.Normalize):
     """
@@ -928,9 +994,8 @@ class NormalizeToMonitor(sans_reduction_steps.Normalize):
             index_num = None
         super(NormalizeToMonitor, self).__init__(index_num)
         self._raw_ws = raw_ws
-
-        #the result of this calculation that will be used by CalculateNorm() and the ConvertToQ
-        self.output_wksp = None
+	#it is possible to keep the un-normalised workspace, to do this set this to the name you want this workspace to have
+	self.save_original = None
 
     def execute(self, reducer, workspace):
         normalization_spectrum = self._normalization_spectrum 
@@ -943,21 +1008,24 @@ class NormalizeToMonitor(sans_reduction_steps.Normalize):
             raw_ws = reducer.data_loader.uncropped
 
         mantid.sendLogMessage('::SANS::Normalizing to monitor ' + str(normalization_spectrum))
+        # Get counting time or monitor
+        norm_ws = workspace+"_normalization"
+        norm_ws = 'Monitor'
 
-        self.output_wksp = 'Monitor'       
-        CropWorkspace(raw_ws, self.output_wksp,
+        
+        CropWorkspace(raw_ws, norm_ws,
                       StartWorkspaceIndex = normalization_spectrum-1, 
                       EndWorkspaceIndex   = normalization_spectrum-1)
     
         if reducer.instrument.name() == 'LOQ':
-            RemoveBins(self.output_wksp, self.output_wksp, '19900', '20500',
+            RemoveBins(norm_ws, norm_ws, '19900', '20500',
                 Interpolation="Linear")
         
         # Remove flat background
         TOF_start, TOF_end = reducer.inst.get_TOFs(
                                     self.NORMALISATION_SPEC_NUMBER)
         if TOF_start and TOF_end:
-            FlatBackground(self.output_wksp, self.output_wksp, StartX=TOF_start, EndX=TOF_end,
+            FlatBackground(norm_ws, norm_ws, StartX=TOF_start, EndX=TOF_end,
                 WorkspaceIndexList=self.NORMALISATION_SPEC_INDEX, Mode='Mean')
 
         #perform the same conversion on the monitor spectrum as was applied to the workspace but with a possibly different rebin
@@ -965,7 +1033,17 @@ class NormalizeToMonitor(sans_reduction_steps.Normalize):
             r_alg = 'InterpolatingRebin'
         else :
             r_alg = 'Rebin'
-        reducer.to_wavelen.execute(reducer, self.output_wksp, bin_alg=r_alg)
+        reducer.to_wavelen.execute(reducer, norm_ws, bin_alg=r_alg)
+
+        out_workspace = workspace
+        if self.save_original:
+            # At this point need to fork off workspace name to keep a workspace containing raw counts
+            RenameWorkspace(workspace, self.save_original)
+            workspace = self.save_original
+
+        Divide(workspace, norm_ws, out_workspace)
+
+        DeleteWorkspace(norm_ws)
 
 class TransmissionCalc(sans_reduction_steps.BaseTransmission):
     """
@@ -1019,9 +1097,6 @@ class TransmissionCalc(sans_reduction_steps.BaseTransmission):
         # a custom transmission workspace, if we have this there is much less to do 
         self.calculated_samp = ''
         self.calculated_can = None
-        #the result of this calculation that will be used by CalculateNorm() and the ConvertToQ
-        self.output_wksp = None
-
 
     def set_trans_fit(self, min=None, max=None, fit_method=None, override=True):
         if min: min = float(min)
@@ -1118,9 +1193,13 @@ class TransmissionCalc(sans_reduction_steps.BaseTransmission):
                 #if no transmission files were specified this isn't an error, we just do nothing
                 return None
 
-        self.output_wksp = trans_ws+'_rebinned'
-        RebinToWorkspace(trans_ws, workspace, self.output_wksp)
+        rebinned = trans_ws+'_rebinned'
+        RebinToWorkspace(trans_ws, workspace, rebinned)
         
+        Divide(workspace, rebinned, workspace)
+
+        DeleteWorkspace(rebinned)
+
     def _get_run_wksps(self):
         """
             Retrieves the names runs that contain the user specified for calculation
@@ -1222,13 +1301,24 @@ class TransmissionCalc(sans_reduction_steps.BaseTransmission):
         return fitted_name, unfitted
 
 
-class AbsoluteUnits(ReductionStep):
+class ISISCorrections(ReductionStep):
     DEFAULT_SCALING = 100.0
     def __init__(self):
         # Scaling values [%]
         self.rescale= self.DEFAULT_SCALING
     
+    def set_filename(self, filename):
+        raise AttributeError('The correction must be set in the instrument, or use the CorrectionToFileStep instead')
+
     def execute(self, reducer, workspace):
+        #use the instrument's correction file
+        corr_file = reducer.instrument.cur_detector().correction_file
+        corr = sans_reduction_steps.CorrectToFileStep(corr_file, "Wavelength", "Divide")
+        try:
+            corr.execute(reducer, workspace)
+        except ValueError:
+            raise ValueError('Could not find the correction file %s, is the path incorrect or is it not the same directory as your user file?' % corr_file)
+
         scalefactor = self.rescale
         # Data reduced with Mantid is a factor of ~pi higher than Colette.
         # For LOQ only, divide by this until we understand why.
@@ -1238,45 +1328,22 @@ class AbsoluteUnits(ReductionStep):
 
         ws = mantid[workspace]
         ws *= scalefactor
-        
-class CalculateNormISIS(sans_reduction_steps.CalculateNorm):
+
+class CorrectToFileISIS(sans_reduction_steps.CorrectToFileStep):
     """
-        Generates the normalization workspaces required by Q1D from normalization
-        produced by other, sometimes optional, reduction_steps or a specified
-        workspace
+        Adds the ability to change the name of the output workspace to
+        its CorrectToFileStep (the base ReductionStep) 
     """
-    TMP_WORKSPACE_NAME = '__CalculateNormISIS_loaded_tmp'
-    
-    def  __init__(self, wavelength_deps=[]):
-        super(CalculateNormISIS, self).__init__(wavelength_deps)
-        #algorithm to be used to load correction files
-        self._load='Load'
-        #a parameters string to add as the last argument to the above algorithm
-        self._load_params='FirstColumnValue="SpectrumNumber"'
-
-    def calculate(self, reducer):
-        """
-            Multiplies all the wavelength scalings into one workspace and all the detector
-            dependent scalings into another workspace that can be used by ConvertToQ
-        """
-        #use the instrument's correction file
-        corr_file = reducer.instrument.cur_detector().correction_file
-        if corr_file:
-            LoadRKH(corr_file, self.TMP_WORKSPACE_NAME, "Wavelength")
-            self._wave_adjs.append(self.TMP_WORKSPACE_NAME)
-            
-            if self._is_point_data(self.TMP_WORKSPACE_NAME):
-                ConvertToHistogram(self.TMP_WORKSPACE_NAME, self.TMP_WORKSPACE_NAME)
-
-        wave_adj, pixel_adj = super(CalculateNormISIS, self).calculate(reducer)
+    def __init__(self, file='', corr_type='', operation='', name_container=[]):
+        super(CorrectToFileISIS, self).__init__(file, corr_type, operation)
+        self.out_container = name_container
 
-        if pixel_adj:
-            #remove all the pixels that are not present in the sample data (the other detector)
-            reducer.instrument.cur_detector().crop_to_detector(pixel_adj, pixel_adj)
-        
-        isis_reducer.deleteWorkspaces([self.TMP_WORKSPACE_NAME])
-        
-        return wave_adj, pixel_adj
+    def execute(self, reducer, workspace):
+        if self._filename:
+            if len(self.out_container) > 0:
+                reducer.output_wksp = self.out_container[0]
+                CorrectToFile(workspace, self._filename, reducer.output_wksp,
+                              self._corr_type, self._operation)
 
 class UnitsConvert(ReductionStep):
     """
@@ -1407,7 +1474,7 @@ class UserFile(ReductionStep):
         reducer.user_file_path = os.path.dirname(user_file)
         # Re-initializes default values
         self._initialize_mask(reducer)
-        reducer.prep_normalize.setPixelCorrFile('')
+        reducer.flood_file.set_filename("")
     
         file_handle = open(user_file, 'r')
         for line in file_handle:
@@ -1419,9 +1486,9 @@ class UserFile(ReductionStep):
         reducer.instrument.copy_correction_files()
         
         # this might change but right now there is no flood correct for the HAB 
-        if reducer.prep_normalize.getPixelCorrFile():
+        if reducer.flood_file.get_filename():
             if reducer.instrument.cur_detector().name() == 'HAB':
-                _issueWarning('Is your flood detection file "%s" valid on the HAB? Otherwise it my give negative intensities!' % reducer.prep_normalize.getPixelCorrFile())
+                _issueWarning('Is your flood detection file "%s" valid on the HAB? Otherwise it my give negative intensities!' % reducer.flood_file.get_filename())
         
         self.executed = True
         return self.executed
@@ -1656,7 +1723,7 @@ class UserFile(ReductionStep):
                         except AttributeError:
                             raise AttributeError('Detector HAB does not exist for the current instrument, set the instrument to LOQ first')
                     elif parts[0].upper() == 'FLAT':
-                        reducer.prep_normalize.setPixelCorrFile(filepath)
+                        reducer.flood_file.set_filename(filepath)
                     else:
                         pass
                 elif len(parts) == 2: