diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py
index f19287627e9187dca1944a8f58d44c958dd71553..caf57a13b001e6d749c047685e358bf3f956a582 100644
--- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py
@@ -1,6 +1,6 @@
 """ Sample MAPS reduction scrip """ 
-#import os
-#os.environ["PATH"] = r"c:/Mantid/Code/builds/br_10803/bin/Release;"+os.environ["PATH"]
+import os
+os.environ["PATH"] = r"c:\Mantid\Code\builds\br_master\bin\Release;" + os.environ["PATH"]
 from Direct.ReductionWrapper import *
 try:
     import reduce_vars as web_var
@@ -9,137 +9,136 @@ except:
 
 
 class ReduceMAPS(ReductionWrapper):
-   @MainProperties
-   def def_main_properties(self):
-       """ Define main properties used in reduction """ 
-       prop = {}
-       prop['sample_run'] = 17269
-       prop['wb_run'] = 17186
-       prop['incident_energy'] = 150
-       prop['energy_bins'] = [-15,3,135]
-
-       
-      # Absolute units reduction properties.
-       prop['monovan_run'] = 17589
-       prop['sample_mass'] = 10/(94.4/13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test
-       prop['sample_rmm'] = 435.96 #
-       return prop
-
-   @AdvancedProperties
-   def def_advanced_properties(self):
-      """  separation between simple and advanced properties depends
+    @MainProperties
+    def def_main_properties(self):
+        """ Define main properties used in reduction """ 
+        prop = {}
+        prop['sample_run'] = 17269
+        prop['wb_run'] = 17186
+        prop['incident_energy'] = 150
+        prop['energy_bins'] = [-15,3,135]
+
+        # Absolute units reduction properties.
+        prop['monovan_run'] = 17589
+        prop['sample_mass'] = 10 / (94.4 / 13) # this number gives approximately the same system-test intensities for MAPS as the old test
+        prop['sample_rmm'] = 435.96
+        return prop
+
+    @AdvancedProperties
+    def def_advanced_properties(self):
+        """  separation between simple and advanced properties depends
            on scientist, experiment and user.
            main properties override advanced properties.      
-      """
-      prop = {}
-      prop['map_file'] = 'default'
-      #prop['monovan_mapfile'] = 'default' #'4to1_mid_lowang.map' # default
-      prop['hard_mask_file'] =None
-      #prop['det_cal_file'] = ? default?
-      prop['save_format']=''
+        """
+        prop = {}
+        prop['map_file'] = 'default'
+        #prop['monovan_mapfile'] = 'default' #'4to1_mid_lowang.map' # default
+        prop['hard_mask_file'] = None
+        #prop['det_cal_file'] = ?  default?
+        prop['save_format'] = ''
+
+        prop['diag_remove_zero'] = False
+
+        # these are the parameters which were used in the old MAPS_Parameters.xml test.
+        prop['wb-integr-max'] = 300
+        #prop['wb_integr_range']=[20,300]
+        prop['bkgd-range-min'] = 12000
+        prop['bkgd-range-max'] = 18000
+        #prop['bkgd_range']=[12000,18000]
+
+        prop['diag_samp_hi'] = 1.5
+        prop['diag_samp_sig'] = 3.3
+        prop['diag_van_hi'] = 2.0
       
-      prop['diag_remove_zero']=False
-
-      # this are the parameters which were used in old MAPS_Parameters.xml test. 
-      prop['wb-integr-max'] =300
-      #prop['wb_integr_range']=[20,300]
-      prop['bkgd-range-min']=12000
-      prop['bkgd-range-max']=18000
-      #prop['bkgd_range']=[12000,18000]
-
-      prop['diag_samp_hi']=1.5
-      prop['diag_samp_sig']=3.3
-      prop['diag_van_hi']=2.0
+        prop['abs_units_van_range'] = [-40,40]    
       
-      prop['abs_units_van_range']=[-40,40]    
-      
-      return prop
+        return prop
       #
-   @iliad
-   def reduce(self,input_file=None,output_directory=None):
-     """ Method executes reduction over single file
+    @iliad
+    def reduce(self,input_file=None,output_directory=None):
+        """ Method executes reduction over single file
          Overload only if custom reduction is needed
-     """
-     outWS = ReductionWrapper.reduce(self,input_file,output_directory)
-     #SaveNexus(ws,Filename = 'MARNewReduction.nxs')
-     return outWS
+        """
+        outWS = ReductionWrapper.reduce(self,input_file,output_directory)
+        #SaveNexus(ws,Filename = 'MARNewReduction.nxs')
+        return outWS
 
-   def __init__(self,web_var=None):
+    def __init__(self,web_var=None):
        """ sets properties defaults for the instrument with Name"""
        ReductionWrapper.__init__(self,'MAP',web_var)
-   # 
-   def set_custom_output_filename(self):
-      """ define custom name of output files if standard one is not satisfactory 
+    #
+    def set_custom_output_filename(self):
+        """ define custom name of output files if standard one is not satisfactory 
           In addition to that, example of accessing reduction properties 
           Changing them if necessary
-      """ 
-      def custom_name(prop_man):
-          """ sample function which builds filename from 
+        """ 
+        def custom_name(prop_man):
+            """ sample function which builds filename from 
               incident energy and run number and adds some auxiliary information 
               to it.
-          """ 
-          # Note -- properties have the same names  as the list of advanced and 
-          # main properties
-          ei = prop_man.incident_energy
-          # sample run is more then just list of runs, so we use 
-          # the formalization below to access its methods
-          run_num = PropertyManager.sample_run.run_number()
-          name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei)
-          return name
+            """ 
+            # Note -- properties have the same names as the list of advanced and
+            # main properties
+            ei = prop_man.incident_energy
+            # sample run is more than just a list of runs, so we use
+            # the formalization below to access its methods
+            run_num = PropertyManager.sample_run.run_number()
+            name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei)
+            return name
        
-      # Uncomment this to use custom filename function
-      # Note: the properties are stored in prop_man class accessed as
-      # below. 
-      #return custom_name(self.reducer.prop_man)
-      # use this method to use standard file name generating function
-      return None
+        # Uncomment this to use custom filename function
+        # Note: the properties are stored in prop_man class accessed as
+        # below.
+        #return custom_name(self.reducer.prop_man)
+        # returning None falls back to the standard file-name generating function
+        return None
 
 #----------------------------------------------------------------------------------------------------------------------
+if __name__ == "__main__":
 
+    data_root = r'd:\Data\MantidDevArea\Datastore\DataCopies'
+    data_dir  = os.path.join(data_root,r'Testing\Data\SystemTest')
+    ref_data_dir = os.path.join(data_root,r'Testing\SystemTests\tests\analysis\reference')
+    result_dir = r'd:/Data/Mantid_Testing/14_12_15'
 
+    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,ref_data_dir,result_dir))
+    #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
+    config['defaultsave.directory'] = result_dir # folder to save resulting spe/nxspe files.  Defaults are in
 
-if __name__=="__main__":
-     maps_dir = 'd:/Data/MantidSystemTests/Data'
-     data_dir ='d:/Data/Mantid_Testing/14_12_15'
-     ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' 
-     config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir))
-     #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
-     config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files. Defaults are in
-
-     # execute stuff from Mantid
-     rd = ReduceMAPS()
-     rd.def_advanced_properties()
-     rd.def_main_properties()
+    # execute stuff from Mantid
+    rd = ReduceMAPS()
+    rd.def_advanced_properties()
+    rd.def_main_properties()
 
 
-#### uncomment rows below to generate web variables and save then to transfer to   ###
+#### uncomment rows below to generate web variables and save them to transfer to ###
     ## web services.
     #run_dir = os.path.dirname(os.path.realpath(__file__))
     #file = os.path.join(run_dir,'reduce_vars.py')
     #rd.save_web_variables(file)
 
-#### Set up time interval (sec) for reducer to check for input data file.         ####
-    #  If this file is not present and this value is 0,reduction fails 
-    #  if this value >0 the reduction wait until file appears on the data 
+#### Set up time interval (sec) for reducer to check for input data file.  ####
+    #  If this file is not present and this value is 0, reduction fails;
+    #  if this value > 0 the reduction waits until the file appears on the data
     #  search path checking after time specified below.
-     rd.wait_for_file = 0  # waiting time interval
+    rd.wait_for_file = 0  # waiting time interval
 
 ####get reduction parameters from properties above, override what you want locally ###
-   # and run reduction. Overriding would have form:
-   # rd.reducer.property_name (from the dictionary above) = new value e.g. 
+   # and run reduction.  Overriding would have the form:
+   # rd.reducer.property_name (from the dictionary above) = new value e.g.
    # rd.reducer.energy_bins = [-40,2,40]
-   # or 
+   # or
    ## rd.reducer.sum_runs = False
 
-###### Run reduction over all run numbers or files assigned to                   ######
-     # sample_run  variable 
+###### Run reduction over all run numbers or files assigned to ######
+     # sample_run variable
 
-    # return output workspace only if you are going to do 
-    # something with it here. Running range of runs will return the array 
+    # return output workspace only if you are going to do
+    # something with it here.  Running a range of runs will return an array
     # of workspace pointers.
     #red_ws = rd.run_reduction()
     # usual way to go is to reduce workspace and save it internally
-     rd.run_reduction()
+    rd.run_reduction()
 
 
 #### Validate reduction result against known result, obtained earlier  ###
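As an aside (not part of the patch), the __main__ block added above wires the test to developer-specific Windows paths. A minimal sketch of reading the same directories from the environment, with the patch's hard-coded values as fallbacks; the environment-variable names below are purely hypothetical and are not defined anywhere in Mantid:

    import os
    # Hypothetical variable names -- chosen for illustration only.
    data_root = os.environ.get('MANTID_SYSTEMTEST_DATA_ROOT',
                               r'd:\Data\MantidDevArea\Datastore\DataCopies')
    result_dir = os.environ.get('MANTID_SYSTEMTEST_SAVE_DIR',
                                r'd:\Data\Mantid_Testing\14_12_15')
    data_dir = os.path.join(data_root, r'Testing\Data\SystemTest')
    ref_data_dir = os.path.join(data_root, r'Testing\SystemTests\tests\analysis\reference')
    # config is Mantid's configuration service, available in the script after
    # the Direct.ReductionWrapper import, exactly as used in the patch above.
    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir, ref_data_dir, result_dir))
    config['defaultsave.directory'] = result_dir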
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py
index 006a5d008272b8ea68815b0df1ce04d0ecfc3bd7..dd560e7e58ca5e3f0f33eea331ac13d76c7d19f4 100644
--- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py
@@ -1,7 +1,7 @@
 import os
-#os.environ["PATH"] =\
-#r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"]
-""" Sample MARI reduction scrip used in testing ReductionWrapper """ 
+os.environ["PATH"] =\
+r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"]
+""" Sample MARI reduction scrip used in testing ReductionWrapper """
 from Direct.ReductionWrapper import *
 try:
     import reduce_vars as web_var
@@ -10,80 +10,107 @@ except:
 
 
 class ReduceMARIFromFile(ReductionWrapper):
-   @MainProperties
-   def def_main_properties(self):
-       """ Define main properties used in reduction """ 
-       prop = {}
-       prop['sample_run'] = 11001
-       prop['wb_run'] = 11060
-       prop['incident_energy'] = 12
-       prop['energy_bins'] = [-11,0.05,11]
+    @MainProperties
+    def def_main_properties(self):
+        """ Define main properties used in reduction """ 
+        prop = {}
+        prop['sample_run'] = 11001
+        prop['wb_run'] = 11060
+        prop['incident_energy'] = 12
+        prop['energy_bins'] = [-11,0.05,11]
 
        #prop['sum_runs'] = False
 
-      # Absolute units reduction properties.
-       prop['monovan_run'] = 11015
-       prop['sample_mass'] = 10
-       prop['sample_rmm'] = 435.96
-       return prop
+        # Absolute units reduction properties.
+        prop['monovan_run'] = 11015
+        prop['sample_mass'] = 10
+        prop['sample_rmm'] = 435.96
+        return prop
 
-   @AdvancedProperties
-   def def_advanced_properties(self):
-      """  separation between simple and advanced properties depends
+    @AdvancedProperties
+    def def_advanced_properties(self):
+        """  separation between simple and advanced properties depends
            on scientist, experiment and user.
            main properties override advanced properties.      
-      """
-      prop = {}
-      prop['map_file'] = "mari_res.map"
-      prop['monovan_mapfile'] = "mari_res.map"
-      prop['hard_mask_file'] = "mar11015.msk"
-      prop['det_cal_file'] = 11060
-      prop['save_format'] = ''
-      return prop
+        """
+        prop = {}
+        prop['map_file'] = "mari_res.map"
+        prop['monovan_mapfile'] = "mari_res.map"
+        prop['hard_mask_file'] = "mar11015.msk"
+        prop['det_cal_file'] = 11060
+        prop['save_format'] = ''
+        return prop
       #
-   @iliad
-   def reduce(self,input_file=None,output_directory=None):
-     """ Method executes reduction over single file
+    @iliad
+    def reduce(self,input_file=None,output_directory=None):
+        """Method executes reduction over single file
          Overload only if custom reduction is needed
-     """
-     outWS = ReductionWrapper.reduce(self,input_file,output_directory)
-     #SaveNexus(outWS,Filename = 'MARNewReduction.nxs')
-     return outWS
+        """
+        outWS = ReductionWrapper.reduce(self,input_file,output_directory)
+        #SaveNexus(outWS,Filename = 'MARNewReduction.nxs')
+        return outWS
  
-   def validate_result(self,build_validation=False):
-      """ Change this method to verify different results     """
-      # build_validation -- if true, build and save new workspace rather then validating the old one
-      rez,message = ReductionWrapper.build_or_validate_result(self,11001,"MARIReduction.nxs",build_validation,1.e-2)
-      return rez,message
-
-   def __init__(self,web_var=None):
+    def validate_result(self,build_validation=False):
+        """Change this method to verify different results     """
+        # build_validation -- if true, build and save new workspace rather then validating the old one
+        rez,message = ReductionWrapper.build_or_validate_result(self,11001,"MARIReduction.nxs",build_validation,1.e-2)
+        return rez,message
+
+    def set_custom_output_filename(self):
+        """ define custom name of output files if standard one is not satisfactory 
+          In addition to that, example of accessing reduction properties 
+          Changing them if necessary
+        """ 
+        def custom_name(prop_man):
+            """Sample function which builds filename from
+              incident energy and run number and adds some auxiliary information
+              to it.
+            """
+            # Note -- properties have the same names as the list of advanced and
+            # main properties
+            ei = prop_man.incident_energy
+            # sample run is more than just a list of runs, so we use
+            # the formalization below to access its methods
+            run_num = PropertyManager.sample_run.run_number()
+            name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei)
+            return name
+       
+        # Uncomment this to use custom filename function
+        # Note: the properties are stored in prop_man class accessed as
+        # below.
+        #return custom_name(self.reducer.prop_man)
+        # returning None falls back to the standard file-name generating function
+        return None
+
+
+    def __init__(self,web_var=None):
        """ sets properties defaults for the instrument with Name"""
        ReductionWrapper.__init__(self,'MAR',web_var)
 #-------------------------------------------------------------------------------------------------#
 #-------------------------------------------------------------------------------------------------#
 #-------------------------------------------------------------------------------------------------#
 def main(input_file=None,output_directory=None):
-        """ This method is used to run code from web service
-            and should not be touched except changing the name of the
-            particular ReductionWrapper class (e.g. ReduceMARI here)
+    """ This method is used to run code from web service
+        and should not be touched except changing the name of the
+        particular ReductionWrapper class (e.g. ReduceMARI here)
 
-            You can also change the output folder to save data to
-            where web services will copy data
+        You can also change the output folder to save data to
+        where web services will copy data
 
-            This method will go when web service implements proper factory 
-        """
-        # note web variables initialization
-        rd = ReduceMARIFromFile(web_var)
-        rd.reduce(input_file,output_directory)
-        # change to the name of the folder to save data to
-        return ''
+        This method will go when web service implements proper factory 
+    """
+    # note web variables initialization
+    rd = ReduceMARIFromFile(web_var)
+    rd.reduce(input_file,output_directory)
+    # change to the name of the folder to save data to
+    return ''
 
 #----------------------------------------------------------------------------------------------------------------------
 class ReduceMARIFromWorkspace(ReductionWrapper):
 
    @MainProperties
    def def_main_properties(self):
-       """ Define main properties used in reduction """ 
+       """Define main properties used in reduction """
        prop = {}
        prop['sample_run'] = Load(Filename='MAR11001.RAW',OutputWorkspace='MAR11001.RAW')
        # WB workspace
@@ -278,49 +305,50 @@ class ReduceMARIMonitorsSeparate(ReductionWrapper):
 
 if __name__ == "__main__":
 
-     maps_dir = 'd:/Data/MantidSystemTests/Data'
-     data_dir = 'd:/Data/Mantid_Testing/14_12_15'
-     ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' 
-     config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir))
-     #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
-     config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files.  Defaults are in
-
-     # execute stuff from Mantid
-     #rd = ReduceMARIFromFile()
-     #rd= ReduceMARIMon2Norm()
-     rd = ReduceMARIMonitorsSeparate()
-     #rd = ReduceMARIFromWorkspace()
-     rd.def_advanced_properties()
-     rd.def_main_properties()
-
-#### uncomment rows below to generate web variables and save then to transfer to   ###
+    data_root = r'd:\Data\MantidDevArea\Datastore\DataCopies'
+    data_dir = os.path.join(data_root,r'Testing\Data\SystemTest')
+    ref_data_dir = os.path.join(data_root,r'Testing\SystemTests\tests\analysis\reference')
+    result_dir = r'd:/Data/Mantid_Testing/14_12_15'
+    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,ref_data_dir,result_dir))
+    #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
+    config['defaultsave.directory'] = result_dir # folder to save resulting spe/nxspe files.  Defaults are in
+
+    # execute stuff from Mantid
+    #rd = ReduceMARIFromFile()
+    rd = ReduceMARIMon2Norm()
+    #rd = ReduceMARIMonitorsSeparate()
+    #rd = ReduceMARIFromWorkspace()
+    rd.def_advanced_properties()
+    rd.def_main_properties()
+
+#### uncomment rows below to generate web variables and save them to transfer to ###
     ## web services.
-     run_dir = os.path.dirname(os.path.realpath(__file__))
-     file = os.path.join(run_dir,'reduce_vars.py')
-     rd.save_web_variables(file)
+    run_dir = os.path.dirname(os.path.realpath(__file__))
+    file = os.path.join(run_dir,'reduce_vars.py')
+    rd.save_web_variables(file)
 
-#### Set up time interval (sec) for reducer to check for input data file.         ####
-     #  If this file is not present and this value is 0,reduction fails 
-     #  if this value >0 the reduction wait until file appears on the data 
-     #  search path checking after time specified below.
-     rd.wait_for_file = 0  # waiting time interval
+#### Set up time interval (sec) for reducer to check for input data file.  ####
+    #  If this file is not present and this value is 0, reduction fails;
+    #  if this value > 0 the reduction waits until the file appears on the data
+    #  search path checking after time specified below.
+    rd.wait_for_file = 0  # waiting time interval
 
 ####get reduction parameters from properties above, override what you want locally ###
-   # and run reduction. Overriding would have form:
-   # rd.reducer.property_name (from the dictionary above) = new value e.g. 
+   # and run reduction.  Overriding would have the form:
+   # rd.reducer.property_name (from the dictionary above) = new value e.g.
    # rd.reducer.energy_bins = [-40,2,40]
-   # or 
+   # or
    ## rd.reducer.sum_runs = False
 
-###### Run reduction over all run numbers or files assigned to                   ######
-     # sample_run  variable 
+###### Run reduction over all run numbers or files assigned to ######
+     # sample_run variable
 
-    # return output workspace only if you are going to do 
-    # something with it here. Running range of runs will return the array 
+    # return output workspace only if you are going to do
+    # something with it here.  Running a range of runs will return an array
     # of workspace pointers.
     #red_ws = rd.run_reduction()
     # usual way to go is to reduce workspace and save it internally
-     rd.run_reduction()
+    rd.run_reduction()
 
 
 #### Validate reduction result against known result, obtained earlier  ###
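For reference, a minimal stand-alone sketch (not part of the patch) of what the commented-out custom_name hook above would produce for the MARI defaults set in def_main_properties (sample_run = 11001, incident_energy = 12):

    # Illustration of the filename format used in custom_name()
    run_num, ei = 11001, 12
    name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num, ei)
    print(name)  # RUN11001atEi12.0meV_One2One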
diff --git a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py
index 465d731e23c735fd49e57391c3f6952694a78fd3..0475bb0fd19564e3e6c4f3337cf50f689ac5b34b 100644
--- a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py
+++ b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py
@@ -211,7 +211,7 @@ class DirectEnergyConversion(object):
                 white.add_masked_ws(white_data)
                 DeleteWorkspace(Workspace='white_ws_clone')
                 diag_mask,n_masks = white.get_masking()
-            if not(out_ws_name is None):
+            if out_ws_name is not None:
                 dm = CloneWorkspace(diag_mask,OutputWorkspace=out_ws_name)
                 return dm
             else:
@@ -241,7 +241,7 @@ class DirectEnergyConversion(object):
                 # Set up the background integrals for diagnostic purposes
                 result_ws = self.normalise(diag_sample, self.normalise_method)
 
-                #>>> here result workspace is being processed 
+                #>>> here the result workspace is being processed
                 #-- not touching result ws
                 bkgd_range = self.background_test_range
                 background_int = Integration(result_ws,\
@@ -261,12 +261,15 @@ class DirectEnergyConversion(object):
         # extract existing white mask if one is defined and provide it for
         # diagnose to use instead of constantly diagnosing the same vanadium
         white_mask,num_masked = white.get_masking()
-        if not(white_mask is None) and not(sample_mask is None):
-            # nothing to do then
+        if white_mask is None or sample_mask is None:
+            pass # have to run diagnostics
+        else:
+            # Sample and white masks are both defined,
+            # so there is nothing more to do
             total_mask = sample_mask + white_mask
             return total_mask
-        else:
-            pass # have to run diagnostics after all
+
+
 
         # Check how we should run diag
         diag_spectra_blocks = self.diag_spectra
@@ -376,7 +379,7 @@ class DirectEnergyConversion(object):
             masking,header = self._run_diagnostics(prop_man)
         else:
             header = '*** Using stored mask file for workspace with {0} spectra and {1} masked spectra'
-            masking = self.spectra_masks 
+            masking = self.spectra_masks
 
         # estimate and report the number of failing detectors
         nMaskedSpectra = get_failed_spectra_list_from_masks(masking)
@@ -405,8 +408,8 @@ class DirectEnergyConversion(object):
             MonovanCashNum = PropertyManager.monovan_run.run_number()
         else:
             MonovanCashNum = None
-        # Set or clear monovan run number to use in cash ID to return correct 
-        # cashed value of monovan integral
+        # Set or clear the monovan run number used in the cache ID to return the correct
+        # cached value of the monovan integral
         PropertyManager.mono_correction_factor.set_cash_mono_run_number(MonovanCashNum)
 
         mono_ws_base = None
@@ -457,7 +460,7 @@ class DirectEnergyConversion(object):
             # or use previously cashed value
             cashed_mono_int = PropertyManager.mono_correction_factor.get_val_from_cash(prop_man)
             if MonovanCashNum != None or self.mono_correction_factor or cashed_mono_int:
-                deltaE_ws_sample,mono_ws_base=self._do_abs_corrections(deltaE_ws_sample,cashed_mono_int,\
+                deltaE_ws_sample,mono_ws_base = self._do_abs_corrections(deltaE_ws_sample,cashed_mono_int,\
                     ei_guess,mono_ws_base,tof_range, cut_ind,num_ei_cuts)
             else:
                 pass # no absolute units corrections
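The diagnostics hunk above replaces the original test not(white_mask is None) and not(sample_mask is None) with the inverted test white_mask is None or sample_mask is None and swaps the branches. A quick illustrative check (not part of the patch) that both forms pick the return-total_mask branch in exactly the same cases:

    # Exhaustive check over the four defined/undefined mask combinations
    for white_mask in (None, 'white_mask_ws'):
        for sample_mask in (None, 'sample_mask_ws'):
            old_branch_taken = not (white_mask is None) and not (sample_mask is None)
            new_branch_taken = not (white_mask is None or sample_mask is None)
            assert old_branch_taken == new_branch_taken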
diff --git a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py
index 332ff0751f6c5554304db29c1cbb673038877b3d..5eee0d0e058300f9edbfc25eda239154dd3ea581 100644
--- a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py
+++ b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py
@@ -27,12 +27,12 @@ class ReductionWrapper(object):
           and define if wrapper runs from web services or not
       """
       # internal variable, indicating if we should try to wait for input files to appear
-      self._wait_for_file=False
-      # internal variable, used in system tests to validate workflow, 
-      # with waiting for files. It is the holder to the function
+      self._wait_for_file = False
+      # internal variable, used in system tests to validate workflow,
+      # with waiting for files.  It holds the function
       # used during debugging "wait for files" workflow
       # instead of Pause algorithm
-      self._debug_wait_for_files_operation=None
+      self._debug_wait_for_files_operation = None
 
       # The variables which are set up from web interface or to be exported to
       # web interface
@@ -60,7 +60,7 @@ class ReductionWrapper(object):
 
     @wait_for_file.setter
     def wait_for_file(self,value):
-        if value>0:
+        if value > 0:
             self._wait_for_file = value
         else:
             self._wait_for_file = False
@@ -77,7 +77,7 @@ class ReductionWrapper(object):
         if not FileName:
             FileName = 'reduce_vars.py'
 
-        f=open(FileName,'w')
+        f = open(FileName,'w')
         f.write("standard_vars = {\n")
         str_wrapper = '         '
         for key,val in self._wvs.standard_vars.iteritems():
@@ -86,17 +86,17 @@ class ReductionWrapper(object):
                   else:
                       row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
                   f.write(row)
-                  str_wrapper=',\n         '
+                  str_wrapper = ',\n         '
         f.write("\n}\nadvanced_vars={\n")
 
-        str_wrapper='         '
+        str_wrapper = '         '
         for key,val in self._wvs.advanced_vars.iteritems():
                   if isinstance(val,str):
                       row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val)
                   else:
                       row = "{0}\'{1}\':{2}".format(str_wrapper,key,val)
                   f.write(row)
-                  str_wrapper=',\n        '
+                  str_wrapper = ',\n        '
         f.write("\n}\n")
         f.close()
 
@@ -106,15 +106,15 @@ class ReductionWrapper(object):
         self.def_advanced_properties()
         self.def_main_properties()
         if self._run_from_web:
-            web_vars = dict(self._wvs.standard_vars.items()+self._wvs.advanced_vars.items())
+            web_vars = dict(self._wvs.standard_vars.items() + self._wvs.advanced_vars.items())
             self.reducer.prop_man.set_input_parameters(**web_vars)
         else:
-            pass # we should set already set up variables using 
+            pass # we should set already set up variables using
 
         # validate properties and report result
         return self.reducer.prop_man.validate_properties(False)
 #
-#   
+#
     def validate_result(self,build_validation=False,Error=1.e-3,ToleranceRelErr=True):
         """ Overload this using build_or_validate_result to have possibility to run or validate result """ 
         return True
@@ -156,14 +156,14 @@ class ReductionWrapper(object):
                     self.reducer.prop_man.log\
                         ("*** WARNING:can not load (find?) validation file {0}\n"\
                          "    Building validation".format(validation_file),'warning')
-                    build_validation=True
+                    build_validation = True
            else:
-              build_validation=True
+              build_validation = True
 
 
         # just in case, to be sure
         current_web_state = self._run_from_web
-        current_wait_state= self.wait_for_file
+        current_wait_state = self.wait_for_file
         # disable wait for input and
         self._run_from_web = False
         self.wait_for_file = False
@@ -172,7 +172,7 @@ class ReductionWrapper(object):
         self.def_main_properties()
         #
         self.reducer.sample_run = sample_run
-        self.reducer.prop_man.save_format=None
+        self.reducer.prop_man.save_format = None
 
         reduced = self.reduce()
 
@@ -182,7 +182,7 @@ class ReductionWrapper(object):
             else:
                result_name = self.reducer.prop_man.save_file_name
             self.reducer.prop_man.log("*** Saving validation file with name: {0}.nxs".format(result_name),'notice')
-            SaveNexus(reduced,Filename=result_name+'.nxs')
+            SaveNexus(reduced,Filename=result_name + '.nxs')
             return True,'Created validation file {0}.nxs'.format(result_name)
         else:
             if isinstance(reduced,list): # check only first result in multirep
@@ -243,7 +243,7 @@ class ReductionWrapper(object):
            config['defaultsave.directory'] = str(output_directory)
 
         timeToWait = self._wait_for_file
-        if timeToWait>0:
+        if timeToWait > 0:
             Found,input_file = PropertyManager.sample_run.find_file(be_quet=True)
             while not Found:
                 file_hint,fext = PropertyManager.sample_run.file_hint()
@@ -269,25 +269,25 @@ class ReductionWrapper(object):
        self.reducer.prop_man.sum_runs = True
 
        timeToWait = self._wait_for_file
-       if timeToWait>0:
-          run_files =        PropertyManager.sample_run.get_run_list()
+       if timeToWait > 0:
+          run_files = PropertyManager.sample_run.get_run_list()
           num_files_to_sum = len(PropertyManager.sample_run)
 
           ok,missing,found = self.reducer.prop_man.find_files_to_sum()
           n_found = len(found)
           if not ok:
-              # necessary to cache intermediate sums in memory 
+              # necessary to cache intermediate sums in memory
               self.reducer.prop_man.cashe_sum_ws = True
           while not(ok):
-              while n_found>0:
+              while n_found > 0:
                  last_found = found[-1]
                  self.reducer.prop_man.sample_run = last_found # request to reduce all up to last found
                  ws = self.reducer.convert_to_energy()
                  # reset search to whole file list again
-                 self.reducer.prop_man.sample_run = run_files[num_files_to_sum-1]
+                 self.reducer.prop_man.sample_run = run_files[num_files_to_sum - 1]
                  ok,missing,found = self.reducer.prop_man.find_files_to_sum()
                  n_found = len(found)
-                 if ok: # no need to cache sum any more. All necessary files found
+                 if ok: # no need to cache sum any more.  All necessary files found
                     self.reducer.prop_man.cashe_sum_ws = False
 
               self.reducer.prop_man.log("*** Waiting {0} sec for runs {1} to appear on the data search path"\
@@ -296,7 +296,7 @@ class ReductionWrapper(object):
               ok,missing,found = self.reducer.prop_man.find_files_to_sum()
               n_found = len(found)
           #end not(ok)
-          if n_found>0:
+          if n_found > 0:
             # cash sum can be dropped now if it has not been done before
              self.reducer.prop_man.cashe_sum_ws = False
              ws = self.reducer.convert_to_energy()
@@ -304,53 +304,53 @@ class ReductionWrapper(object):
          ws = self.reducer.convert_to_energy()
 
        return ws
-    # 
+    #
     def run_reduction(self):
-       """" Reduces runs one by one or sum all them together and reduce after this
+        """" Reduces runs one by one or sum all them together and reduce after this
 
-            if wait_for_file time is > 0, it will until  missing files appear on the 
+            if wait_for_file time is > 0, it will wait until missing files appear on the
             data search path
-       """ 
-       try:
-         n,r = funcreturns.lhs_info('both')
-         out_ws_name = r[0]
-       except:
-         out_ws_name = None
-
-       if self.reducer.sum_runs:
-# --------### sum runs provided      ------------------------------------###
-          if out_ws_name is None:
-            self.sum_and_reduce()
-            return None
-          else:
-            red_ws=self.sum_and_reduce() 
-            RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name)
-            return mtd[out_ws_name]
-       else:
+        """
+        try:
+            n,r = funcreturns.lhs_info('both')
+            out_ws_name = r[0]
+        except:
+            out_ws_name = None
+
+        if self.reducer.sum_runs:
+# --------### sum runs provided ------------------------------------###
+            if out_ws_name is None:
+                self.sum_and_reduce()
+                return None
+            else:
+                red_ws = self.sum_and_reduce() 
+                RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name)
+                return mtd[out_ws_name]
+        else:
 # --------### reduce list of runs one by one ----------------------------###
-         runfiles = PropertyManager.sample_run.get_run_file_list()
-         if out_ws_name is None:
-            for file in runfiles:
-                 self.reduce(file)
-            #end
-            return None
-         else:
-            results=[]
-            nruns = len(runfiles)
-            for num,file in enumerate(runfiles):
-                 red_ws=self.reduce(file)
-                 if nruns >1:
-                    out_name = out_ws_name+'#{0}of{1}'.format(num+1,nruns)
-                    RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_name)
-                    red_ws = mtd[out_name]
-                 results.append(red_ws)
-            #end
-            if len(results) == 1:
-               return results[0]
+            runfiles = PropertyManager.sample_run.get_run_file_list()
+            if out_ws_name is None:
+                for file in runfiles:
+                    self.reduce(file)
+                return None
             else:
-               return results
-       #end
-
+                results = []
+                nruns = len(runfiles)
+                for num,file in enumerate(runfiles):
+                    red_ws = self.reduce(file)
+                    if nruns > 1:
+                        out_name = out_ws_name + '#{0}of{1}'.format(num + 1,nruns)
+                        RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_name)
+                        red_ws = mtd[out_name]
+                    results.append(red_ws)
+                #end
+                if len(results) == 1:
+                    return results[0]
+                else:
+                    return results
+                #end if 
+            #end if
+        #end
 
 def MainProperties(main_prop_definition):
     """ Decorator stores properties dedicated as main and sets these properties
@@ -376,7 +376,7 @@ def AdvancedProperties(adv_prop_definition):
         #print "in decorator: ",properties
         host = args[0]
         if not host._run_from_web: # property run locally
-            host._wvs.advanced_vars =prop_dict
+            host._wvs.advanced_vars = prop_dict
             host.reducer.prop_man.set_input_parameters(**prop_dict)
         return prop_dict
 
@@ -398,19 +398,19 @@ def iliad(reduce):
             out_ws_name = None
 
         host = args[0]
-        if len(args)>1:
+        if len(args) > 1:
             input_file = args[1]
-            if len(args)>2:
+            if len(args) > 2:
                 output_directory = args[2]
             else:
-                output_directory =None
+                output_directory = None
         else:
-            input_file=None
-            output_directory=None
+            input_file = None
+            output_directory = None
         # add input file folder to data search directory if file has it
         if input_file and isinstance(input_file,str):
            data_path = os.path.dirname(input_file)
-           if len(data_path)>0:
+           if len(data_path) > 0:
               try:
                  config.appendDataSearchDir(str(data_path))
                  args[1] = os.path.basename(input_file)
@@ -420,7 +420,7 @@ def iliad(reduce):
            config['defaultsave.directory'] = str(output_directory)
 
         if host._run_from_web:
-            web_vars = dict(host._wvs.standard_vars.items()+host._wvs.advanced_vars.items())
+            web_vars = dict(host._wvs.standard_vars.items() + host._wvs.advanced_vars.items())
             host.reducer.prop_man.set_input_parameters(**web_vars)
         else:
             pass # we should set already set up variables using
@@ -433,17 +433,17 @@ def iliad(reduce):
 
         # prohibit returning workspace to web services.
         if host._run_from_web and not isinstance(rez,str):
-            rez=""
+            rez = ""
         else:
           if isinstance(rez,list):
               # multirep run, just return as it is
               return rez
           if out_ws_name and rez.name() != out_ws_name :
-              rez=RenameWorkspace(InputWorkspace=rez,OutputWorkspace=out_ws_name)
+              rez = RenameWorkspace(InputWorkspace=rez,OutputWorkspace=out_ws_name)
 
         return rez
 
     return iliad_wrapper
 
-if __name__=="__main__":
+if __name__ == "__main__":
     pass
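For orientation, an abridged sketch (values taken from the MARI properties earlier in this patch; the real file lists every standard and advanced property and the key order follows dictionary iteration) of the reduce_vars.py module that save_web_variables() writes out:

    # reduce_vars.py -- illustrative content only, generated by
    # ReductionWrapper.save_web_variables()
    standard_vars = {
             'sample_run':11001,
             'wb_run':11060,
             'incident_energy':12,
             'energy_bins':[-11, 0.05, 11]
    }
    advanced_vars={
             'map_file':'mari_res.map',
             'monovan_mapfile':'mari_res.map',
             'hard_mask_file':'mar11015.msk',
             'det_cal_file':11060
    }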
diff --git a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py
index a7c388cd50b79fa05abce7b66583962e95a49079..b48d2fd12c7eab0af79386a84827c606b70dc86a 100644
--- a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py
+++ b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py
@@ -30,8 +30,8 @@ class RunList(object):
         self._set_fnames(fnames,fext)
 #--------------------------------------------------------------------------------------------------
     def set_cashed_sum_ws(self,ws,new_ws_name=None):
-        """Store the name of a workspace in the class 
-           as reference 
+        """Store the name of a workspace in the class
+           as reference
         """
         if new_ws_name:
             old_name = ws.name()
@@ -45,15 +45,15 @@ class RunList(object):
         self._partial_sum_ws_name = new_ws_name
     #
     def get_cashed_sum_ws(self):
-       """Return python pointer to cached sum workspace
-       """
-       if not self._partial_sum_ws_name:
+        """Return python pointer to cached sum workspace
+        """
+        if not self._partial_sum_ws_name:
             return None
-       if self._partial_sum_ws_name in mtd:
+        if self._partial_sum_ws_name in mtd:
             return mtd[self._partial_sum_ws_name]
-       else:
+        else:
             return None
-   #
+    #
     def get_cashed_sum_clone(self):
         """ """
         origin = self.get_cashed_sum_ws()
@@ -78,13 +78,13 @@ class RunList(object):
 #--------------------------------------------------------------------------------------------------
     #
     def _set_fnames(self,fnames,fext):
-        """Sets filenames lists and file extension lists 
+        """Sets filenames lists and file extension lists
             of length correspondent to run number length
 
-           if length of the list provided differs from the length 
-           of the run list, expands fnames list and fext list 
+           if length of the list provided differs from the length
+           of the run list, expands fnames list and fext list
            to the whole runnumber list using last for fext and
-           first for fnames members of the  
+           first for fnames members of the
         """
         if fnames:
             if isinstance(fnames,list):
@@ -92,7 +92,7 @@ class RunList(object):
             else:
                 self._file_path = [fnames]
 
-        if not(self._file_path):
+        if not self._file_path:
             self._file_path = [''] * len(self._run_numbers)
         else:
             if len(self._file_path) != len(self._run_numbers):
@@ -104,7 +104,7 @@ class RunList(object):
             else:
                 self._fext = [fext]
 
-        if not (self._fext):
+        if not self._fext:
             self._fext = [''] * len(self._run_numbers)
         else:
             if len(self._fext) != len(self._run_numbers):
@@ -114,7 +114,7 @@ class RunList(object):
     def get_file_guess(self,inst_name,run_num,default_fext=None,index=None):
         """Return the name of run file for run number provided
 
-          Note: internal file extension overwrites 
+          Note: internal file extension overwrites
           default_fext if internal is not empty
         """
         if index is None:
@@ -138,56 +138,56 @@ class RunList(object):
         return self._run_numbers
     #
     def add_or_replace_run(self,run_number,fpath='',fext=None,default_fext=False):
-      """Add run number to list of existing runs
+        """Add run number to list of existing runs
 
          Let's prohibit adding the same run numbers using this method.
          Equivalent run numbers can still be added using list assignment
 
-         file path and file extension are added/modified if present 
+         file path and file extension are added/modified if present
          regardless of run being added or replaced
-      """
-      if not(run_number in self._run_numbers):
-          self._run_numbers.append(run_number)
-          if not fpath:
-             fpath = self._file_path[-1]
-          self._file_path.append(fpath)
-          if not fext:
-             fext = self._fext[-1]
-          self._fext.append(fext)
-
-          self._last_ind2sum = len(self._run_numbers) - 1
-          return self._last_ind2sum
-      else:
-          ext_ind = self._run_numbers.index(run_number)
-          if len(fpath) > 0:
-             self._file_path[ext_ind] = fpath
-          if fext:
-             if not(default_fext and len(self._fext[ext_ind]) > 0): #not keep existing
-                self._fext[ext_ind] = fext
-          self._last_ind2sum = ext_ind
-          return ext_ind
+        """
+        if run_number not in self._run_numbers:
+            self._run_numbers.append(run_number)
+            if not fpath:
+                fpath = self._file_path[-1]
+            self._file_path.append(fpath)
+            if not fext:
+                fext = self._fext[-1]
+            self._fext.append(fext)
+
+            self._last_ind2sum = len(self._run_numbers) - 1
+            return self._last_ind2sum
+        else:
+            ext_ind = self._run_numbers.index(run_number)
+            if len(fpath) > 0:
+                self._file_path[ext_ind] = fpath
+            if fext:
+                if not (default_fext and len(self._fext[ext_ind]) > 0): # i.e. do not keep the existing extension
+                    self._fext[ext_ind] = fext
+            self._last_ind2sum = ext_ind
+            return ext_ind
     #
     def check_runs_equal(self,run_list,fpath=None,fext=None):
-        """Returns true if all run numbers in existing list are 
+        """Returns true if all run numbers in existing list are
            in the comparison list and vice versa.
 
-           if lists numbers coincide, 
+           if the run numbers coincide,
            sets new file_path and fext list if such are provided
         """
         if len(run_list) != len(self._run_numbers):
             return False
 
         for run in run_list:
-            if not(run in self._run_numbers):
+            if run not in self._run_numbers:
                 return False
         self._set_fnames(fpath,fext)
-        return True 
+        return True
     #
     def get_current_run_info(self,sum_runs,ind=None):
         """Return last run info for file to sum"""
         if ind:
             if not(ind > -1 and ind < len(self._run_numbers)):
-                raise RuntimeError("Index {0} is outside of the run list of {1} runs".format(ind,len(self._run_numbers))) 
+                raise RuntimeError("Index {0} is outside of the run list of {1} runs".format(ind,len(self._run_numbers)))
         else:
             ind = self.get_last_ind2sum(sum_runs)
         return self._run_numbers[ind],self._file_path[ind],self._fext[ind],ind
@@ -203,7 +203,7 @@ class RunList(object):
             self._last_ind2sum = -1
     #
     def get_run_list2sum(self,num_to_sum=None):
-        """Get run numbers of the files to be summed together 
+        """Get run numbers of the files to be summed together
            from the list of defined run numbers
         """
         n_runs = len(self._run_numbers)
@@ -241,7 +241,7 @@ class RunList(object):
         return sum_ext
     #
     def find_run_files(self,inst_name,run_list=None,default_fext=None):
-       """Find run files correspondent to the run list provided
+        """Find run files correspondent to the run list provided
           and set path to these files as new internal parameters
           for the files in list
 
@@ -249,36 +249,35 @@ class RunList(object):
           not found and found 
 
           Run list have to coincide or be part of self._run_numbers
-          No special check for correctness is performed, so may fail 
+          No special check for correctness is performed, so may fail
           miserably
-       """
-
-       if not run_list:
-          run_list = self._run_numbers
-       not_found = []
-       found = []
-       for run in run_list:
-           file_hint,index = self.get_file_guess(inst_name,run,default_fext)
-           try:
-              file = FileFinder.findRuns(file_hint)[0]
-              fpath,fname = os.path.split(file)
-              fname,fex = os.path.splitext(fname)
-              self._fext[index] = fex
-              self._file_path[index] = fpath
-              #self._last_ind2sum = index
-              found.append(run)
-           except RuntimeError:
-              not_found.append(run)
-       return not_found,found
+        """
+
+        if not run_list:
+            run_list = self._run_numbers
+        not_found = []
+        found = []
+        for run in run_list:
+            file_hint,index = self.get_file_guess(inst_name,run,default_fext)
+            try:
+                file = FileFinder.findRuns(file_hint)[0]
+                fpath,fname = os.path.split(file)
+                fname,fex = os.path.splitext(fname)
+                self._fext[index] = fex
+                self._file_path[index] = fpath
+                #self._last_ind2sum = index
+                found.append(run)
+            except RuntimeError:
+                not_found.append(run)
+        return not_found,found
 #--------------------------------------------------------------------------------------------------
 #--------------------------------------------------------------------------------------------------
 #--------------------------------------------------------------------------------------------------
 class RunDescriptor(PropDescriptor):
-    """ descriptor to work with a run or list of runs specified 
-        either as run number (run file) or as
-        this run loaded in memory as a workspace 
+    """Descriptor to work with a run or list of runs specified
+       either as run number (run file) or as
+       this run loaded in memory as a workspace
 
-        Used to help 
     """
     # the host class referencing contained all instantiated descriptors.
     # Descriptors methods rely on it to work (e.g.  to extract file loader
@@ -302,9 +301,9 @@ class RunDescriptor(PropDescriptor):
         self._clear_all()
 
     def __len__(self):
-        """ overloaded len function, which 
-            return length of the run-files list
-            to work with 
+        """overloaded len function, which
+           return length of the run-files list
+           to work with
         """
         if not(self._run_number):
             return 0
@@ -314,8 +313,8 @@ class RunDescriptor(PropDescriptor):
             return 1
 #--------------------------------------------------------------------------------------------------------------------
     def _clear_all(self):
-        """ clear all internal properties, workspaces and caches, 
-            associated with this run 
+        """clear all internal properties, workspaces and caches,
+           associated with this run 
         """
         # Run number
         self._run_number = None
@@ -325,12 +324,12 @@ class RunDescriptor(PropDescriptor):
         self._fext = None
 
         if self._ws_name:
-           mon_ws = self._ws_name + '_monitors'
-           # Workspace name which corresponds to the run
-           if self._ws_name in mtd:
-              DeleteWorkspace(self._ws_name)
-           if mon_ws in mtd:
-              DeleteWorkspace(mon_ws)
+            mon_ws = self._ws_name + '_monitors'
+            # Workspace name which corresponds to the run
+            if self._ws_name in mtd:
+                DeleteWorkspace(self._ws_name)
+            if mon_ws in mtd:
+                DeleteWorkspace(mon_ws)
 
         self._ws_name = None # none if not loaded
         # String used to identify the workspace related to this property
@@ -343,57 +342,58 @@ class RunDescriptor(PropDescriptor):
         self._in_cash = False
         # clear masking workspace if any available
         if self._mask_ws_name:
-           if self._mask_ws_name in mtd:
-              DeleteWorkspace(self._mask_ws_name)
-           self._mask_ws_name = None
+            if self._mask_ws_name in mtd:
+                DeleteWorkspace(self._mask_ws_name)
+            self._mask_ws_name = None
 
 #--------------------------------------------------------------------------------------------------------------------
     def __get__(self,instance,owner):
-       """Return current run number or workspace if it is loaded"""
-       if instance is None:
-           return self
+        """Return current run number or workspace if it is loaded"""
+        if instance is None:
+            return self
 
-       if self._ws_name and self._ws_name in mtd:
-           return mtd[self._ws_name]
-       else:
+        if self._ws_name and self._ws_name in mtd:
+            return mtd[self._ws_name]
+        else:
             return self._run_number
 #--------------------------------------------------------------------------------------------------------------------
     def __set__(self,instance,value):
-       """Set up Run number and define workspace name from any source """
-       #
-       if value == None: # clear current run number
-          self._clear_all()
-          return
-       if isinstance(value, api.Workspace):
-           if  self._ws_name:
-             if self._ws_name != value.name():
-               self._clear_all()
-               self._set_ws_as_source(value)
-             else:
-                return # do nothing
+        """Set up Run number and define workspace name from any source """
+        #
+        if value is None: # clear current run number
+            self._clear_all()
+            return
+        if isinstance(value, api.Workspace):
+            if self._ws_name:
+                if self._ws_name != value.name():
+                    self._clear_all()
+                    self._set_ws_as_source(value)
+                else:
+                    return # do nothing
                 # it is just reassigning the same workspace to itself
-           else: # first assignment of workspace to property
-               self._set_ws_as_source(value)
-           return
-
-       if isinstance(value,str): # it may be run number as string or it may be a workspace name
-          if value in mtd: # workspace name
-              ws = mtd[value]
-              self.__set__(instance,ws)
-              return
-          else:  # split string into run indexes and auxiliary file parameters
-              file_path,run_num,fext = prop_helpers.parse_run_file_name(value)
-
-              if isinstance(run_num,list):
-                 self._set_run_list(instance,run_num,file_path,fext)
-              else:
-                 self._set_single_run(instance,run_num,file_path,fext,False)
-       elif isinstance(value,list):
-           self._set_run_list(instance,value,"",instance.data_file_ext)
-       else:
-           self._set_single_run(instance,value,"",instance.data_file_ext,True)
+            else: # first assignment of workspace to property
+                self._set_ws_as_source(value)
+            return
+
+        if isinstance(value,str): # it may be run number as string or it may be a workspace name
+            if value in mtd: # workspace name
+                ws = mtd[value]
+                self.__set__(instance,ws)
+                return
+            else:  # split string into run indexes and auxiliary file parameters
+                file_path,run_num,fext = prop_helpers.parse_run_file_name(value)
+
+                if isinstance(run_num,list):
+                    self._set_run_list(instance,run_num,file_path,fext)
+                else:
+                    self._set_single_run(instance,run_num,file_path,fext,False)
+        elif isinstance(value,list):
+            self._set_run_list(instance,value,"",instance.data_file_ext)
+        else:
+            self._set_single_run(instance,value,"",instance.data_file_ext,True)
 
 #--------------------------------------------------------------------------------------------------------------------
+
     def _set_single_run(self,instance,run_number,file_path='',fext=None,default_fext=False):
         """ """
         self._run_number = int(run_number)
@@ -423,16 +423,16 @@ class RunDescriptor(PropDescriptor):
     def _set_run_list(self,instance,run_list,file_path=None,fext=None):
 
         if self._run_list and self._run_list.check_runs_equal(run_list,file_path,fext):
-           return 
+            return 
         else:
-           self._clear_all()
-           self._run_list = RunList(run_list,file_path,fext)
-           run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(instance.sum_runs)
-           self._run_list.set_last_ind2sum(ind)
-           self._run_number = run_num
-           self._run_file_path = file_path
-           self._fext = main_fext 
-           self._ws_name = self._build_ws_name()
+            self._clear_all()
+            self._run_list = RunList(run_list,file_path,fext)
+            run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(instance.sum_runs)
+            self._run_list.set_last_ind2sum(ind)
+            self._run_number = run_num
+            self._run_file_path = file_path
+            self._fext = main_fext 
+            self._ws_name = self._build_ws_name()
 
     def run_number(self):
         """Return run number regardless of workspace is loaded or not"""
@@ -450,7 +450,10 @@ class RunDescriptor(PropDescriptor):
         """
         if self._mask_ws_name:
             mask_ws = mtd[self._mask_ws_name]
-            num_masked = mask_ws.getRun().getLogData('NUM_SPECTRA_Masked').value
+            #TODO: the getNumberMasked() method of the mask workspace should be exposed to Python properly
+            __tmp_masks,spectra = ExtractMask(self._mask_ws_name)
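+            # ExtractMask also returns the list of masked detector IDs, so its length gives the masked count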
+            num_masked = len(spectra)
+            DeleteWorkspace(__tmp_masks)
             return (mask_ws,num_masked)
         else:
             return (None,0)
@@ -461,20 +464,17 @@ class RunDescriptor(PropDescriptor):
         """
         if self._mask_ws_name:
             mask_ws = mtd[self._mask_ws_name]
-            num_masked = mask_ws.getRun().getLogData('NUM_SPECTRA_Masked').value
             add_mask_name = self._prop_name + '_tmp_masking'
         else:
-            num_masked = 0
             add_mask_name = self._prop_name + 'CurrentMasking'
-        masks,spectra = ExtractMask(InputWorkspace=masked_ws,OutputWorkspace=add_mask_name)
 
-        num_masked+=len(spectra)
+        masks,spectra = ExtractMask(InputWorkspace=masked_ws,OutputWorkspace=add_mask_name)
         if self._mask_ws_name:
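+            # adding the freshly extracted mask combines its masked flags into the cached mask workspace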
             mask_ws +=masks
+            DeleteWorkspace(add_mask_name)
         else:
             self._mask_ws_name = add_mask_name
-        AddSampleLog(Workspace=self._mask_ws_name,LogName = 'NUM_SPECTRA_Masked',\
-                     LogText=str(num_masked),LogType='Number')
+        # the masked-spectra count is now obtained on demand in get_masking() via ExtractMask
 #--------------------------------------------------------------------------------------------------------------------
     def is_monws_separate(self):
         """Is monitor workspace is separated from data workspace or not"""
@@ -499,7 +499,7 @@ class RunDescriptor(PropDescriptor):
             else:
                 return [current_run]
         else:
-           return [current_run]
+            return [current_run]
 #--------------------------------------------------------------------------------------------------------------------
     def get_run_file_list(self):
         """Returns list of the files, assigned to current property """
@@ -534,7 +534,7 @@ class RunDescriptor(PropDescriptor):
         return summed_runs
 #--------------------------------------------------------------------------------------------------------------------
     def get_runs_to_sum(self,existing_sum_ws=None,num_files=None):
-        """ return list of runs, expected to be summed together
+        """Return list of runs, expected to be summed together
             excluding the runs, already summed and added to cached sum workspace
         """
 
@@ -553,39 +553,39 @@ class RunDescriptor(PropDescriptor):
         runs2_sum = self._run_list.get_run_list2sum(num_files)
         for run in summed_runs:
             if run in runs2_sum:
-               del runs2_sum[runs2_sum.index(run)]
+                del runs2_sum[runs2_sum.index(run)]
         return (runs2_sum,existing_sum_ws,n_existing_sums)
 #--------------------------------------------------------------------------------------------------------------------
     def find_run_files(self,run_list=None):
-       """Find run files correspondent to the run list provided
+        """Find run files correspondent to the run list provided
           and set path to these files as new internal parameters
           for the files in the list
 
           Returns True and empty list or False and
           the list of the runs, which files were not found
           or not belong to the existing run list. 
-       """
-
-       if not self._run_list: 
-         if not run_list:
-            return (True,[],[])
-         else:
-            return (False,run_list,[])
-
-       if run_list:
-          existing = self._run_list.get_all_run_list()
-          non_existing = []
-          for run in run_list:
-              if not(run in existing):
-                 raise RuntimeError('run {0} is not in the existing run list'.format(run))
-
-       inst = RunDescriptor._holder.short_instr_name
-       default_fext = RunDescriptor._holder.data_file_ext
-       not_found,found = self._run_list.find_run_files(inst,run_list,default_fext)
-       if len(not_found) == 0:
-          return (True,[],found)
-       else:
-          return (False,not_found,found)
+        """
+
+        if not self._run_list: 
+            if not run_list:
+                return (True,[],[])
+            else:
+                return (False,run_list,[])
+
+        if run_list:
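+            # check that every requested run is already known to this descriptor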
+            existing = self._run_list.get_all_run_list()
+            non_existing = []
+            for run in run_list:
+                if not(run in existing):
+                    raise RuntimeError('run {0} is not in the existing run list'.format(run))
+
+        inst = RunDescriptor._holder.short_instr_name
+        default_fext = RunDescriptor._holder.data_file_ext
+        not_found,found = self._run_list.find_run_files(inst,run_list,default_fext)
+        if len(not_found) == 0:
+            return (True,[],found)
+        else:
+            return (False,not_found,found)
 #--------------------------------------------------------------------------------------------------------------------
     def set_action_suffix(self,suffix=None):
         """Method to set part of the workspace name, which indicate some action performed over this workspace
@@ -623,24 +623,24 @@ class RunDescriptor(PropDescriptor):
         new_name = self._build_ws_name()
         old_name = workspace.name()
         if new_name != old_name:
-           RenameWorkspace(InputWorkspace=old_name,OutputWorkspace=new_name)
+            RenameWorkspace(InputWorkspace=old_name,OutputWorkspace=new_name)
 
-           old_mon_name = old_name + '_monitors'
-           new_mon_name = new_name + '_monitors'
-           if old_mon_name in mtd:
-              RenameWorkspace(InputWorkspace=old_mon_name,OutputWorkspace=new_mon_name)
+            old_mon_name = old_name + '_monitors'
+            new_mon_name = new_name + '_monitors'
+            if old_mon_name in mtd:
+                RenameWorkspace(InputWorkspace=old_mon_name,OutputWorkspace=new_mon_name)
         self._ws_name = new_name
 #--------------------------------------------------------------------------------------------------------------------
     @staticmethod
     def _check_calibration_source():
-         """If user have not specified calibration as input to the script,
+        """If user have not specified calibration as input to the script,
             try to retrieve calibration stored in file with run properties"""
-         changed_prop = RunDescriptor._holder.getChangedProperties()
-         if 'det_cal_file' in changed_prop:
-              use_workspace_calibration = False
-         else:
-              use_workspace_calibration = True
-         return use_workspace_calibration
+        changed_prop = RunDescriptor._holder.getChangedProperties()
+        if 'det_cal_file' in changed_prop:
+            use_workspace_calibration = False
+        else:
+            use_workspace_calibration = True
+        return use_workspace_calibration
 #--------------------------------------------------------------------------------------------------------------------
     def get_workspace(self):
         """Method returns workspace correspondent to current run number(s)
@@ -649,7 +649,7 @@ class RunDescriptor(PropDescriptor):
            Returns Mantid pointer to the workspace, corresponding to this run number
         """
         if not self._ws_name:
-           self._ws_name = self._build_ws_name()
+            self._ws_name = self._build_ws_name()
 
 
         if self._ws_name in mtd:
@@ -657,26 +657,26 @@ class RunDescriptor(PropDescriptor):
             if ws.run().hasProperty("calibrated"):
                 return ws # already calibrated
             else:
-               prefer_ws_calibration = self._check_calibration_source()
-               self.apply_calibration(ws,RunDescriptor._holder.det_cal_file,prefer_ws_calibration)
-               return ws
-        else:
-           if self._run_number:
-               prefer_ws_calibration = self._check_calibration_source()
-               inst_name = RunDescriptor._holder.short_inst_name
-               calibration = RunDescriptor._holder.det_cal_file
-               if self._run_list and RunDescriptor._holder.sum_runs : # Sum runs
-                   ws = self._load_and_sum_runs(inst_name,RunDescriptor._holder.load_monitors_with_workspace)
-               else: # load current workspace
-                   ws = self.load_run(inst_name, calibration,False, RunDescriptor._holder.load_monitors_with_workspace,prefer_ws_calibration)
-
-
-               self.synchronize_ws(ws)
-               self.apply_calibration(ws,calibration,prefer_ws_calibration)
-
-               return ws
-           else:
-              return None
+                prefer_ws_calibration = self._check_calibration_source()
+                self.apply_calibration(ws,RunDescriptor._holder.det_cal_file,prefer_ws_calibration)
+                return ws
+        else:
+            if self._run_number:
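+                # workspace is not in the ADS yet: load the run file(s), then synchronize and calibrate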
+                prefer_ws_calibration = self._check_calibration_source()
+                inst_name = RunDescriptor._holder.short_inst_name
+                calibration = RunDescriptor._holder.det_cal_file
+                if self._run_list and RunDescriptor._holder.sum_runs : # Sum runs
+                    ws = self._load_and_sum_runs(inst_name,RunDescriptor._holder.load_monitors_with_workspace)
+                else: # load current workspace
+                    ws = self.load_run(inst_name, calibration,False, RunDescriptor._holder.load_monitors_with_workspace,prefer_ws_calibration)
+
+
+                self.synchronize_ws(ws)
+                self.apply_calibration(ws,calibration,prefer_ws_calibration)
+
+                return ws
+            else:
+                return None
 #--------------------------------------------------------------------------------------------------------------------
     def get_ws_clone(self,clone_name='ws_clone'):
         """Get unbounded clone of existing Run workspace"""
@@ -703,30 +703,30 @@ class RunDescriptor(PropDescriptor):
            Return the pointer to workspace being chopped
         """
         if not origin:
-           origin = self.get_workspace()
+            origin = self.get_workspace()
 
         origin_name = origin.name()
         try:
-           mon_ws = mtd[origin_name + '_monitors']
+            mon_ws = mtd[origin_name + '_monitors']
         except:
-           mon_ws = None
+            mon_ws = None
 
         target_name = '#{0}/{1}#'.format(chunk_num,n_chunks) + origin_name
         if chunk_num == n_chunks:
-           RenameWorkspace(InputWorkspace=origin_name,OutputWorkspace=target_name)
-           if mon_ws:
-              RenameWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors')
-           origin_name = target_name
-           origin_invalidated = True
+            RenameWorkspace(InputWorkspace=origin_name,OutputWorkspace=target_name)
+            if mon_ws:
+                RenameWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors')
+            origin_name = target_name
+            origin_invalidated = True
         else:
-           if mon_ws:
-              CloneWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors')
-           origin_invalidated = False
+            if mon_ws:
+                CloneWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors')
+            origin_invalidated = False
 
         if rebin: # debug and compatibility mode with old reduction
-           Rebin(origin_name,OutputWorkspace=target_name,Params=[tof_range[0],tof_range[1],tof_range[2]],PreserveEvents=False)
+            Rebin(origin_name,OutputWorkspace=target_name,Params=[tof_range[0],tof_range[1],tof_range[2]],PreserveEvents=False)
         else:
-           CropWorkspace(origin_name,OutputWorkspace=target_name,XMin=tof_range[0],XMax=tof_range[2])
+            CropWorkspace(origin_name,OutputWorkspace=target_name,XMin=tof_range[0],XMax=tof_range[2])
 
         self._set_ws_as_source(mtd[target_name])
         if origin_invalidated:
@@ -743,7 +743,7 @@ class RunDescriptor(PropDescriptor):
         """
         data_ws = self.get_workspace()
         if not data_ws:
-           return None
+            return None
 
         monWS_name = data_ws.name() + '_monitors'
         if monWS_name in mtd:
@@ -759,18 +759,18 @@ class RunDescriptor(PropDescriptor):
                 mon_ws = self.copy_spectrum2monitors(data_ws,mon_ws,specID)
 
         if monitor_ID:
-           try:
+            try:
                 ws_index = mon_ws.getIndexFromSpectrumNumber(monitor_ID)
-           except: #
-               mon_ws = None
+            except: # a monitor with this ID is not present in the monitor workspace
+                mon_ws = None
         else:
             mon_list = self._holder.get_used_monitors_list()
             for monID in mon_list:
                 try:
                     ws_ind = mon_ws.getIndexFromSpectrumNumber(int(monID))
                 except:
-                   mon_ws = None
-                   break
+                    mon_ws = None
+                    break
         return mon_ws
 #--------------------------------------------------------------------------------------------------------------------
     def is_existing_ws(self):
@@ -781,7 +781,7 @@ class RunDescriptor(PropDescriptor):
             else:
                 return False
         else:
-           return False
+            return False
 #--------------------------------------------------------------------------------------------------------------------
 #--------------------------------------------------------------------------------------------------------------------
     def get_file_ext(self):
@@ -810,7 +810,7 @@ class RunDescriptor(PropDescriptor):
            main purpose -- to support customized order of file extensions
         """
         if not run_num_str:
-           run_num_str = str(self.run_number())
+            run_num_str = str(self.run_number())
         inst_name = RunDescriptor._holder.short_inst_name
 
         if 'file_hint' in kwargs:
@@ -873,16 +873,16 @@ class RunDescriptor(PropDescriptor):
            raise IOError(data_file)
 
         if load_mon_with_workspace:
-             mon_load_option = 'Include'
+            mon_load_option = 'Include'
         else:
-             mon_load_option = 'Separate'
+            mon_load_option = 'Separate'
         #
         try: # Hack: LoadEventNexus does not understand Separate at the moment and throws.
-             # And event loader always loads monitors separately
-             Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = mon_load_option)
+            # and the event loader always loads monitors separately
+            Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = mon_load_option)
         except ValueError:
-             #mon_load_option =str(int(load_mon_with_workspace))
-             Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = '1',MonitorsAsEvents='0')
+            #mon_load_option =str(int(load_mon_with_workspace))
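+            # event data: monitors always end up in a separate workspace, loaded here as histograms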
+            Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = '1',MonitorsAsEvents='0')
 
         RunDescriptor._logger("Loaded {0}".format(data_file),'information')
 
@@ -925,7 +925,7 @@ class RunDescriptor(PropDescriptor):
         if not calibration or use_ws_calibration:
             return
         if not isinstance(loaded_ws, api.Workspace):
-           raise RuntimeError(' Calibration can be applied to a workspace only and got object of type {0}'.format(type(loaded_ws)))
+            raise RuntimeError('Calibration can only be applied to a workspace, but got an object of type {0}'.format(type(loaded_ws)))
 
         if loaded_ws.run().hasProperty("calibrated"):
             return # already calibrated
@@ -942,8 +942,8 @@ class RunDescriptor(PropDescriptor):
                     test_name = ws_calibration
                     ws_calibration = FileFinder.getFullPath(ws_calibration)
                     if len(ws_calibration) == 0:
-                       raise RuntimeError('Can not find defined in run {0} calibration file {1}\n'\
-                                          'Define det_cal_file reduction parameter properly'.format(loaded_ws.name(),test_name))
+                        raise RuntimeError('Cannot find the calibration file {1} defined in run {0}\n'\
+                                           'Define the det_cal_file reduction parameter properly'.format(loaded_ws.name(),test_name))
                     RunDescriptor._logger('*** load_data: Calibrating data using workspace defined calibration file: {0}'.format(ws_calibration),'notice')
             except KeyError: # no det_cal_file defined in workspace
                 if calibration:
@@ -1097,27 +1097,27 @@ class RunDescriptor(PropDescriptor):
         return not(self._in_cash)
 
     def notify_sum_runs_changed(self,old_value,new_value):
-       """ Take actions on changes to sum_runs option 
-       """
-       if self._run_list:
-          if old_value != new_value:
-             rl = self._run_list
-             self._clear_all()
-             rl.set_last_ind2sum(-1) # this will reset index to default
-             self._run_list = rl
-             run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(new_value)
-             self._run_list.set_last_ind2sum(ind)
-             self._run_number = run_num
-             self._run_file_path = file_path
-             self._fext = main_fext 
-             self._ws_name = self._build_ws_name(new_value)
-          if new_value is False:
-             self._run_list.del_cashed_sum()
+        """Take actions on changes to sum_runs option
+        """
+        if self._run_list:
+            if old_value != new_value:
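+                # sum mode changed: rebuild the descriptor state from the preserved run list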
+                rl = self._run_list
+                self._clear_all()
+                rl.set_last_ind2sum(-1) # this will reset index to default
+                self._run_list = rl
+                run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(new_value)
+                self._run_list.set_last_ind2sum(ind)
+                self._run_number = run_num
+                self._run_file_path = file_path
+                self._fext = main_fext
+                self._ws_name = self._build_ws_name(new_value)
+            if new_value is False:
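+                # summing switched off: drop the cached partial sum workspace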
+                self._run_list.del_cashed_sum()
 
     def _load_and_sum_runs(self,inst_name,monitors_with_ws):
         """Load multiple runs and sum them together 
 
-           monitors_with_ws -- if true, load monitors with workspace 
+           monitors_with_ws -- if true, load monitors with workspace
         """
 
         RunDescriptor._logger("*** Summing multiple runs            ****")
@@ -1138,7 +1138,7 @@ class RunDescriptor(PropDescriptor):
 
             f_guess,index = self._run_list.get_file_guess(inst_name,runs_to_sum[0])
             ws = self.load_file(inst_name,'Sum_ws',False,monitors_with_ws,
-                                      False,file_hint=f_guess)
+                                False,file_hint=f_guess)
 
             sum_ws_name = ws.name()
             sum_mon_name = sum_ws_name + '_monitors'
@@ -1203,7 +1203,7 @@ class RunDescriptorDependent(RunDescriptor):
         self._has_own_value = False
 
     def __get__(self,instance,owner=None):
-        """Return dependent run number which is host run number if this one has not been set 
+        """Return dependent run number which is host run number if this one has not been set
           or this run number if it was
         """
         if instance is None: # this class functions and the host functions
@@ -1232,9 +1232,9 @@ class RunDescriptorDependent(RunDescriptor):
     # TODO -- how to automate all these functions below?
     def run_number(self):
         if self._has_own_value:
-           return super(RunDescriptorDependent,self).run_number()
+            return super(RunDescriptorDependent,self).run_number()
         else:
-           return self._host.run_number()
+            return self._host.run_number()
     #
     def is_monws_separate(self):
         if self._has_own_value:
@@ -1348,14 +1348,14 @@ class RunDescriptorDependent(RunDescriptor):
         else:
             return self._host.clear_monitors()
     def get_masking(self):
-         if self._has_own_value:
+        if self._has_own_value:
             return super(RunDescriptorDependent,self).get_masking()
-         else:
+        else:
             return self._host.get_masking()
     def add_masked_ws(self,masked_ws):
-         if self._has_own_value:
+        if self._has_own_value:
             return super(RunDescriptorDependent,self).add_masked_ws(masked_ws)
-         else:
+        else:
             return self._host.add_masked_ws(masked_ws)
 #--------------------------------------------------------------------------------------------------------------------
 #--------------------------------------------------------------------------------------------------------------------
diff --git a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py
index 1891c01c73c893468a9d6f2127676c8aec188891..d12e771b8d0d0b208e15aeb80e2f2f9d5103f7a1 100644
--- a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py
+++ b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py
@@ -71,17 +71,17 @@ def diagnose(white_int,**kwargs):
 
     # process subsequent calls to this routine, when white mask is already defined
     white= kwargs.get('white_mask',None) # and white beam is not changed
-    # white mask assumed to be global so no sectors in there 
-    if not(white is None) and isinstance(white,RunDescriptor.RunDescriptor):
-       hardmask_file = None
-       white_mask,num_failed = white.get_masking()
-       add_masking(white_int, white_mask)
-       van_mask  = None
+    # the white mask is assumed to be global, so it contains no sectors
+    if white is not None and isinstance(white,RunDescriptor.RunDescriptor):
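+        # reuse the cached white-beam mask and skip loading the hard mask file again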
+        hardmask_file = None
+        white_mask,num_failed = white.get_masking()
+        add_masking(white_int, white_mask)
+        van_mask  = None
     else: # prepare workspace to keep white mask
         white_mask = None
         van_mask = CloneWorkspace(white_int)
 
-    if not (hardmask_file is None):
+    if hardmask_file is not None:
         LoadMask(Instrument=kwargs.get('instr_name',''),InputFile=parser.hard_mask_file,
                  OutputWorkspace='hard_mask_ws')
         MaskDetectors(Workspace=white_int, MaskedWorkspace='hard_mask_ws')
@@ -94,48 +94,47 @@ def diagnose(white_int,**kwargs):
         DeleteWorkspace('hard_mask_ws')
 
     if not parser.use_hard_mask_only :
-       # White beam Test
-       if white_mask:
+        # White beam Test
+        if white_mask:
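+            # a cached global white mask exists: record its failure count instead of repeating the white beam test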
             test_results[1] = ['white_mask cache global', num_failed]
-       else:
-          __white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge,
+        else:
+            __white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge,
                                                     parser.van_out_lo, parser.van_out_hi,
                                                     parser.van_lo, parser.van_hi,
                                                     parser.van_sig, start_index, end_index)
-          test_results[1] = [str(__white_masks), num_failed]
-          add_masking(white_int, __white_masks, start_index, end_index)
-          if van_mask:
-             add_masking(van_mask, __white_masks, start_index, end_index)
-          DeleteWorkspace(__white_masks)
-
-       # Second white beam test
-       if 'second_white' in kwargs: #NOT IMPLEMENTED 
-           raise NotImplementedError("Second white is not yet implemented")
-           __second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,\
+            test_results[1] = [str(__white_masks), num_failed]
+            add_masking(white_int, __white_masks, start_index, end_index)
+            if van_mask:
+                add_masking(van_mask, __white_masks, start_index, end_index)
+            DeleteWorkspace(__white_masks)
+
+        # Second white beam test
+        if 'second_white' in kwargs: # NOT IMPLEMENTED
+            raise NotImplementedError("Second white is not yet implemented")
+            __second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,\
                                                        parser.van_out_lo, parser.van_out_hi,\
                                                        parser.van_lo, parser.van_hi, parser.variation,\
                                                        parser.van_sig, start_index, end_index)
-           test_results[2] = [str(__second_white_masks), num_failed]
-           add_masking(white_int, __second_white_masks, start_index, end_index)
-           #TODO
-           #add_masking(van_mask, __second_white_masks, start_index, end_index)
+            test_results[2] = [str(__second_white_masks), num_failed]
+            add_masking(white_int, __second_white_masks, start_index, end_index)
+            #TODO
+            #add_masking(van_mask, __second_white_masks, start_index, end_index)
 
         #
         # Zero total count check for sample counts
         #
-       zero_count_failures = 0
-       if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False):
+        zero_count_failures = 0
+        if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False):
             add_masking(parser.sample_counts, white_int)
             maskZero, zero_count_failures = FindDetectorsOutsideLimits(InputWorkspace=parser.sample_counts,\
-                                                                    StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,\
+                                                                   StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,\
                                                                    LowThreshold=1e-10, HighThreshold=1e100)
             add_masking(white_int, maskZero, start_index, end_index)
             DeleteWorkspace(maskZero)
-
         #
         # Background check
         #
-       if hasattr(parser, 'background_int'):
+        if hasattr(parser, 'background_int'):
             add_masking(parser.background_int, white_int)
             __bkgd_mask, failures = do_background_test(parser.background_int, parser.samp_lo,\
                                                        parser.samp_hi, parser.samp_sig, parser.samp_zero, start_index, end_index)
@@ -146,7 +145,7 @@ def diagnose(white_int,**kwargs):
         #
         # Bleed test
         #
-       if hasattr(parser, 'bleed_test') and parser.bleed_test:
+        if hasattr(parser, 'bleed_test') and parser.bleed_test:
             if not hasattr(parser, 'sample_run'):
                 raise RuntimeError("Bleed test requested but the sample_run keyword has not been provided")
             __bleed_masks, failures = do_bleed_test(parser.sample_run, parser.bleed_maxrate, parser.bleed_pixels)
@@ -158,18 +157,18 @@ def diagnose(white_int,**kwargs):
     end_index_name=" to: end"
     default = True
     if hasattr(parser, 'print_diag_results') and parser.print_diag_results:
-            default=True
+        default=True
     if 'start_index' in kwargs:
-            default = False
-            start_index_name = "from: "+str(kwargs['start_index'])
+        default = False
+        start_index_name = "from: "+str(kwargs['start_index'])
     if 'end_index' in kwargs :
-            default = False
-            end_index_name = " to: "+str(kwargs['end_index'])
+        default = False
+        end_index_name = " to: "+str(kwargs['end_index'])
 
 
     testName=start_index_name+end_index_name
     if not default :
-       testName = " For bank: "+start_index_name+end_index_name
+        testName = " For bank: "+start_index_name+end_index_name
 
     if hasattr(parser, 'print_diag_results') and parser.print_diag_results:
         print_test_summary(test_results,testName)
@@ -294,7 +293,8 @@ def normalise_background(background_int, white_int, second_white_int=None):
 
     """
     if second_white_int is None:
-        # quetly divide background integral by white beam integral not reporting about possible 0 in wb integral (they will be removed by diag anyway)
+        # quietly divide the background integral by the white beam integral without reporting
+        # possible zeros in the wb integral (they will be removed by diag anyway)
         background_int =  Divide(LHSWorkspace=background_int,RHSWorkspace=white_int,WarnOnZeroDivide='0')
     else:
         hmean = 2.0*white_int*second_white_int/(white_int+second_white_int)