diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py index af683d96b1d906cf664662ed2be221c58ee02d2b..0a53f6a2f71e98ffe795722849396c8eadb2424d 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py @@ -96,7 +96,7 @@ class MARIReductionFromFile(ISISDirectInelasticReduction): return "outWS" def get_reference_file(self): return "MARIReduction.nxs" - + class MARIReductionFromFileCache(ISISDirectInelasticReduction): def __init__(self): @@ -429,49 +429,6 @@ class LETReduction(stresstesting.MantidStressTest): return "outWS", "LETReduction.nxs" -class LETReductionEvent2014Multirep(stresstesting.MantidStressTest): - """ - written in a hope that most of the stuff find here will eventually find its way into main reduction routines - """ - - def requiredMemoryMB(self): - """Far too slow for managed workspaces. They're tested in other places. Requires 20Gb""" - return 20000 - - def runTest(self): - """ - Run the LET reduction with event NeXus files - - Relies on LET_Parameters.xml file from June 2013 - """ - from ISIS_LETReduction import ReduceLET_MultiRep2014 - red = ReduceLET_MultiRep2014() - - red.def_advanced_properties() - red.def_main_properties() - - - out_ws_list=red.reduce() - - #mults =[41.178539329370217/41.178300987983413,72.235863046309746/72.231475173892022] - #New normalization for 3.4 meV: 41.178539329370217 - #Old normalization for 3.4 meV: 41.178300987983413 - #New normalization for 8 meV: 72.235863046309746 - #Old normalization for 8 meV: 72.231475173892022 - #for ind,ws in enumerate(out_ws_list): - # ws *=mults[ind] - - - - - - def validate(self): - self.tolerance = 1e-6 - self.tolerance_is_reller=False - self.disableChecking.append('SpectraMap') - self.disableChecking.append('Instrument') - - return "LETreducedEi3.4","LET14305_3_4mev.nxs","LETreducedEi8.0", "LET14305_8_0mev.nxs" class LETReductionEvent2015Multirep(stresstesting.MantidStressTest): """ @@ -495,7 +452,7 @@ class LETReductionEvent2015Multirep(stresstesting.MantidStressTest): red.def_main_properties() - out_ws_list=red.reduce() + out_ws_list=red.run_reduction() #for ind,ws in enumerate(out_ws_list): # ws *=mults[ind] diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py index df2385d75af50f9eb3824b9c5e1bd56607a23350..eba0d9b8cd46991db497491154101e0859014f38 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py @@ -1,13 +1,13 @@ -""" Sample LET reduction scrip """ +""" Sample LET reduction script """ import os -os.environ["PATH"] = r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"] +#os.environ["PATH"] = r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"] from Direct.ReductionWrapper import * try: - import reduce_vars as rv + import reduce_vars as web_var except: - rv = None + web_var = None # def find_binning_range(energy,ebin): @@ -72,349 +72,245 @@ def find_background(ws_name,bg_range): class ReduceLET_OneRep(ReductionWrapper): - @MainProperties - def def_main_properties(self): - """ Define main properties used in reduction """ + @MainProperties + def def_main_properties(self): + """Define main properties used in reduction """ - prop = {} - ei = 7.0 - ebin = [-1,0.002,0.95] + prop = {} + ei = 
7.0 + ebin = [-1,0.002,0.95] - prop['sample_run'] = 'LET00006278.nxs' - prop['wb_run'] = 'LET00005545.raw' - prop['incident_energy'] = ei - prop['energy_bins'] = ebin + prop['sample_run'] = 'LET00006278.nxs' + prop['wb_run'] = 'LET00005545.raw' + prop['incident_energy'] = ei + prop['energy_bins'] = ebin - - # Absolute units reduction properties. - #prop['monovan_run'] = 17589 - #prop['sample_mass'] = 10/(94.4/13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test - #prop['sample_rmm'] = 435.96 # - return prop - - @AdvancedProperties - def def_advanced_properties(self): - """ separation between simple and advanced properties depends + + # Absolute units reduction properties. + #prop['monovan_run'] = 17589 + #prop['sample_mass'] = 10/(94.4/13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test + #prop['sample_rmm'] = 435.96 # + return prop + + @AdvancedProperties + def def_advanced_properties(self): + """Separation between simple and advanced properties depends on scientist, experiment and user. - main properties override advanced properties. - """ - - prop = {} - prop['map_file'] = 'rings_103' - prop['hard_mask_file'] ='LET_hard.msk' - prop['det_cal_file'] = 'det_corrected7.dat' - prop['save_format']='' - prop['bleed'] = False - prop['norm_method']='current' - prop['detector_van_range']=[0.5,200] - prop['load_monitors_with_workspace']=True - #TODO: this has to be loaded from the workspace and work without this - #prop['ei-mon1-spec']=40966 - - - return prop - # - @iliad - def reduce(self,input_file=None,output_directory=None): - # run reduction, write auxiliary script to add something here. - - prop = self.reducer.prop_man - # Ignore input properties for the time being - white_ws = 'wb_wksp' - LoadRaw(Filename='LET00005545.raw',OutputWorkspace=white_ws) - #prop.wb_run = white_ws - - sample_ws = 'w1' - monitors_ws = sample_ws + '_monitors' - LoadEventNexus(Filename='LET00006278.nxs',OutputWorkspace=sample_ws, + main properties override advanced properties. 
+ """ + + prop = {} + prop['map_file'] = 'rings_103' + prop['hard_mask_file'] ='LET_hard.msk' + prop['det_cal_file'] = 'det_corrected7.dat' + prop['save_format']='' + prop['bleed'] = False + prop['norm_method']='current' + prop['detector_van_range']=[0.5,200] + prop['load_monitors_with_workspace']=True + return prop + # + @iliad + def reduce(self,input_file=None,output_directory=None): + """run reduction, write auxiliary script to add something here.""" + + prop = self.reducer.prop_man + # Ignore input properties for the time being + white_ws = 'wb_wksp' + LoadRaw(Filename='LET00005545.raw',OutputWorkspace=white_ws) + #prop.wb_run = white_ws + + sample_ws = 'w1' + monitors_ws = sample_ws + '_monitors' + LoadEventNexus(Filename='LET00006278.nxs',OutputWorkspace=sample_ws, SingleBankPixelsOnly='0',LoadMonitors='1', MonitorsAsEvents='1') - ConjoinWorkspaces(InputWorkspace1=sample_ws, InputWorkspace2=monitors_ws) - #prop.sample_run = sample_ws + ConjoinWorkspaces(InputWorkspace1=sample_ws, InputWorkspace2=monitors_ws) + #prop.sample_run = sample_ws - ebin = prop.energy_bins - ei = prop.incident_energy + ebin = prop.energy_bins + ei = prop.incident_energy - (energybin,tbin,t_elastic) = find_binning_range(ei,ebin) - Rebin(InputWorkspace=sample_ws,OutputWorkspace=sample_ws, Params=tbin, PreserveEvents='1') + (energybin,tbin,t_elastic) = find_binning_range(ei,ebin) + Rebin(InputWorkspace=sample_ws,OutputWorkspace=sample_ws, Params=tbin, PreserveEvents='1') - prop.bkgd_range=[int(t_elastic),int(tbin[2])] + prop.bkgd_range=[int(t_elastic),int(tbin[2])] - ebinstring = str(energybin[0])+','+str(energybin[1])+','+str(energybin[2]) - self.reducer.prop_man.energy_bins = ebinstring + ebinstring = str(energybin[0])+','+str(energybin[1])+','+str(energybin[2]) + self.reducer.prop_man.energy_bins = ebinstring - red = DirectEnergyConversion() + red = DirectEnergyConversion() - red.initialise(prop) - outWS = red.convert_to_energy(white_ws,sample_ws) - #SaveNexus(ws,Filename = 'MARNewReduction.nxs') + red.initialise(prop) + outWS = red.convert_to_energy(white_ws,sample_ws) + #SaveNexus(ws,Filename = 'MARNewReduction.nxs') - #when run from web service, return additional path for web server to copy data to" - return outWS + #when run from web service, return additional path for web server to copy data to" + return outWS - def __init__(self,rv=None): - """ sets properties defaults for the instrument with Name""" - ReductionWrapper.__init__(self,'LET',rv) + def __init__(self,web_var=None): + """Sets properties defaults for the instrument with Name""" + ReductionWrapper.__init__(self,'LET',web_var) #---------------------------------------------------------------------------------------------------------------------- -class ReduceLET_MultiRep2014(ReductionWrapper): - @MainProperties - def def_main_properties(self): - """ Define main properties used in reduction """ - - - prop = {} - ei=[3.4,8.] # multiple energies provided in the data file - ebin=[-4,0.002,0.8] #binning of the energy for the spe file. The numbers are as a fraction of ei [from ,step, to ] - - prop['sample_run'] = [14305] - prop['wb_run'] = 5545 - prop['incident_energy'] = ei - prop['energy_bins'] = ebin - - - # Absolute units reduction properties. 
- # Vanadium labelled Dec 2011 - flat plate of dimensions: 40.5x41x2.0# volume = 3404.025 mm**3 mass= 20.79 - prop['monovan_run'] = 14319 # vanadium run in the same configuration as your sample - prop['sample_mass'] = 20.79 # 17.25 # mass of your sample (PrAl3) - prop['sample_rmm'] = 50.9415 # 221.854 # molecular weight of your sample - - return prop - - @AdvancedProperties - def def_advanced_properties(self): - """ separation between simple and advanced properties depends - on scientist, experiment and user. - main properties override advanced properties. - """ - - prop = {} - prop['map_file'] = 'rings_103.map' - prop['det_cal_file'] = 'det_corrected7.nxs' - prop['save_format']='' - prop['bleed'] = False - prop['norm_method']='current' - prop['detector_van_range']=[2,7] - prop['background_range'] = [92000,98000] # TOF range for the calculating flat background - prop['hardmaskOnly']='LET_hard.msk' # diag does not work well on LET. At present only use a hard mask RIB has created - - # Disable internal background check TODO: fix internal background check - prop['check_background']=False - - prop['monovan_mapfile'] = 'rings_103.map' - - #TODO: Correct monitor, depending on workspace. This has to be loaded from the workspace and work without this settings - #prop['ei-mon1-spec']=40966 - - - - return prop - # - @iliad - def reduce(self,input_file=None,output_directory=None): - # run reduction, write auxiliary script to add something here. - - red_properties = self.reducer.prop_man - ####### - wb= red_properties.wb_run - run_no = red_properties.sample_run - bg_range = red_properties.background_range - ei = red_properties.incident_energy - ebin = red_properties.energy_bins - - remove_background = True #if true then will subtract a flat background in time from the time range given below otherwise put False - - red = DirectEnergyConversion() - - red.initialise(red_properties) - - energybin,tbin,t_elastic = find_binning_range(ei[0],ebin) - energybin,tbin,t_elastic = find_binning_range(ei[1],ebin) - - # loads the white-beam (or rather the long monovan ). Does it as a raw file to save time as the event mode is very large - if 'wb_wksp' in mtd: - wb_wksp=mtd['wb_wksp'] - else: #only load white-beam if not already there - wb_wksp = LoadRaw(Filename='LET0000'+str(wb)+'.raw',OutputWorkspace='wb_wksp') - #dgreduce.getReducer().det_cal_file = 'det_corrected7.nxs' - #wb_wksp = dgreduce.getReducer().load_data('LET0000'+str(wb)+'.raw','wb_wksp') - #dgreduce.getReducer().det_cal_file = wb_wksp - - for run in [run_no]: #loop around runs - fname='LET0000'+str(run)+'.nxs' - print ' processing file ', fname - #w1 = dgreduce.getReducer().load_data(run,'w1') - Load(Filename=fname,OutputWorkspace='w1',LoadMonitors='1') - - - if remove_background: - bg_ws_name=find_background('w1',bg_range) - - ############################################################################################# - # this section finds all the transmitted incident energies if you have not provided them - #if len(ei) == 0: -- not tested here -- should be unit test for that. 
- #ei = find_chopper_peaks('w1_monitors') - print 'Energies transmitted are:' - print (ei) - - RenameWorkspace(InputWorkspace = 'w1',OutputWorkspace='w1_storage') - RenameWorkspace(InputWorkspace = 'w1_monitors',OutputWorkspace='w1_mon_storage') - - #now loop around all energies for the run - result =[] - for ind,energy in enumerate(ei): - print float(energy) - (energybin,tbin,t_elastic) = find_binning_range(energy,ebin) - print " Rebinning will be performed in the range: ",energybin - # if we calculate more then one energy, initial workspace will be used more then once - if ind <len(ei)-1: - CloneWorkspace(InputWorkspace = 'w1_storage',OutputWorkspace='w1') - CloneWorkspace(InputWorkspace = 'w1_mon_storage',OutputWorkspace='w1_monitors') - else: - RenameWorkspace(InputWorkspace = 'w1_storage',OutputWorkspace='w1') - RenameWorkspace(InputWorkspace = 'w1_mon_storage',OutputWorkspace='w1_monitors') - - if remove_background: - w1=Rebin(InputWorkspace='w1',OutputWorkspace='w1',Params=tbin,PreserveEvents=False) - Minus(LHSWorkspace='w1',RHSWorkspace='bg',OutputWorkspace='w1') - - - ###################################################################### - # ensure correct round-off procedure - argi={} - argi['monovan_integr_range']=[round(ebin[0]*energy,4),round(ebin[2]*energy,4)] # integration range of the vanadium - #MonoVanWSName = None - - # absolute unit reduction -- if you provided MonoVan run or relative units if monoVan is not present - out=red.convert_to_energy(wb_wksp,"w1",energy,energybin,**argi) - - ws_name = 'LETreducedEi{0:2.1f}'.format(energy) - RenameWorkspace(InputWorkspace=out,OutputWorkspace=ws_name) - result.append(mtd[ws_name]) - - #TODO: this will go when multirep mode is implemented properly - # Store processed workspaces back to properties - wb_wksp = PropertyManager.wb_run.get_workspace() - - - #SaveNXSPE(InputWorkspace=ws_name,Filename=ws_name+'.nxspe') - - ####### - #when run from web service, return additional path for web server to copy data to" - return result - - def __init__(self,rv=None): - """ sets properties defaults for the instrument with Name""" - ReductionWrapper.__init__(self,'LET',rv) - class ReduceLET_MultiRep2015(ReductionWrapper): - @MainProperties - def def_main_properties(self): - """ Define main properties used in reduction """ + @MainProperties + def def_main_properties(self): + """Define main properties used in reduction """ - prop = {} - ei=[3.4,8.] # multiple energies provided in the data file - ebin=[-4,0.002,0.8] #binning of the energy for the spe file. The numbers are as a fraction of ei [from ,step, to ] + prop = {} + ei=[3.4,8.] # multiple energies provided in the data file + ebin=[-4,0.002,0.8] #binning of the energy for the spe file. The numbers are as a fraction of ei [from ,step, to ] - prop['sample_run'] = [14305] - prop['wb_run'] = 5545 - prop['incident_energy'] = ei - prop['energy_bins'] = ebin + prop['sample_run'] = [14305] + prop['wb_run'] = 5545 + prop['incident_energy'] = ei + prop['energy_bins'] = ebin - # Absolute units reduction properties. - # Vanadium labelled Dec 2011 - flat plate of dimensions: 40.5x41x2.0# volume = 3404.025 mm**3 mass= 20.79 - prop['monovan_run'] = 14319 # vanadium run in the same configuration as your sample - prop['sample_mass'] = 20.79 # 17.25 # mass of your sample (PrAl3) - prop['sample_rmm'] = 50.9415 # 221.854 # molecular weight of your sample + # Absolute units reduction properties. 
+ # Vanadium labeled Dec 2011 - flat plate of dimensions: 40.5x41x2.0# volume = 3404.025 mm**3 mass= 20.79 + prop['monovan_run'] = 14319 # vanadium run in the same configuration as your sample + prop['sample_mass'] = 20.79 # 17.25 # mass of your sample (PrAl3) + prop['sample_rmm'] = 50.9415 # 221.854 # molecular weight of your sample - return prop + return prop - @AdvancedProperties - def def_advanced_properties(self): - """ separation between simple and advanced properties depends + @AdvancedProperties + def def_advanced_properties(self): + """separation between simple and advanced properties depends on scientist, experiment and user. - main properties override advanced properties. - """ - - prop = {} - prop['map_file'] = 'rings_103.map' - prop['det_cal_file'] = 'det_corrected7.nxs' - prop['bleed'] = False - prop['norm_method']='current' - prop['detector_van_range']=[2,7] - prop['background_range'] = [92000,98000] # TOF range for the calculating flat background - prop['hardmaskOnly']='LET_hard.msk' # diag does not work well on LET. At present only use a hard mask RIB has created - - prop['check_background']=True - - prop['monovan_mapfile'] = 'rings_103.map' - prop['save_format'] = '' - # if two input files with the same name and different extension found, what to prefer. - prop['data_file_ext']='.nxs' # for LET it may be choice between event and histo mode if - # raw file is written in histo, and nxs -- in event mode - - prop['monovan_mapfile'] = 'rings_103.map' - - - #TODO: Correct monitor, depending on workspace. This has to be loaded from the workspace and work without this settings - #prop['ei-mon1-spec']=40966 - - - - return prop + main properties override advanced properties. + """ + + prop = {} + prop['map_file'] = 'rings_103.map' + prop['det_cal_file'] = 'det_corrected7.nxs' + prop['bleed'] = False + prop['norm_method']='current' + prop['detector_van_range']=[2,7] + prop['background_range'] = [92000,98000] # TOF range for the calculating flat background + prop['hardmaskOnly']='LET_hard.msk' # diag does not work well on LET. At present only use a hard mask RIB has created + + prop['check_background']=True + + prop['monovan_mapfile'] = 'rings_103.map' + prop['save_format'] = '' + # if two input files with the same name and different extension found, what to prefer. + prop['data_file_ext']='.nxs' # for LET it may be choice between event and histo mode if + # raw file is written in histo, and nxs -- in event mode + + # Absolute units: map file to calculate monovan integrals + prop['monovan_mapfile'] = 'rings_103.map' + # change this to correct value and verify that motor_log_names refers correct and existing + # log name for crystal rotation to write correct psi value into nxspe files + prop['motor_offset']=None + + #TODO: Correct monitor, depending on workspace. 
This has to be loaded from the workspace and work without this settings + #prop['ei-mon1-spec']=40966 + return prop # - @iliad - def reduce(self,input_file=None,output_directory=None): - """ Method executes reduction over single file + @iliad + def reduce(self,input_file=None,output_directory=None): + """Method executes reduction over single file - Overload only if custom reduction is needed or + Overload only if custom reduction is needed or special features are requested - """ - res = ReductionWrapper.reduce(self,input_file,output_directory) - # - en = self.reducer.prop_man.incident_energy - for ind,energy in enumerate(en): - ws_name = 'LETreducedEi{0:2.1f}'.format(energy) - RenameWorkspace(InputWorkspace=res[ind],OutputWorkspace=ws_name) - res[ind]= mtd[ws_name] - - #SaveNexus(ws,Filename = 'LETNewReduction.nxs') - return res - - def __init__(self,rv=None): - """ sets properties defaults for the instrument with Name""" - ReductionWrapper.__init__(self,'LET',rv) + """ + res = ReductionWrapper.reduce(self,input_file,output_directory) + # + en = self.reducer.prop_man.incident_energy + for ind,energy in enumerate(en): + ws_name = 'LETreducedEi{0:2.1f}'.format(energy) + RenameWorkspace(InputWorkspace=res[ind],OutputWorkspace=ws_name) + res[ind]= mtd[ws_name] + + #SaveNexus(ws,Filename = 'LETNewReduction.nxs') + return res + + def __init__(self,web_var=None): + """ sets properties defaults for the instrument with Name""" + ReductionWrapper.__init__(self,'LET',web_var) + + def set_custom_output_filename(self): + """Define custom name of output files if standard one is not satisfactory + In addition to that, example of accessing reduction properties + Changing them if necessary + """ + def custom_name(prop_man): + """sample function which builds filename from + incident energy and run number and adds some auxiliary information + to it. + """ + # Note -- properties have the same names as the list of advanced and + # main properties + ei = prop_man.incident_energy + # sample run is more then just list of runs, so we use + # the formalization below to access its methods + run_num = PropertyManager.sample_run.run_number() + name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei) + return name + + # Uncomment this to use custom filename function + # Note: the properties are stored in prop_man class accessed as + # below. + #return custom_name(self.reducer.prop_man) + # use this method to use standard file name generating function + return None #---------------------------------------------------------------------------------------------------------------------- - - if __name__=="__main__": - maps_dir = 'd:/Data/MantidSystemTests/Data' - data_dir ='d:/Data/Mantid_Testing/14_11_27' - ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' - config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir)) - #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') - config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files. 
Defaults are in - - # execute stuff from Mantid - #rd =ReduceLET_MultiRep2015() - rd =ReduceLET_MultiRep2014() - #rd = ReduceLET_OneRep() - rd.def_advanced_properties() - rd.def_main_properties() - - - #using_web_data = False - #if not using_web_data: - # run_dir=os.path.dirname(os.path.realpath(__file__)) - # file = os.path.join(run_dir,'reduce_vars.py') - # rd.export_changed_values(file) - -###### Run reduction over all files provided as parameters ###### - red_ws = rd.run_reduction() - + maps_dir = 'd:/Data/MantidSystemTests/Data' + data_dir ='d:/Data/Mantid_Testing/14_11_27' + ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' + config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir)) + #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') + config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files. Defaults are in + + # execute stuff from Mantid + rd =ReduceLET_MultiRep2015() + #rd = ReduceLET_OneRep() + rd.def_advanced_properties() + rd.def_main_properties() + + +#### uncomment rows below to generate web variables and save then to transfer to ### + ## web services. + #run_dir = os.path.dirname(os.path.realpath(__file__)) + #file = os.path.join(run_dir,'reduce_vars.py') + #rd.save_web_variables(file) + +#### Set up time interval (sec) for reducer to check for input data file. #### + # If this file is not present and this value is 0,reduction fails + # if this value >0 the reduction wait until file appears on the data + # search path checking after time specified below. + rd.wait_for_file = 0 # waiting time interval + +####get reduction parameters from properties above, override what you want locally ### + # and run reduction. Overriding would have form: + # rd.reducer.property_name (from the dictionary above) = new value e.g. + # rd.reducer.energy_bins = [-40,2,40] + # or + ## rd.reducer.sum_runs = False + +###### Run reduction over all run numbers or files assigned to ###### + # sample_run variable + + # return output workspace only if you are going to do + # something with it here. Running range of runs will return the array + # of workspace pointers. 
+ #red_ws = rd.run_reduction() + # usual way to go is to reduce workspace and save it internally + rd.run_reduction() + + +#### Validate reduction result against known result, obtained earlier ### + #rez,mess=rd.validate_result() + #if not rez: + # raise RuntimeError("validation failed with error: {0}".format(mess)) + #else: + # print "ALL Fine" \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py index 55717e64a8f6f120e4f0105f52bacf25fc2dcd01..caf57a13b001e6d749c047685e358bf3f956a582 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MAPS_DGSReduction.py @@ -1,6 +1,6 @@ """ Sample MAPS reduction scrip """ -#import os -#os.environ["PATH"] = r"c:/Mantid/Code/builds/br_10803/bin/Release;"+os.environ["PATH"] +import os +os.environ["PATH"] = r"c:\Mantid\Code\builds\br_master\bin\Release;" + os.environ["PATH"] from Direct.ReductionWrapper import * try: import reduce_vars as web_var @@ -9,86 +9,141 @@ except: class ReduceMAPS(ReductionWrapper): - @MainProperties - def def_main_properties(self): - """ Define main properties used in reduction """ - prop = {} - prop['sample_run'] = 17269 - prop['wb_run'] = 17186 - prop['incident_energy'] = 150 - prop['energy_bins'] = [-15,3,135] + @MainProperties + def def_main_properties(self): + """ Define main properties used in reduction """ + prop = {} + prop['sample_run'] = 17269 + prop['wb_run'] = 17186 + prop['incident_energy'] = 150 + prop['energy_bins'] = [-15,3,135] - - # Absolute units reduction properties. - prop['monovan_run'] = 17589 - prop['sample_mass'] = 10/(94.4/13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test - prop['sample_rmm'] = 435.96 # - return prop - - @AdvancedProperties - def def_advanced_properties(self): - """ separation between simple and advanced properties depends + # Absolute units reduction properties. + prop['monovan_run'] = 17589 + prop['sample_mass'] = 10 / (94.4 / 13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test + prop['sample_rmm'] = 435.96 # + return prop + + @AdvancedProperties + def def_advanced_properties(self): + """ separation between simple and advanced properties depends on scientist, experiment and user. main properties override advanced properties. - """ - prop = {} - prop['map_file'] = 'default' - #prop['monovan_mapfile'] = 'default' #'4to1_mid_lowang.map' # default - prop['hard_mask_file'] =None - #prop['det_cal_file'] = ? default? - prop['save_format']='' - - prop['diag_remove_zero']=False - - # this are the parameters which were used in old MAPS_Parameters.xml test. - prop['wb-integr-max'] =300 - #prop['wb_integr_range']=[20,300] - prop['bkgd-range-min']=12000 - prop['bkgd-range-max']=18000 - #prop['bkgd_range']=[12000,18000] - - prop['diag_samp_hi']=1.5 - prop['diag_samp_sig']=3.3 - prop['diag_van_hi']=2.0 + """ + prop = {} + prop['map_file'] = 'default' + #prop['monovan_mapfile'] = 'default' #'4to1_mid_lowang.map' # default + prop['hard_mask_file'] = None + #prop['det_cal_file'] = ? default? + prop['save_format'] = '' + + prop['diag_remove_zero'] = False + + # this are the parameters which were used in old MAPS_Parameters.xml test. 
+ prop['wb-integr-max'] = 300 + #prop['wb_integr_range']=[20,300] + prop['bkgd-range-min'] = 12000 + prop['bkgd-range-max'] = 18000 + #prop['bkgd_range']=[12000,18000] + + prop['diag_samp_hi'] = 1.5 + prop['diag_samp_sig'] = 3.3 + prop['diag_van_hi'] = 2.0 - prop['abs_units_van_range']=[-40,40] + prop['abs_units_van_range'] = [-40,40] - return prop + return prop # - @iliad - def reduce(self,input_file=None,output_directory=None): - """ Method executes reduction over single file + @iliad + def reduce(self,input_file=None,output_directory=None): + """ Method executes reduction over single file Overload only if custom reduction is needed - """ - outWS = ReductionWrapper.reduce(self,input_file,output_directory) - #SaveNexus(ws,Filename = 'MARNewReduction.nxs') - return outWS + """ + outWS = ReductionWrapper.reduce(self,input_file,output_directory) + #SaveNexus(ws,Filename = 'MARNewReduction.nxs') + return outWS - def __init__(self,web_var=None): + def __init__(self,web_var=None): """ sets properties defaults for the instrument with Name""" ReductionWrapper.__init__(self,'MAP',web_var) + # + def set_custom_output_filename(self): + """ define custom name of output files if standard one is not satisfactory + In addition to that, example of accessing reduction properties + Changing them if necessary + """ + def custom_name(prop_man): + """ sample function which builds filename from + incident energy and run number and adds some auxiliary information + to it. + """ + # Note -- properties have the same names as the list of advanced and + # main properties + ei = prop_man.incident_energy + # sample run is more then just list of runs, so we use + # the formalization below to access its methods + run_num = PropertyManager.sample_run.run_number() + name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei) + return name + + # Uncomment this to use custom filename function + # Note: the properties are stored in prop_man class accessed as + # below. + #return custom_name(self.reducer.prop_man) + # use this method to use standard file name generating function + return None + #---------------------------------------------------------------------------------------------------------------------- +if __name__ == "__main__": + + data_root = r'd:\Data\MantidDevArea\Datastore\DataCopies' + data_dir = os.path.join(data_root,r'Testing\Data\SystemTest') + ref_data_dir = os.path.join(data_root,r'Testing\SystemTests\tests\analysis\reference') + result_dir = r'd:/Data/Mantid_Testing/14_12_15' + + config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,ref_data_dir,result_dir)) + #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') + config['defaultsave.directory'] = result_dir # folder to save resulting spe/nxspe files. Defaults are in + + # execute stuff from Mantid + rd = ReduceMAPS() + rd.def_advanced_properties() + rd.def_main_properties() + +#### uncomment rows below to generate web variables and save then to transfer to ### + ## web services. + #run_dir = os.path.dirname(os.path.realpath(__file__)) + #file = os.path.join(run_dir,'reduce_vars.py') + #rd.save_web_variables(file) +#### Set up time interval (sec) for reducer to check for input data file. #### + # If this file is not present and this value is 0,reduction fails + # if this value >0 the reduction wait until file appears on the data + # search path checking after time specified below. 
+ rd.wait_for_file = 0 # waiting time interval -if __name__=="__main__": - maps_dir = 'd:/Data/MantidSystemTests/Data' - data_dir ='d:/Data/Mantid_Testing/14_12_15' - ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' - config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir)) - #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') - config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files. Defaults are in +####get reduction parameters from properties above, override what you want locally ### + # and run reduction. Overriding would have form: + # rd.reducer.property_name (from the dictionary above) = new value e.g. + # rd.reducer.energy_bins = [-40,2,40] + # or + ## rd.reducer.sum_runs = False - # execute stuff from Mantid - rd = ReduceMAPS() - rd.def_advanced_properties() - rd.def_main_properties() +###### Run reduction over all run numbers or files assigned to ###### + # sample_run variable + # return output workspace only if you are going to do + # something with it here. Running range of runs will return the array + # of workspace pointers. + #red_ws = rd.run_reduction() + # usual way to go is to reduce workspace and save it internally + rd.run_reduction() - #using_web_data = False - #if not using_web_data: - # run_dir=os.path.dirname(os.path.realpath(__file__)) - # file = os.path.join(run_dir,'reduce_vars.py') - # rd.save_web_vars(file) - rd.reduce() +#### Validate reduction result against known result, obtained earlier ### + #rez,mess=rd.validate_result() + #if not rez: + # raise RuntimeError("validation failed with error: {0}".format(mess)) + #else: + # print "ALL Fine" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py index 9eadf5d14b33000a225ff762020a2dd4bca65324..a2f517a117cf6af4d3f918557a76bbeb7c1a0b6f 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_MariReduction.py @@ -1,7 +1,7 @@ import os #os.environ["PATH"] =\ #r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"] -""" Sample MARI reduction scrip used in testing ReductionWrapper """ +""" Sample MARI reduction scrip used in testing ReductionWrapper """ from Direct.ReductionWrapper import * try: import reduce_vars as web_var @@ -10,80 +10,107 @@ except: class ReduceMARIFromFile(ReductionWrapper): - @MainProperties - def def_main_properties(self): - """ Define main properties used in reduction """ - prop = {} - prop['sample_run'] = 11001 - prop['wb_run'] = 11060 - prop['incident_energy'] = 12 - prop['energy_bins'] = [-11,0.05,11] + @MainProperties + def def_main_properties(self): + """ Define main properties used in reduction """ + prop = {} + prop['sample_run'] = 11001 + prop['wb_run'] = 11060 + prop['incident_energy'] = 12 + prop['energy_bins'] = [-11,0.05,11] #prop['sum_runs'] = False - # Absolute units reduction properties. - prop['monovan_run'] = 11015 - prop['sample_mass'] = 10 - prop['sample_rmm'] = 435.96 - return prop + # Absolute units reduction properties. 
+ prop['monovan_run'] = 11015 + prop['sample_mass'] = 10 + prop['sample_rmm'] = 435.96 + return prop - @AdvancedProperties - def def_advanced_properties(self): - """ separation between simple and advanced properties depends + @AdvancedProperties + def def_advanced_properties(self): + """ separation between simple and advanced properties depends on scientist, experiment and user. main properties override advanced properties. - """ - prop = {} - prop['map_file'] = "mari_res.map" - prop['monovan_mapfile'] = "mari_res.map" - prop['hard_mask_file'] = "mar11015.msk" - prop['det_cal_file'] = 11060 - prop['save_format'] = '' - return prop + """ + prop = {} + prop['map_file'] = "mari_res.map" + prop['monovan_mapfile'] = "mari_res.map" + prop['hard_mask_file'] = "mar11015.msk" + prop['det_cal_file'] = 11060 + prop['save_format'] = '' + return prop # - @iliad - def reduce(self,input_file=None,output_directory=None): - """ Method executes reduction over single file + @iliad + def reduce(self,input_file=None,output_directory=None): + """Method executes reduction over single file Overload only if custom reduction is needed - """ - outWS = ReductionWrapper.reduce(self,input_file,output_directory) - #SaveNexus(outWS,Filename = 'MARNewReduction.nxs') - return outWS + """ + outWS = ReductionWrapper.reduce(self,input_file,output_directory) + #SaveNexus(outWS,Filename = 'MARNewReduction.nxs') + return outWS - def validate_result(self,build_validation=False): - """ Change this method to verify different results """ - # build_validation -- if true, build and save new workspace rather then validating the old one - rez,message = ReductionWrapper.build_or_validate_result(self,11001,"MARIReduction.nxs",build_validation,1.e-2) - return rez,message - - def __init__(self,web_var=None): + def validate_result(self,build_validation=False): + """Change this method to verify different results """ + # build_validation -- if true, build and save new workspace rather then validating the old one + rez,message = ReductionWrapper.build_or_validate_result(self,11001,"MARIReduction.nxs",build_validation,1.e-2) + return rez,message + + def set_custom_output_filename(self): + """ define custom name of output files if standard one is not satisfactory + In addition to that, example of accessing reduction properties + Changing them if necessary + """ + def custom_name(prop_man): + """Sample function which builds filename from + incident energy and run number and adds some auxiliary information + to it. + """ + # Note -- properties have the same names as the list of advanced and + # main properties + ei = prop_man.incident_energy + # sample run is more then just list of runs, so we use + # the formalization below to access its methods + run_num = PropertyManager.sample_run.run_number() + name = "RUN{0}atEi{1:<4.1f}meV_One2One".format(run_num ,ei) + return name + + # Uncomment this to use custom filename function + # Note: the properties are stored in prop_man class accessed as + # below. 
+ #return custom_name(self.reducer.prop_man) + # use this method to use standard file name generating function + return None + + + def __init__(self,web_var=None): """ sets properties defaults for the instrument with Name""" ReductionWrapper.__init__(self,'MAR',web_var) #-------------------------------------------------------------------------------------------------# #-------------------------------------------------------------------------------------------------# #-------------------------------------------------------------------------------------------------# def main(input_file=None,output_directory=None): - """ This method is used to run code from web service - and should not be touched except changing the name of the - particular ReductionWrapper class (e.g. ReduceMARI here) + """ This method is used to run code from web service + and should not be touched except changing the name of the + particular ReductionWrapper class (e.g. ReduceMARI here) - You can also change the output folder to save data to - where web services will copy data + You can also change the output folder to save data to + where web services will copy data - This method will go when web service implements proper factory - """ - # note web variables initialization - rd = ReduceMARIFromFile(web_var) - rd.reduce(input_file,output_directory) - # change to the name of the folder to save data to - return '' + This method will go when web service implements proper factory + """ + # note web variables initialization + rd = ReduceMARIFromFile(web_var) + rd.reduce(input_file,output_directory) + # change to the name of the folder to save data to + return '' #---------------------------------------------------------------------------------------------------------------------- class ReduceMARIFromWorkspace(ReductionWrapper): @MainProperties def def_main_properties(self): - """ Define main properties used in reduction """ + """Define main properties used in reduction """ prop = {} prop['sample_run'] = Load(Filename='MAR11001.RAW',OutputWorkspace='MAR11001.RAW') # WB workspace @@ -278,33 +305,55 @@ class ReduceMARIMonitorsSeparate(ReductionWrapper): if __name__ == "__main__": - maps_dir = 'd:/Data/MantidSystemTests/Data' - data_dir = 'd:/Data/Mantid_Testing/14_12_15' - ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults' - config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,maps_dir,ref_data_dir)) - #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') - config['defaultsave.directory'] = data_dir # folder to save resulting spe/nxspe files. Defaults are in - - # execute stuff from Mantid - #rd = ReduceMARIFromFile() - #rd= ReduceMARIMon2Norm() - rd = ReduceMARIMonitorsSeparate() - #rd = ReduceMARIFromWorkspace() - rd.def_advanced_properties() - rd.def_main_properties() - - # Save web variables - run_dir = os.path.dirname(os.path.realpath(__file__)) - file = os.path.join(run_dir,'reduce_vars.py') - rd.save_web_variables(file) -#### Set up time interval (sec) for reducer to check for input data file. #### - # If this file is not present and this value is 0,reduction fails - # if this value >0 the reduction wait until file appears on the data - # search path checking after time specified below. 
- rd.wait_for_file = 0 # waiting time interval - -###### Run reduction over all run numbers or files assigned to ###### - # sample_run variable - red_ws = rd.run_reduction() - - + data_root = r'd:\Data\MantidDevArea\Datastore\DataCopies' + data_dir = os.path.join(data_root,r'Testing\Data\SystemTest') + ref_data_dir = os.path.join(data_root,r'Testing\SystemTests\tests\analysis\reference') + result_dir = r'd:/Data/Mantid_Testing/14_12_15' + config.setDataSearchDirs('{0};{1};{2}'.format(data_dir,ref_data_dir,result_dir)) + #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData') + config['defaultsave.directory'] = result_dir # folder to save resulting spe/nxspe files. Defaults are in + + # execute stuff from Mantid + #rd = ReduceMARIFromFile() + rd= ReduceMARIMon2Norm() + #rd = ReduceMARIMonitorsSeparate() + #rd = ReduceMARIFromWorkspace() + rd.def_advanced_properties() + rd.def_main_properties() + +#### uncomment rows below to generate web variables and save then to transfer to ### + ## web services. + run_dir = os.path.dirname(os.path.realpath(__file__)) + file = os.path.join(run_dir,'reduce_vars.py') + rd.save_web_variables(file) + +#### Set up time interval (sec) for reducer to check for input data file. #### + # If this file is not present and this value is 0,reduction fails + # if this value >0 the reduction wait until file appears on the data + # search path checking after time specified below. + rd.wait_for_file = 0 # waiting time interval + +####get reduction parameters from properties above, override what you want locally ### + # and run reduction. Overriding would have form: + # rd.reducer.property_name (from the dictionary above) = new value e.g. + # rd.reducer.energy_bins = [-40,2,40] + # or + ## rd.reducer.sum_runs = False + +###### Run reduction over all run numbers or files assigned to ###### + # sample_run variable + + # return output workspace only if you are going to do + # something with it here. Running range of runs will return the array + # of workspace pointers. 
+ #red_ws = rd.run_reduction() + # usual way to go is to reduce workspace and save it internally + rd.run_reduction() + + +#### Validate reduction result against known result, obtained earlier ### + #rez,mess=rd.validate_result() + #if not rez: + # raise RuntimeError("validation failed with error: {0}".format(mess)) + #else: + # print "ALL Fine" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_3_4mev.nxs.md5 b/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_3_4mev.nxs.md5 deleted file mode 100644 index b0cc827392201bda131ef2318a5a938d38993b7e..0000000000000000000000000000000000000000 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_3_4mev.nxs.md5 +++ /dev/null @@ -1 +0,0 @@ -94a08cffaa147469c35863a07e77f8e2 \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_8_0mev.nxs.md5 b/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_8_0mev.nxs.md5 deleted file mode 100644 index ecbe7a432ca68699f19b330af0067380031e22e4..0000000000000000000000000000000000000000 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/reference/LET14305_8_0mev.nxs.md5 +++ /dev/null @@ -1 +0,0 @@ -89e4436d52b2f94cc9aa7e267523a815 \ No newline at end of file diff --git a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py index 465d731e23c735fd49e57391c3f6952694a78fd3..1ba99869950964ca60b6fbe3e986c7f152d94db2 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py +++ b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py @@ -197,7 +197,7 @@ class DirectEnergyConversion(object): if self.use_hard_mask_only: # build hard mask - diag_mask,n_masks = white.get_masking() + diag_mask = white.get_masking(1) if diag_mask is None: # in this peculiar way we can obtain working mask which # accounts for initial data grouping in the @@ -210,8 +210,9 @@ class DirectEnergyConversion(object): MaskDetectors(Workspace=white_data, MaskedWorkspace=diag_mask) white.add_masked_ws(white_data) DeleteWorkspace(Workspace='white_ws_clone') - diag_mask,n_masks = white.get_masking() - if not(out_ws_name is None): + DeleteWorkspace(Workspace='hard_mask_ws') + diag_mask = white.get_masking(1) + if not out_ws_name is None: dm = CloneWorkspace(diag_mask,OutputWorkspace=out_ws_name) return dm else: @@ -228,7 +229,7 @@ class DirectEnergyConversion(object): # Get the background/total counts from the sample run if present if not diag_sample is None: diag_sample = self.get_run_descriptor(diag_sample) - sample_mask,n_sam_masked = diag_sample.get_masking() + sample_mask = diag_sample.get_masking(1) if sample_mask is None: # If the bleed test is requested then we need to pass in the # sample_run as well @@ -241,7 +242,7 @@ class DirectEnergyConversion(object): # Set up the background integrals for diagnostic purposes result_ws = self.normalise(diag_sample, self.normalise_method) - #>>> here result workspace is being processed + #>>>here result workspace is being processed #-- not touching result ws bkgd_range = self.background_test_range background_int = Integration(result_ws,\ @@ -260,13 +261,16 @@ class DirectEnergyConversion(object): # extract existing white mask if one is defined and provide it for # diagnose to use instead of constantly diagnosing the same vanadium - white_mask,num_masked = white.get_masking() - if not(white_mask is None) and not(sample_mask is None): - # nothing to do then + white_mask = 
white.get_masking(1) + if white_mask is None or sample_mask is None: + pass # have to run diagnostics + else: + #Sample mask and white masks are defined. + #nothing to do then total_mask = sample_mask + white_mask return total_mask - else: - pass # have to run diagnostics after all + + # Check how we should run diag diag_spectra_blocks = self.diag_spectra @@ -292,7 +296,7 @@ class DirectEnergyConversion(object): if out_ws_name: if not(diag_sample is None): diag_sample.add_masked_ws(whiteintegrals) - mask,n_removed = diag_sample.get_masking() + mask = diag_sample.get_masking(1) diag_mask = CloneWorkspace(mask,OutputWorkspace=out_ws_name) else: # either WB was diagnosed or WB masks were applied to it # Extract a mask workspace @@ -376,7 +380,7 @@ class DirectEnergyConversion(object): masking,header = self._run_diagnostics(prop_man) else: header = '*** Using stored mask file for workspace with {0} spectra and {1} masked spectra' - masking = self.spectra_masks + masking = self.spectra_masks # estimate and report the number of failing detectors nMaskedSpectra = get_failed_spectra_list_from_masks(masking) @@ -405,8 +409,8 @@ class DirectEnergyConversion(object): MonovanCashNum = PropertyManager.monovan_run.run_number() else: MonovanCashNum = None - # Set or clear monovan run number to use in cash ID to return correct - # cashed value of monovan integral + #Set or clear monovan run number to use in cash ID to return correct + #cashed value of monovan integral PropertyManager.mono_correction_factor.set_cash_mono_run_number(MonovanCashNum) mono_ws_base = None @@ -457,7 +461,7 @@ class DirectEnergyConversion(object): # or use previously cashed value cashed_mono_int = PropertyManager.mono_correction_factor.get_val_from_cash(prop_man) if MonovanCashNum != None or self.mono_correction_factor or cashed_mono_int: - deltaE_ws_sample,mono_ws_base=self._do_abs_corrections(deltaE_ws_sample,cashed_mono_int,\ + deltaE_ws_sample,mono_ws_base = self._do_abs_corrections(deltaE_ws_sample,cashed_mono_int,\ ei_guess,mono_ws_base,tof_range, cut_ind,num_ei_cuts) else: pass # no absolute units corrections diff --git a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py index 332ff0751f6c5554304db29c1cbb673038877b3d..5eee0d0e058300f9edbfc25eda239154dd3ea581 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py +++ b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py @@ -27,12 +27,12 @@ class ReductionWrapper(object): and define if wrapper runs from web services or not """ # internal variable, indicating if we should try to wait for input files to appear - self._wait_for_file=False - # internal variable, used in system tests to validate workflow, - # with waiting for files. It is the holder to the function + self._wait_for_file = False + # internal variable, used in system tests to validate workflow, + # with waiting for files. 
It is the holder to the function # used during debugging "wait for files" workflow # instead of Pause algorithm - self._debug_wait_for_files_operation=None + self._debug_wait_for_files_operation = None # The variables which are set up from web interface or to be exported to # web interface @@ -60,7 +60,7 @@ class ReductionWrapper(object): @wait_for_file.setter def wait_for_file(self,value): - if value>0: + if value > 0: self._wait_for_file = value else: self._wait_for_file = False @@ -77,7 +77,7 @@ class ReductionWrapper(object): if not FileName: FileName = 'reduce_vars.py' - f=open(FileName,'w') + f = open(FileName,'w') f.write("standard_vars = {\n") str_wrapper = ' ' for key,val in self._wvs.standard_vars.iteritems(): @@ -86,17 +86,17 @@ class ReductionWrapper(object): else: row = "{0}\'{1}\':{2}".format(str_wrapper,key,val) f.write(row) - str_wrapper=',\n ' + str_wrapper = ',\n ' f.write("\n}\nadvanced_vars={\n") - str_wrapper=' ' + str_wrapper = ' ' for key,val in self._wvs.advanced_vars.iteritems(): if isinstance(val,str): row = "{0}\'{1}\':\'{2}\'".format(str_wrapper,key,val) else: row = "{0}\'{1}\':{2}".format(str_wrapper,key,val) f.write(row) - str_wrapper=',\n ' + str_wrapper = ',\n ' f.write("\n}\n") f.close() @@ -106,15 +106,15 @@ class ReductionWrapper(object): self.def_advanced_properties() self.def_main_properties() if self._run_from_web: - web_vars = dict(self._wvs.standard_vars.items()+self._wvs.advanced_vars.items()) + web_vars = dict(self._wvs.standard_vars.items() + self._wvs.advanced_vars.items()) self.reducer.prop_man.set_input_parameters(**web_vars) else: - pass # we should set already set up variables using + pass # we should set already set up variables using # validate properties and report result return self.reducer.prop_man.validate_properties(False) # -# +# def validate_result(self,build_validation=False,Error=1.e-3,ToleranceRelErr=True): """ Overload this using build_or_validate_result to have possibility to run or validate result """ return True @@ -156,14 +156,14 @@ class ReductionWrapper(object): self.reducer.prop_man.log\ ("*** WARNING:can not load (find?) 
validation file {0}\n"\ " Building validation".format(validation_file),'warning') - build_validation=True + build_validation = True else: - build_validation=True + build_validation = True # just in case, to be sure current_web_state = self._run_from_web - current_wait_state= self.wait_for_file + current_wait_state = self.wait_for_file # disable wait for input and self._run_from_web = False self.wait_for_file = False @@ -172,7 +172,7 @@ class ReductionWrapper(object): self.def_main_properties() # self.reducer.sample_run = sample_run - self.reducer.prop_man.save_format=None + self.reducer.prop_man.save_format = None reduced = self.reduce() @@ -182,7 +182,7 @@ class ReductionWrapper(object): else: result_name = self.reducer.prop_man.save_file_name self.reducer.prop_man.log("*** Saving validation file with name: {0}.nxs".format(result_name),'notice') - SaveNexus(reduced,Filename=result_name+'.nxs') + SaveNexus(reduced,Filename=result_name + '.nxs') return True,'Created validation file {0}.nxs'.format(result_name) else: if isinstance(reduced,list): # check only first result in multirep @@ -243,7 +243,7 @@ class ReductionWrapper(object): config['defaultsave.directory'] = str(output_directory) timeToWait = self._wait_for_file - if timeToWait>0: + if timeToWait > 0: Found,input_file = PropertyManager.sample_run.find_file(be_quet=True) while not Found: file_hint,fext = PropertyManager.sample_run.file_hint() @@ -269,25 +269,25 @@ class ReductionWrapper(object): self.reducer.prop_man.sum_runs = True timeToWait = self._wait_for_file - if timeToWait>0: - run_files = PropertyManager.sample_run.get_run_list() + if timeToWait > 0: + run_files = PropertyManager.sample_run.get_run_list() num_files_to_sum = len(PropertyManager.sample_run) ok,missing,found = self.reducer.prop_man.find_files_to_sum() n_found = len(found) if not ok: - # necessary to cache intermediate sums in memory + # necessary to cache intermediate sums in memory self.reducer.prop_man.cashe_sum_ws = True while not(ok): - while n_found>0: + while n_found > 0: last_found = found[-1] self.reducer.prop_man.sample_run = last_found # request to reduce all up to last found ws = self.reducer.convert_to_energy() # reset search to whole file list again - self.reducer.prop_man.sample_run = run_files[num_files_to_sum-1] + self.reducer.prop_man.sample_run = run_files[num_files_to_sum - 1] ok,missing,found = self.reducer.prop_man.find_files_to_sum() n_found = len(found) - if ok: # no need to cache sum any more. All necessary files found + if ok: # no need to cache sum any more. 
All necessary files found self.reducer.prop_man.cashe_sum_ws = False self.reducer.prop_man.log("*** Waiting {0} sec for runs {1} to appear on the data search path"\ @@ -296,7 +296,7 @@ class ReductionWrapper(object): ok,missing,found = self.reducer.prop_man.find_files_to_sum() n_found = len(found) #end not(ok) - if n_found>0: + if n_found > 0: # cash sum can be dropped now if it has not been done before self.reducer.prop_man.cashe_sum_ws = False ws = self.reducer.convert_to_energy() @@ -304,53 +304,53 @@ class ReductionWrapper(object): ws = self.reducer.convert_to_energy() return ws - # + # def run_reduction(self): - """" Reduces runs one by one or sum all them together and reduce after this + """" Reduces runs one by one or sum all them together and reduce after this - if wait_for_file time is > 0, it will until missing files appear on the + if wait_for_file time is > 0, it will until missing files appear on the data search path - """ - try: - n,r = funcreturns.lhs_info('both') - out_ws_name = r[0] - except: - out_ws_name = None - - if self.reducer.sum_runs: -# --------### sum runs provided ------------------------------------### - if out_ws_name is None: - self.sum_and_reduce() - return None - else: - red_ws=self.sum_and_reduce() - RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name) - return mtd[out_ws_name] - else: + """ + try: + n,r = funcreturns.lhs_info('both') + out_ws_name = r[0] + except: + out_ws_name = None + + if self.reducer.sum_runs: +# --------### sum runs provided ------------------------------------### + if out_ws_name is None: + self.sum_and_reduce() + return None + else: + red_ws = self.sum_and_reduce() + RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_ws_name) + return mtd[out_ws_name] + else: # --------### reduce list of runs one by one ----------------------------### - runfiles = PropertyManager.sample_run.get_run_file_list() - if out_ws_name is None: - for file in runfiles: - self.reduce(file) - #end - return None - else: - results=[] - nruns = len(runfiles) - for num,file in enumerate(runfiles): - red_ws=self.reduce(file) - if nruns >1: - out_name = out_ws_name+'#{0}of{1}'.format(num+1,nruns) - RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_name) - red_ws = mtd[out_name] - results.append(red_ws) - #end - if len(results) == 1: - return results[0] + runfiles = PropertyManager.sample_run.get_run_file_list() + if out_ws_name is None: + for file in runfiles: + self.reduce(file) + return None else: - return results - #end - + results = [] + nruns = len(runfiles) + for num,file in enumerate(runfiles): + red_ws = self.reduce(file) + if nruns > 1: + out_name = out_ws_name + '#{0}of{1}'.format(num + 1,nruns) + RenameWorkspace(InputWorkspace=red_ws,OutputWorkspace=out_name) + red_ws = mtd[out_name] + results.append(red_ws) + #end + if len(results) == 1: + return results[0] + else: + return results + #end if + #end if + #end def MainProperties(main_prop_definition): """ Decorator stores properties dedicated as main and sets these properties @@ -376,7 +376,7 @@ def AdvancedProperties(adv_prop_definition): #print "in decorator: ",properties host = args[0] if not host._run_from_web: # property run locally - host._wvs.advanced_vars =prop_dict + host._wvs.advanced_vars = prop_dict host.reducer.prop_man.set_input_parameters(**prop_dict) return prop_dict @@ -398,19 +398,19 @@ def iliad(reduce): out_ws_name = None host = args[0] - if len(args)>1: + if len(args) > 1: input_file = args[1] - if len(args)>2: + if len(args) > 2: output_directory = args[2] 
else: - output_directory =None + output_directory = None else: - input_file=None - output_directory=None + input_file = None + output_directory = None # add input file folder to data search directory if file has it if input_file and isinstance(input_file,str): data_path = os.path.dirname(input_file) - if len(data_path)>0: + if len(data_path) > 0: try: config.appendDataSearchDir(str(data_path)) args[1] = os.path.basename(input_file) @@ -420,7 +420,7 @@ def iliad(reduce): config['defaultsave.directory'] = str(output_directory) if host._run_from_web: - web_vars = dict(host._wvs.standard_vars.items()+host._wvs.advanced_vars.items()) + web_vars = dict(host._wvs.standard_vars.items() + host._wvs.advanced_vars.items()) host.reducer.prop_man.set_input_parameters(**web_vars) else: pass # we should set already set up variables using @@ -433,17 +433,17 @@ def iliad(reduce): # prohibit returning workspace to web services. if host._run_from_web and not isinstance(rez,str): - rez="" + rez = "" else: if isinstance(rez,list): # multirep run, just return as it is return rez if out_ws_name and rez.name() != out_ws_name : - rez=RenameWorkspace(InputWorkspace=rez,OutputWorkspace=out_ws_name) + rez = RenameWorkspace(InputWorkspace=rez,OutputWorkspace=out_ws_name) return rez return iliad_wrapper -if __name__=="__main__": +if __name__ == "__main__": pass diff --git a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py index a7c388cd50b79fa05abce7b66583962e95a49079..20222aabad9b1ad16ee5d16067cbf8938715dee6 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py +++ b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py @@ -30,8 +30,8 @@ class RunList(object): self._set_fnames(fnames,fext) #-------------------------------------------------------------------------------------------------- def set_cashed_sum_ws(self,ws,new_ws_name=None): - """Store the name of a workspace in the class - as reference + """Store the name of a workspace in the class + as reference """ if new_ws_name: old_name = ws.name() @@ -45,15 +45,15 @@ class RunList(object): self._partial_sum_ws_name = new_ws_name # def get_cashed_sum_ws(self): - """Return python pointer to cached sum workspace - """ - if not self._partial_sum_ws_name: + """Return python pointer to cached sum workspace + """ + if not self._partial_sum_ws_name: return None - if self._partial_sum_ws_name in mtd: + if self._partial_sum_ws_name in mtd: return mtd[self._partial_sum_ws_name] - else: + else: return None - # + # def get_cashed_sum_clone(self): """ """ origin = self.get_cashed_sum_ws() @@ -78,13 +78,13 @@ class RunList(object): #-------------------------------------------------------------------------------------------------- # def _set_fnames(self,fnames,fext): - """Sets filenames lists and file extension lists + """Sets filenames lists and file extension lists of length correspondent to run number length - if length of the list provided differs from the length - of the run list, expands fnames list and fext list + if length of the list provided differs from the length + of the run list, expands fnames list and fext list to the whole runnumber list using last for fext and - first for fnames members of the + first for fnames members of the """ if fnames: if isinstance(fnames,list): @@ -92,7 +92,7 @@ class RunList(object): else: self._file_path = [fnames] - if not(self._file_path): + if not self._file_path: self._file_path = [''] * len(self._run_numbers) else: if len(self._file_path) != 
len(self._run_numbers): @@ -104,7 +104,7 @@ class RunList(object): else: self._fext = [fext] - if not (self._fext): + if not self._fext: self._fext = [''] * len(self._run_numbers) else: if len(self._fext) != len(self._run_numbers): @@ -114,7 +114,7 @@ class RunList(object): def get_file_guess(self,inst_name,run_num,default_fext=None,index=None): """Return the name of run file for run number provided - Note: internal file extension overwrites + Note: internal file extension overwrites default_fext if internal is not empty """ if index is None: @@ -138,56 +138,56 @@ class RunList(object): return self._run_numbers # def add_or_replace_run(self,run_number,fpath='',fext=None,default_fext=False): - """Add run number to list of existing runs + """Add run number to list of existing runs Let's prohibit adding the same run numbers using this method. Equivalent run numbers can still be added using list assignment - file path and file extension are added/modified if present + file path and file extension are added/modified if present regardless of run being added or replaced - """ - if not(run_number in self._run_numbers): - self._run_numbers.append(run_number) - if not fpath: - fpath = self._file_path[-1] - self._file_path.append(fpath) - if not fext: - fext = self._fext[-1] - self._fext.append(fext) - - self._last_ind2sum = len(self._run_numbers) - 1 - return self._last_ind2sum - else: - ext_ind = self._run_numbers.index(run_number) - if len(fpath) > 0: - self._file_path[ext_ind] = fpath - if fext: - if not(default_fext and len(self._fext[ext_ind]) > 0): #not keep existing - self._fext[ext_ind] = fext - self._last_ind2sum = ext_ind - return ext_ind + """ + if not(run_number in self._run_numbers): + self._run_numbers.append(run_number) + if not fpath: + fpath = self._file_path[-1] + self._file_path.append(fpath) + if not fext: + fext = self._fext[-1] + self._fext.append(fext) + + self._last_ind2sum = len(self._run_numbers) - 1 + return self._last_ind2sum + else: + ext_ind = self._run_numbers.index(run_number) + if len(fpath) > 0: + self._file_path[ext_ind] = fpath + if fext: + if not(default_fext and len(self._fext[ext_ind]) > 0): #not keep existing + self._fext[ext_ind] = fext + self._last_ind2sum = ext_ind + return ext_ind # def check_runs_equal(self,run_list,fpath=None,fext=None): - """Returns true if all run numbers in existing list are + """Returns true if all run numbers in existing list are in the comparison list and vice versa. 
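add_or_replace_run above keeps the run list free of duplicates: an unknown run number is appended and inherits the last stored file path and extension, while a known one only has its path and extension refreshed. A short sketch of that behaviour (the RunList constructor signature follows the call used later in this patch; run numbers and paths are invented):

    # RunList as defined in Direct/RunDescriptor.py in this patch.
    rl = RunList([14305, 14306], '', '.nxs')
    ind = rl.add_or_replace_run(14307)      # appended; returns the new index 2
    ind = rl.add_or_replace_run(14306, fpath='/archive', fext='.raw')
    # 14306 is already present, so only its path and extension are updated
    # and the index of the existing entry (1) is returned.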
- if lists numbers coincide, + if lists numbers coincide, sets new file_path and fext list if such are provided """ if len(run_list) != len(self._run_numbers): return False for run in run_list: - if not(run in self._run_numbers): + if not run in self._run_numbers: return False self._set_fnames(fpath,fext) - return True + return True # def get_current_run_info(self,sum_runs,ind=None): """Return last run info for file to sum""" if ind: if not(ind > -1 and ind < len(self._run_numbers)): - raise RuntimeError("Index {0} is outside of the run list of {1} runs".format(ind,len(self._run_numbers))) + raise RuntimeError("Index {0} is outside of the run list of {1} runs".format(ind,len(self._run_numbers))) else: ind = self.get_last_ind2sum(sum_runs) return self._run_numbers[ind],self._file_path[ind],self._fext[ind],ind @@ -203,7 +203,7 @@ class RunList(object): self._last_ind2sum = -1 # def get_run_list2sum(self,num_to_sum=None): - """Get run numbers of the files to be summed together + """Get run numbers of the files to be summed together from the list of defined run numbers """ n_runs = len(self._run_numbers) @@ -241,7 +241,7 @@ class RunList(object): return sum_ext # def find_run_files(self,inst_name,run_list=None,default_fext=None): - """Find run files correspondent to the run list provided + """Find run files correspondent to the run list provided and set path to these files as new internal parameters for the files in list @@ -249,36 +249,35 @@ class RunList(object): not found and found Run list have to coincide or be part of self._run_numbers - No special check for correctness is performed, so may fail + No special check for correctness is performed, so may fail miserably - """ - - if not run_list: - run_list = self._run_numbers - not_found = [] - found = [] - for run in run_list: - file_hint,index = self.get_file_guess(inst_name,run,default_fext) - try: - file = FileFinder.findRuns(file_hint)[0] - fpath,fname = os.path.split(file) - fname,fex = os.path.splitext(fname) - self._fext[index] = fex - self._file_path[index] = fpath - #self._last_ind2sum = index - found.append(run) - except RuntimeError: - not_found.append(run) - return not_found,found + """ + + if not run_list: + run_list = self._run_numbers + not_found = [] + found = [] + for run in run_list: + file_hint,index = self.get_file_guess(inst_name,run,default_fext) + try: + file = FileFinder.findRuns(file_hint)[0] + fpath,fname = os.path.split(file) + fname,fex = os.path.splitext(fname) + self._fext[index] = fex + self._file_path[index] = fpath + #self._last_ind2sum = index + found.append(run) + except RuntimeError: + not_found.append(run) + return not_found,found #-------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------- class RunDescriptor(PropDescriptor): - """ descriptor to work with a run or list of runs specified - either as run number (run file) or as - this run loaded in memory as a workspace + """Descriptor to work with a run or list of runs specified + either as run number (run file) or as + this run loaded in memory as a workspace - Used to help """ # the host class referencing contained all instantiated descriptors. # Descriptors methods rely on it to work (e.g. 
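RunList.find_run_files above resolves each requested run through FileFinder, writes the discovered path and extension back into the list, and returns the runs it could not locate together with the ones it found. A sketch of consuming those two return values ('rl' is an existing RunList; instrument name, runs and extension are placeholders):

    not_found, found = rl.find_run_files('LET', [14305, 14306], default_fext='.nxs')
    if not_found:
        raise RuntimeError('Runs missing from the data search path: {0}'
                           .format(not_found))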
to extract file loader @@ -302,9 +301,9 @@ class RunDescriptor(PropDescriptor): self._clear_all() def __len__(self): - """ overloaded len function, which - return length of the run-files list - to work with + """overloaded len function, which + return length of the run-files list + to work with """ if not(self._run_number): return 0 @@ -314,8 +313,8 @@ class RunDescriptor(PropDescriptor): return 1 #-------------------------------------------------------------------------------------------------------------------- def _clear_all(self): - """ clear all internal properties, workspaces and caches, - associated with this run + """clear all internal properties, workspaces and caches, + associated with this run """ # Run number self._run_number = None @@ -325,12 +324,12 @@ class RunDescriptor(PropDescriptor): self._fext = None if self._ws_name: - mon_ws = self._ws_name + '_monitors' - # Workspace name which corresponds to the run - if self._ws_name in mtd: - DeleteWorkspace(self._ws_name) - if mon_ws in mtd: - DeleteWorkspace(mon_ws) + mon_ws = self._ws_name + '_monitors' + # Workspace name which corresponds to the run + if self._ws_name in mtd: + DeleteWorkspace(self._ws_name) + if mon_ws in mtd: + DeleteWorkspace(mon_ws) self._ws_name = None # none if not loaded # String used to identify the workspace related to this property @@ -343,57 +342,58 @@ class RunDescriptor(PropDescriptor): self._in_cash = False # clear masking workspace if any available if self._mask_ws_name: - if self._mask_ws_name in mtd: - DeleteWorkspace(self._mask_ws_name) - self._mask_ws_name = None + if self._mask_ws_name in mtd: + DeleteWorkspace(self._mask_ws_name) + self._mask_ws_name = None #-------------------------------------------------------------------------------------------------------------------- def __get__(self,instance,owner): - """Return current run number or workspace if it is loaded""" - if instance is None: - return self + """Return current run number or workspace if it is loaded""" + if instance is None: + return self - if self._ws_name and self._ws_name in mtd: - return mtd[self._ws_name] - else: + if self._ws_name and self._ws_name in mtd: + return mtd[self._ws_name] + else: return self._run_number #-------------------------------------------------------------------------------------------------------------------- def __set__(self,instance,value): - """Set up Run number and define workspace name from any source """ - # - if value == None: # clear current run number - self._clear_all() - return - if isinstance(value, api.Workspace): - if self._ws_name: - if self._ws_name != value.name(): - self._clear_all() - self._set_ws_as_source(value) - else: - return # do nothing + """Set up Run number and define workspace name from any source """ + # + if value == None: # clear current run number + self._clear_all() + return + if isinstance(value, api.Workspace): + if self._ws_name: + if self._ws_name != value.name(): + self._clear_all() + self._set_ws_as_source(value) + else: + return # do nothing # it is just reassigning the same workspace to itself - else: # first assignment of workspace to property - self._set_ws_as_source(value) - return - - if isinstance(value,str): # it may be run number as string or it may be a workspace name - if value in mtd: # workspace name - ws = mtd[value] - self.__set__(instance,ws) - return - else: # split string into run indexes and auxiliary file parameters - file_path,run_num,fext = prop_helpers.parse_run_file_name(value) - - if isinstance(run_num,list): - 
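The __get__ overload above makes the descriptor read transparently: while nothing is loaded the property returns the bare run number, and once the corresponding workspace exists in the ADS the same read returns that workspace. Illustrative use, assuming a property manager instance 'propman' and a run file that can actually be found on the data search path:

    # 'propman' is an assumed PropertyManager instance; 14305 is an invented run.
    propman.sample_run = 14305
    print(propman.sample_run)                        # 14305, nothing loaded yet
    ws = PropertyManager.sample_run.get_workspace()  # loads (or finds) the run
    print(propman.sample_run.name())                 # now the workspace is returned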
self._set_run_list(instance,run_num,file_path,fext) - else: - self._set_single_run(instance,run_num,file_path,fext,False) - elif isinstance(value,list): - self._set_run_list(instance,value,"",instance.data_file_ext) - else: - self._set_single_run(instance,value,"",instance.data_file_ext,True) + else: # first assignment of workspace to property + self._set_ws_as_source(value) + return + + if isinstance(value,str): # it may be run number as string or it may be a workspace name + if value in mtd: # workspace name + ws = mtd[value] + self.__set__(instance,ws) + return + else: # split string into run indexes and auxiliary file parameters + file_path,run_num,fext = prop_helpers.parse_run_file_name(value) + + if isinstance(run_num,list): + self._set_run_list(instance,run_num,file_path,fext) + else: + self._set_single_run(instance,run_num,file_path,fext,False) + elif isinstance(value,list): + self._set_run_list(instance,value,"",instance.data_file_ext) + else: + self._set_single_run(instance,value,"",instance.data_file_ext,True) #-------------------------------------------------------------------------------------------------------------------- + def _set_single_run(self,instance,run_number,file_path='',fext=None,default_fext=False): """ """ self._run_number = int(run_number) @@ -423,16 +423,16 @@ class RunDescriptor(PropDescriptor): def _set_run_list(self,instance,run_list,file_path=None,fext=None): if self._run_list and self._run_list.check_runs_equal(run_list,file_path,fext): - return + return else: - self._clear_all() - self._run_list = RunList(run_list,file_path,fext) - run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(instance.sum_runs) - self._run_list.set_last_ind2sum(ind) - self._run_number = run_num - self._run_file_path = file_path - self._fext = main_fext - self._ws_name = self._build_ws_name() + self._clear_all() + self._run_list = RunList(run_list,file_path,fext) + run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(instance.sum_runs) + self._run_list.set_last_ind2sum(ind) + self._run_number = run_num + self._run_file_path = file_path + self._fext = main_fext + self._ws_name = self._build_ws_name() def run_number(self): """Return run number regardless of workspace is loaded or not""" @@ -444,16 +444,34 @@ class RunDescriptor(PropDescriptor): #-------------------------------------------------------------------------------------------------------------------- # Masking #-------------------------------------------------------------------------------------------------------------------- - def get_masking(self): - """Return masking workspace specific to this particular workspace - together with number of masked spectra + def get_masking(self,noutputs=None): + """Return masking workspace specific to this particular workspace + together with number of masked spectra if requested. 
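The rewritten __set__ above accepts every source the scripts use for a run: None to clear, an existing workspace, a string that is either a workspace name or a run-file name (path, run number and extension are parsed out), a plain run number, or a list of run numbers. Illustrative assignments (all values invented, 'propman' as above):

    propman.sample_run = None                     # clear caches and workspaces
    propman.sample_run = 14305                    # single run number
    propman.sample_run = [14305, 14306, 14307]    # run list
    propman.sample_run = 'LET00014305.nxs'        # run file name, parsed apart
    propman.sample_run = mtd['existing_ws']       # workspace already in the ADS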
+ + noutputs is provided as argument, as funcreturn does not propagate + through inheritance and overloaded functions """ + if not noutputs: + try: + noutputs,r = funcreturns.lhs_info('both') + except: + noutputs=0 + if self._mask_ws_name: mask_ws = mtd[self._mask_ws_name] - num_masked = mask_ws.getRun().getLogData('NUM_SPECTRA_Masked').value - return (mask_ws,num_masked) + #TODO: need normal exposure of getNumberMasked() method of masks workspace + if noutputs>1: + __tmp_masks,spectra = ExtractMask(self._mask_ws_name) + num_masked = len(spectra) + DeleteWorkspace(__tmp_masks) + return (mask_ws,num_masked) + else: + return mask_ws else: - return (None,0) + if noutputs>1: + return (None,0) + else: + return None #-------------------------------------------------------------------------------------------------------------------- def add_masked_ws(self,masked_ws): """Extract masking from the workspace provided and store masks @@ -461,20 +479,17 @@ class RunDescriptor(PropDescriptor): """ if self._mask_ws_name: mask_ws = mtd[self._mask_ws_name] - num_masked = mask_ws.getRun().getLogData('NUM_SPECTRA_Masked').value add_mask_name = self._prop_name + '_tmp_masking' else: - num_masked = 0 add_mask_name = self._prop_name + 'CurrentMasking' - masks,spectra = ExtractMask(InputWorkspace=masked_ws,OutputWorkspace=add_mask_name) - num_masked+=len(spectra) + masks,spectra = ExtractMask(InputWorkspace=masked_ws,OutputWorkspace=add_mask_name) if self._mask_ws_name: mask_ws +=masks + DeleteWorkspace(add_mask_name) else: self._mask_ws_name = add_mask_name - AddSampleLog(Workspace=self._mask_ws_name,LogName = 'NUM_SPECTRA_Masked',\ - LogText=str(num_masked),LogType='Number') + # #-------------------------------------------------------------------------------------------------------------------- def is_monws_separate(self): """Is monitor workspace is separated from data workspace or not""" @@ -499,7 +514,7 @@ class RunDescriptor(PropDescriptor): else: return [current_run] else: - return [current_run] + return [current_run] #-------------------------------------------------------------------------------------------------------------------- def get_run_file_list(self): """Returns list of the files, assigned to current property """ @@ -534,7 +549,7 @@ class RunDescriptor(PropDescriptor): return summed_runs #-------------------------------------------------------------------------------------------------------------------- def get_runs_to_sum(self,existing_sum_ws=None,num_files=None): - """ return list of runs, expected to be summed together + """Return list of runs, expected to be summed together excluding the runs, already summed and added to cached sum workspace """ @@ -553,39 +568,39 @@ class RunDescriptor(PropDescriptor): runs2_sum = self._run_list.get_run_list2sum(num_files) for run in summed_runs: if run in runs2_sum: - del runs2_sum[runs2_sum.index(run)] + del runs2_sum[runs2_sum.index(run)] return (runs2_sum,existing_sum_ws,n_existing_sums) #-------------------------------------------------------------------------------------------------------------------- def find_run_files(self,run_list=None): - """Find run files correspondent to the run list provided + """Find run files correspondent to the run list provided and set path to these files as new internal parameters for the files in the list Returns True and empty list or False and the list of the runs, which files were not found or not belong to the existing run list. 
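get_masking above now shapes its return value to the number of outputs the caller asks for, because funcreturns.lhs_info cannot see through the inherited and overloaded call chain: one output (or an unresolved count) gives the mask workspace alone, while two outputs also recount the masked spectra with ExtractMask. Typical call sites therefore look like:

    # Assumes a mask has already been attached to sample_run elsewhere.
    mask_ws = PropertyManager.sample_run.get_masking()        # workspace only
    mask_ws, nmasked = PropertyManager.sample_run.get_masking(2)
    # the explicit '2' is what the diagnostics code uses when it needs the count.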
- """ - - if not self._run_list: - if not run_list: - return (True,[],[]) - else: - return (False,run_list,[]) - - if run_list: - existing = self._run_list.get_all_run_list() - non_existing = [] - for run in run_list: - if not(run in existing): - raise RuntimeError('run {0} is not in the existing run list'.format(run)) - - inst = RunDescriptor._holder.short_instr_name - default_fext = RunDescriptor._holder.data_file_ext - not_found,found = self._run_list.find_run_files(inst,run_list,default_fext) - if len(not_found) == 0: - return (True,[],found) - else: - return (False,not_found,found) + """ + + if not self._run_list: + if not run_list: + return (True,[],[]) + else: + return (False,run_list,[]) + + if run_list: + existing = self._run_list.get_all_run_list() + non_existing = [] + for run in run_list: + if not(run in existing): + raise RuntimeError('run {0} is not in the existing run list'.format(run)) + + inst = RunDescriptor._holder.short_instr_name + default_fext = RunDescriptor._holder.data_file_ext + not_found,found = self._run_list.find_run_files(inst,run_list,default_fext) + if len(not_found) == 0: + return (True,[],found) + else: + return (False,not_found,found) #-------------------------------------------------------------------------------------------------------------------- def set_action_suffix(self,suffix=None): """Method to set part of the workspace name, which indicate some action performed over this workspace @@ -623,24 +638,24 @@ class RunDescriptor(PropDescriptor): new_name = self._build_ws_name() old_name = workspace.name() if new_name != old_name: - RenameWorkspace(InputWorkspace=old_name,OutputWorkspace=new_name) + RenameWorkspace(InputWorkspace=old_name,OutputWorkspace=new_name) - old_mon_name = old_name + '_monitors' - new_mon_name = new_name + '_monitors' - if old_mon_name in mtd: - RenameWorkspace(InputWorkspace=old_mon_name,OutputWorkspace=new_mon_name) + old_mon_name = old_name + '_monitors' + new_mon_name = new_name + '_monitors' + if old_mon_name in mtd: + RenameWorkspace(InputWorkspace=old_mon_name,OutputWorkspace=new_mon_name) self._ws_name = new_name #-------------------------------------------------------------------------------------------------------------------- @staticmethod def _check_calibration_source(): - """If user have not specified calibration as input to the script, + """If user have not specified calibration as input to the script, try to retrieve calibration stored in file with run properties""" - changed_prop = RunDescriptor._holder.getChangedProperties() - if 'det_cal_file' in changed_prop: - use_workspace_calibration = False - else: - use_workspace_calibration = True - return use_workspace_calibration + changed_prop = RunDescriptor._holder.getChangedProperties() + if 'det_cal_file' in changed_prop: + use_workspace_calibration = False + else: + use_workspace_calibration = True + return use_workspace_calibration #-------------------------------------------------------------------------------------------------------------------- def get_workspace(self): """Method returns workspace correspondent to current run number(s) @@ -649,7 +664,7 @@ class RunDescriptor(PropDescriptor): Returns Mantid pointer to the workspace, corresponding to this run number """ if not self._ws_name: - self._ws_name = self._build_ws_name() + self._ws_name = self._build_ws_name() if self._ws_name in mtd: @@ -657,26 +672,26 @@ class RunDescriptor(PropDescriptor): if ws.run().hasProperty("calibrated"): return ws # already calibrated else: - prefer_ws_calibration = 
self._check_calibration_source() - self.apply_calibration(ws,RunDescriptor._holder.det_cal_file,prefer_ws_calibration) - return ws - else: - if self._run_number: - prefer_ws_calibration = self._check_calibration_source() - inst_name = RunDescriptor._holder.short_inst_name - calibration = RunDescriptor._holder.det_cal_file - if self._run_list and RunDescriptor._holder.sum_runs : # Sum runs - ws = self._load_and_sum_runs(inst_name,RunDescriptor._holder.load_monitors_with_workspace) - else: # load current workspace - ws = self.load_run(inst_name, calibration,False, RunDescriptor._holder.load_monitors_with_workspace,prefer_ws_calibration) - - - self.synchronize_ws(ws) - self.apply_calibration(ws,calibration,prefer_ws_calibration) - - return ws - else: - return None + prefer_ws_calibration = self._check_calibration_source() + self.apply_calibration(ws,RunDescriptor._holder.det_cal_file,prefer_ws_calibration) + return ws + else: + if self._run_number: + prefer_ws_calibration = self._check_calibration_source() + inst_name = RunDescriptor._holder.short_inst_name + calibration = RunDescriptor._holder.det_cal_file + if self._run_list and RunDescriptor._holder.sum_runs : # Sum runs + ws = self._load_and_sum_runs(inst_name,RunDescriptor._holder.load_monitors_with_workspace) + else: # load current workspace + ws = self.load_run(inst_name, calibration,False, RunDescriptor._holder.load_monitors_with_workspace,prefer_ws_calibration) + + + self.synchronize_ws(ws) + self.apply_calibration(ws,calibration,prefer_ws_calibration) + + return ws + else: + return None #-------------------------------------------------------------------------------------------------------------------- def get_ws_clone(self,clone_name='ws_clone'): """Get unbounded clone of existing Run workspace""" @@ -703,30 +718,30 @@ class RunDescriptor(PropDescriptor): Return the pointer to workspace being chopped """ if not origin: - origin = self.get_workspace() + origin = self.get_workspace() origin_name = origin.name() try: - mon_ws = mtd[origin_name + '_monitors'] + mon_ws = mtd[origin_name + '_monitors'] except: - mon_ws = None + mon_ws = None target_name = '#{0}/{1}#'.format(chunk_num,n_chunks) + origin_name if chunk_num == n_chunks: - RenameWorkspace(InputWorkspace=origin_name,OutputWorkspace=target_name) - if mon_ws: - RenameWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors') - origin_name = target_name - origin_invalidated = True + RenameWorkspace(InputWorkspace=origin_name,OutputWorkspace=target_name) + if mon_ws: + RenameWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors') + origin_name = target_name + origin_invalidated = True else: - if mon_ws: - CloneWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors') - origin_invalidated = False + if mon_ws: + CloneWorkspace(InputWorkspace=mon_ws,OutputWorkspace=target_name + '_monitors') + origin_invalidated = False if rebin: # debug and compatibility mode with old reduction - Rebin(origin_name,OutputWorkspace=target_name,Params=[tof_range[0],tof_range[1],tof_range[2]],PreserveEvents=False) + Rebin(origin_name,OutputWorkspace=target_name,Params=[tof_range[0],tof_range[1],tof_range[2]],PreserveEvents=False) else: - CropWorkspace(origin_name,OutputWorkspace=target_name,XMin=tof_range[0],XMax=tof_range[2]) + CropWorkspace(origin_name,OutputWorkspace=target_name,XMin=tof_range[0],XMax=tof_range[2]) self._set_ws_as_source(mtd[target_name]) if origin_invalidated: @@ -743,7 +758,7 @@ class RunDescriptor(PropDescriptor): """ data_ws = 
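chop_ws_part above names every TOF chunk by prefixing the origin workspace, and only the last chunk is allowed to rename (and so invalidate) the origin; earlier chunks crop or rebin into a copy. The naming rule in isolation (workspace name and chunk indices invented):

    # Sketch of the chunk naming used by chop_ws_part.
    def chunk_name(origin_name, chunk_num, n_chunks):
        return '#{0}/{1}#'.format(chunk_num, n_chunks) + origin_name

    print(chunk_name('LET14305', 2, 3))   # '#2/3#LET14305'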
self.get_workspace() if not data_ws: - return None + return None monWS_name = data_ws.name() + '_monitors' if monWS_name in mtd: @@ -759,18 +774,18 @@ class RunDescriptor(PropDescriptor): mon_ws = self.copy_spectrum2monitors(data_ws,mon_ws,specID) if monitor_ID: - try: + try: ws_index = mon_ws.getIndexFromSpectrumNumber(monitor_ID) - except: # - mon_ws = None + except: # + mon_ws = None else: mon_list = self._holder.get_used_monitors_list() for monID in mon_list: try: ws_ind = mon_ws.getIndexFromSpectrumNumber(int(monID)) except: - mon_ws = None - break + mon_ws = None + break return mon_ws #-------------------------------------------------------------------------------------------------------------------- def is_existing_ws(self): @@ -781,7 +796,7 @@ class RunDescriptor(PropDescriptor): else: return False else: - return False + return False #-------------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------------- def get_file_ext(self): @@ -810,7 +825,7 @@ class RunDescriptor(PropDescriptor): main purpose -- to support customized order of file extensions """ if not run_num_str: - run_num_str = str(self.run_number()) + run_num_str = str(self.run_number()) inst_name = RunDescriptor._holder.short_inst_name if 'file_hint' in kwargs: @@ -873,16 +888,16 @@ class RunDescriptor(PropDescriptor): raise IOError(data_file) if load_mon_with_workspace: - mon_load_option = 'Include' + mon_load_option = 'Include' else: - mon_load_option = 'Separate' + mon_load_option = 'Separate' # try: # Hack: LoadEventNexus does not understand Separate at the moment and throws. - # And event loader always loads monitors separately - Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = mon_load_option) + # And event loader always loads monitors separately + Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = mon_load_option) except ValueError: - #mon_load_option =str(int(load_mon_with_workspace)) - Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = '1',MonitorsAsEvents='0') + #mon_load_option =str(int(load_mon_with_workspace)) + Load(Filename=data_file, OutputWorkspace=ws_name,LoadMonitors = '1',MonitorsAsEvents='0') RunDescriptor._logger("Loaded {0}".format(data_file),'information') @@ -925,7 +940,7 @@ class RunDescriptor(PropDescriptor): if not calibration or use_ws_calibration: return if not isinstance(loaded_ws, api.Workspace): - raise RuntimeError(' Calibration can be applied to a workspace only and got object of type {0}'.format(type(loaded_ws))) + raise RuntimeError(' Calibration can be applied to a workspace only and got object of type {0}'.format(type(loaded_ws))) if loaded_ws.run().hasProperty("calibrated"): return # already calibrated @@ -942,8 +957,8 @@ class RunDescriptor(PropDescriptor): test_name = ws_calibration ws_calibration = FileFinder.getFullPath(ws_calibration) if len(ws_calibration) == 0: - raise RuntimeError('Can not find defined in run {0} calibration file {1}\n'\ - 'Define det_cal_file reduction parameter properly'.format(loaded_ws.name(),test_name)) + raise RuntimeError('Can not find defined in run {0} calibration file {1}\n'\ + 'Define det_cal_file reduction parameter properly'.format(loaded_ws.name(),test_name)) RunDescriptor._logger('*** load_data: Calibrating data using workspace defined calibration file: {0}'.format(ws_calibration),'notice') except KeyError: # no det_cal_file defined in 
workspace if calibration: @@ -1097,27 +1112,27 @@ class RunDescriptor(PropDescriptor): return not(self._in_cash) def notify_sum_runs_changed(self,old_value,new_value): - """ Take actions on changes to sum_runs option - """ - if self._run_list: - if old_value != new_value: - rl = self._run_list - self._clear_all() - rl.set_last_ind2sum(-1) # this will reset index to default - self._run_list = rl - run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(new_value) - self._run_list.set_last_ind2sum(ind) - self._run_number = run_num - self._run_file_path = file_path - self._fext = main_fext - self._ws_name = self._build_ws_name(new_value) - if new_value is False: - self._run_list.del_cashed_sum() + """Take actions on changes to sum_runs option + """ + if self._run_list: + if old_value != new_value: + rl = self._run_list + self._clear_all() + rl.set_last_ind2sum(-1) # this will reset index to default + self._run_list = rl + run_num,file_path,main_fext,ind = self._run_list.get_current_run_info(new_value) + self._run_list.set_last_ind2sum(ind) + self._run_number = run_num + self._run_file_path = file_path + self._fext = main_fext + self._ws_name = self._build_ws_name(new_value) + if new_value is False: + self._run_list.del_cashed_sum() def _load_and_sum_runs(self,inst_name,monitors_with_ws): """Load multiple runs and sum them together - monitors_with_ws -- if true, load monitors with workspace + monitors_with_ws -- if true, load monitors with workspace """ RunDescriptor._logger("*** Summing multiple runs ****") @@ -1138,7 +1153,7 @@ class RunDescriptor(PropDescriptor): f_guess,index = self._run_list.get_file_guess(inst_name,runs_to_sum[0]) ws = self.load_file(inst_name,'Sum_ws',False,monitors_with_ws, - False,file_hint=f_guess) + False,file_hint=f_guess) sum_ws_name = ws.name() sum_mon_name = sum_ws_name + '_monitors' @@ -1203,7 +1218,7 @@ class RunDescriptorDependent(RunDescriptor): self._has_own_value = False def __get__(self,instance,owner=None): - """Return dependent run number which is host run number if this one has not been set + """Return dependent run number which is host run number if this one has not been set or this run number if it was """ if instance is None: # this class functions and the host functions @@ -1232,9 +1247,9 @@ class RunDescriptorDependent(RunDescriptor): # TODO -- how to automate all these functions below? 
def run_number(self): if self._has_own_value: - return super(RunDescriptorDependent,self).run_number() + return super(RunDescriptorDependent,self).run_number() else: - return self._host.run_number() + return self._host.run_number() # def is_monws_separate(self): if self._has_own_value: @@ -1347,15 +1362,15 @@ class RunDescriptorDependent(RunDescriptor): return super(RunDescriptorDependent,self).clear_monitors() else: return self._host.clear_monitors() - def get_masking(self): - if self._has_own_value: - return super(RunDescriptorDependent,self).get_masking() - else: - return self._host.get_masking() + def get_masking(self,noutputs=None): + if self._has_own_value: + return super(RunDescriptorDependent,self).get_masking(noutputs) + else: + return self._host.get_masking(noutputs) def add_masked_ws(self,masked_ws): - if self._has_own_value: + if self._has_own_value: return super(RunDescriptorDependent,self).add_masked_ws(masked_ws) - else: + else: return self._host.add_masked_ws(masked_ws) #-------------------------------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------------------------------- diff --git a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py index 1891c01c73c893468a9d6f2127676c8aec188891..269654f8eb52b9b1cc0f335abc2b497e94aa529e 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py +++ b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py @@ -71,17 +71,17 @@ def diagnose(white_int,**kwargs): # process subsequent calls to this routine, when white mask is already defined white= kwargs.get('white_mask',None) # and white beam is not changed - # white mask assumed to be global so no sectors in there - if not(white is None) and isinstance(white,RunDescriptor.RunDescriptor): - hardmask_file = None - white_mask,num_failed = white.get_masking() - add_masking(white_int, white_mask) - van_mask = None + #white mask assumed to be global so no sectors in there + if not white is None and isinstance(white,RunDescriptor.RunDescriptor): + hardmask_file = None + white_mask,num_failed = white.get_masking(2) + add_masking(white_int, white_mask) + van_mask = None else: # prepare workspace to keep white mask white_mask = None van_mask = CloneWorkspace(white_int) - if not (hardmask_file is None): + if not hardmask_file is None: LoadMask(Instrument=kwargs.get('instr_name',''),InputFile=parser.hard_mask_file, OutputWorkspace='hard_mask_ws') MaskDetectors(Workspace=white_int, MaskedWorkspace='hard_mask_ws') @@ -94,48 +94,47 @@ def diagnose(white_int,**kwargs): DeleteWorkspace('hard_mask_ws') if not parser.use_hard_mask_only : - # White beam Test - if white_mask: + # White beam Test + if white_mask: test_results[1] = ['white_mask cache global', num_failed] - else: - __white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge, + else: + __white_masks, num_failed = do_white_test(white_int, parser.tiny, parser.huge, parser.van_out_lo, parser.van_out_hi, parser.van_lo, parser.van_hi, parser.van_sig, start_index, end_index) - test_results[1] = [str(__white_masks), num_failed] - add_masking(white_int, __white_masks, start_index, end_index) - if van_mask: - add_masking(van_mask, __white_masks, start_index, end_index) - DeleteWorkspace(__white_masks) - - # Second white beam test - if 'second_white' in kwargs: #NOT IMPLEMENTED - raise NotImplementedError("Second white is not yet implemented") - 
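In the diagnose() changes above, handing a white-beam RunDescriptor in through the white_mask keyword short-circuits the white-beam test: the cached mask is fetched with get_masking(2), applied to the white integral, and the result is logged as 'white_mask cache global'. A hedged, heavily abbreviated sketch of a second diagnostics pass reusing that cache (the wb_run property name and the white_int workspace are assumptions, and a real call carries many more keyword arguments):

    # 'white_int' is the integrated white-beam workspace from an earlier pass;
    # 'wb_run' is assumed to be a RunDescriptor property that already holds a mask.
    diag_params = dict(instr_name='LET',
                       white_mask=PropertyManager.wb_run)
    diagnose(white_int, **diag_params)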
__second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,\ + test_results[1] = [str(__white_masks), num_failed] + add_masking(white_int, __white_masks, start_index, end_index) + if van_mask: + add_masking(van_mask, __white_masks, start_index, end_index) + DeleteWorkspace(__white_masks) + + # Second white beam test + if 'second_white' in kwargs: #NOT IMPLEMENTED + raise NotImplementedError("Second white is not yet implemented") + __second_white_masks, num_failed = do_second_white_test(white_int, parser.second_white, parser.tiny, parser.huge,\ parser.van_out_lo, parser.van_out_hi,\ parser.van_lo, parser.van_hi, parser.variation,\ parser.van_sig, start_index, end_index) - test_results[2] = [str(__second_white_masks), num_failed] - add_masking(white_int, __second_white_masks, start_index, end_index) - #TODO - #add_masking(van_mask, __second_white_masks, start_index, end_index) + test_results[2] = [str(__second_white_masks), num_failed] + add_masking(white_int, __second_white_masks, start_index, end_index) + #TODO + #add_masking(van_mask, __second_white_masks, start_index, end_index) # # Zero total count check for sample counts # - zero_count_failures = 0 - if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False): + zero_count_failures = 0 + if kwargs.get('sample_counts',None) is not None and kwargs.get('samp_zero',False): add_masking(parser.sample_counts, white_int) maskZero, zero_count_failures = FindDetectorsOutsideLimits(InputWorkspace=parser.sample_counts,\ - StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,\ + StartWorkspaceIndex=start_index, EndWorkspaceIndex=end_index,\ LowThreshold=1e-10, HighThreshold=1e100) add_masking(white_int, maskZero, start_index, end_index) DeleteWorkspace(maskZero) - # # Background check # - if hasattr(parser, 'background_int'): + if hasattr(parser, 'background_int'): add_masking(parser.background_int, white_int) __bkgd_mask, failures = do_background_test(parser.background_int, parser.samp_lo,\ parser.samp_hi, parser.samp_sig, parser.samp_zero, start_index, end_index) @@ -146,7 +145,7 @@ def diagnose(white_int,**kwargs): # # Bleed test # - if hasattr(parser, 'bleed_test') and parser.bleed_test: + if hasattr(parser, 'bleed_test') and parser.bleed_test: if not hasattr(parser, 'sample_run'): raise RuntimeError("Bleed test requested but the sample_run keyword has not been provided") __bleed_masks, failures = do_bleed_test(parser.sample_run, parser.bleed_maxrate, parser.bleed_pixels) @@ -158,18 +157,18 @@ def diagnose(white_int,**kwargs): end_index_name=" to: end" default = True if hasattr(parser, 'print_diag_results') and parser.print_diag_results: - default=True + default=True if 'start_index' in kwargs: - default = False - start_index_name = "from: "+str(kwargs['start_index']) + default = False + start_index_name = "from: "+str(kwargs['start_index']) if 'end_index' in kwargs : - default = False - end_index_name = " to: "+str(kwargs['end_index']) + default = False + end_index_name = " to: "+str(kwargs['end_index']) testName=start_index_name+end_index_name if not default : - testName = " For bank: "+start_index_name+end_index_name + testName = " For bank: "+start_index_name+end_index_name if hasattr(parser, 'print_diag_results') and parser.print_diag_results: print_test_summary(test_results,testName) @@ -294,7 +293,8 @@ def normalise_background(background_int, white_int, second_white_int=None): """ if second_white_int is None: - # quetly divide background integral by white 
beam integral not reporting about possible 0 in wb integral (they will be removed by diag anyway) + #quetly divide background integral by white beam integral not reporting about possible 0 in + #wb integral (they will be removed by diag anyway) background_int = Divide(LHSWorkspace=background_int,RHSWorkspace=white_int,WarnOnZeroDivide='0') else: hmean = 2.0*white_int*second_white_int/(white_int+second_white_int) diff --git a/Code/Mantid/scripts/test/RunDescriptorTest.py b/Code/Mantid/scripts/test/RunDescriptorTest.py index e4f60adca811e3c4a7677585272b8497f5c0417a..337fb4d2c317ef0b3ae126031cb0f7367c3ccb62 100644 --- a/Code/Mantid/scripts/test/RunDescriptorTest.py +++ b/Code/Mantid/scripts/test/RunDescriptorTest.py @@ -1,5 +1,5 @@ import os,sys,inspect -#os.environ["PATH"] = r"d:\Data\Mantid_GIT_test\Code\builds\br_master\bin\Release;"+os.environ["PATH"] +#os.environ["PATH"] =r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"] from mantid.simpleapi import * from mantid import api import unittest @@ -460,6 +460,16 @@ class RunDescriptorTest(unittest.TestCase): ws_name = PropertyManager.sample_run._mask_ws_name self.assertTrue(ws_name in mtd) + masks = PropertyManager.sample_run.get_masking() + self.assertTrue(isinstance(masks,api.MatrixWorkspace)) + ws_name = masks.name() + self.assertTrue(ws_name in mtd) + + masks1 = PropertyManager.sample_run.get_masking(1) + self.assertTrue(isinstance(masks1,api.MatrixWorkspace)) + + + propman.sample_run = None self.assertFalse(ws_name in mtd)
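The assertions added to RunDescriptorTest at the end exercise both return conventions of get_masking. An equivalent minimal check outside the unittest scaffolding, assuming sample_run already carries a mask, would be:

    from mantid import api

    mask_only = PropertyManager.sample_run.get_masking(1)
    mask_ws, nmasked = PropertyManager.sample_run.get_masking(2)
    assert isinstance(mask_only, api.MatrixWorkspace)
    assert mask_only.name() == mask_ws.name()   # same cached mask workspace
    assert nmasked >= 0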