"""
System test for ARCS reduction
"""

from mantid.simpleapi import *
import os
import stresstesting
from numpy import *

class ARCSReductionTest(stresstesting.MantidStressTest):
    """Reduce an ARCS event run twice (default median detector test and
    MedianTestLevelsUp=1), save both processed vanadium files plus an
    NXSPE file, then compare the reduced data against a stored reference.
    """

    def __init__(self):
        super(ARCSReductionTest, self).__init__()
        # Output paths are defined up front so that cleanup() cannot fail
        # with an AttributeError when runTest() aborts before reaching the
        # assignments (the original set these inside runTest).
        savedir = config.getString('defaultsave.directory')
        self.vanFile0 = os.path.join(savedir, 'ARCSvan_0.nxs')
        self.vanFile1 = os.path.join(savedir, 'ARCSvan_1.nxs')
        self.nxspeFile = os.path.join(savedir, 'ARCSsystemtest.nxspe')

    def requiredFiles(self):
        return ["ARCS_23961_event.nxs", "WBARCS.nxs"]

    def requiredMemoryMB(self):
        return 4000

    def cleanup(self):
        # Remove every file the test may have produced.
        for filename in [self.nxspeFile, self.vanFile1, self.vanFile0]:
            if os.path.exists(filename):
                os.remove(filename)
        return True

    def runTest(self):
        config['default.facility'] = "SNS"
        # First reduction: default detector-vanadium median test.
        DgsReduction(
            SampleInputFile="ARCS_23961_event.nxs",
            OutputWorkspace="reduced",
            IncidentBeamNormalisation="ByCurrent",
            DetectorVanadiumInputFile="WBARCS.nxs",
            UseBoundsForDetVan=True,
            DetVanIntRangeLow=0.35,
            DetVanIntRangeHigh=0.75,
            DetVanIntRangeUnits="Wavelength",
            SaveProcessedDetVan=True,
            SaveProcDetVanFilename=self.vanFile0,
        )
        # Second reduction: median test one level up the instrument tree,
        # which is expected to mask fewer pixels than the default.
        DgsReduction(
            SampleInputFile="ARCS_23961_event.nxs",
            OutputWorkspace="reduced",
            IncidentBeamNormalisation="ByCurrent",
            DetectorVanadiumInputFile="WBARCS.nxs",
            UseBoundsForDetVan=True,
            DetVanIntRangeLow=0.35,
            DetVanIntRangeHigh=0.75,
            DetVanIntRangeUnits="Wavelength",
            MedianTestLevelsUp=1.,
            SaveProcessedDetVan=True,
            SaveProcDetVanFilename=self.vanFile1,
        )

        Ei = mtd["reduced"].run().get("Ei").value
        SaveNXSPE(InputWorkspace="reduced", Filename=self.nxspeFile,
                  Efixed=Ei, psi=0, KiOverKfScaling=True)

    def validate(self):
        # Both processed vanadium files must have been written.
        self.assertTrue(os.path.exists(self.vanFile0))
        self.assertTrue(os.path.exists(self.vanFile1))
        van0 = Load(self.vanFile0)
        van1 = Load(self.vanFile1)
        m0 = ExtractMask(van0)
        m1 = ExtractMask(van1)
        self.assertGreaterThan(len(m0[1]), len(m1[1]))  # levelsUp=1 should have less pixels masked
        DeleteWorkspace("m0")
        DeleteWorkspace("m1")
        DeleteWorkspace(van0)
        DeleteWorkspace(van1)
        self.assertTrue(os.path.exists(self.nxspeFile))
        nxspe = LoadNXSPE(self.nxspeFile)
        self.disableChecking.append('Instrument')

        return 'nxspe', 'ARCSsystemtest.nxs'
"""
System Test for BASIS autoreduction
"""
from mantid.simpleapi import *

import stresstesting
import shutil
import os

class BASISAutoReductionTest(stresstesting.MantidStressTest):
    """Replay the BASIS autoreduction workflow on a single event run and
    compare the resulting S(Q,w) workspace against a stored reference.
    """

    def requiredFiles(self):
        return ['BSS_13387_event.nxs']

    def cleanup(self):
        # Nothing is written to disk, so there is nothing to remove.
        return True

    def runTest(self):
        idf_dir = config['instrumentDefinition.directory']
        data_ws = 'data_ws'
        monitor_ws = 'monitor_ws'

        # Load the run and mask the known dead detectors. The explicit ID
        # list below comes from the autoreduction configuration.
        Load(Filename='BSS_13387_event.nxs', OutputWorkspace=data_ws)
        LoadMask(Instrument='BASIS', OutputWorkspace='BASIS_MASK', InputFile='BASIS_AutoReduction_Mask.xml')
        masked_ids = "5,49,69,113,133,177,197,241,261,305,325,369,389,433,453,497,517,561,581,625,645,689,709,753,773,817,837,881,901,945,965,1009,1029,1073,1093,1137,1157,1201,1221,1265,1285,1329,1349,1393,1413,1457,1477,1521,1541,1585,1605,1649,1669,1713,1733,1777,1797,1841,1861,1905,1925,1969,1989,2033,2053,2097,2117,2161,2181,2225,2245,2289,2309,2353,2373,2417,2437,2481,2501,2545,2565,2609,2629,2673,2693,2737,2757,2801,2821,2865,2885,2929,2949,2993,3013,3057,3077,3121,3141,3185,3205,3249,3269,3313,3333,3377,3397,3441,3461,3505,3525,3569,3589-3633,3653-3697,3717-3761,3781-3825,3845-3889,3909-3953,3973-4017,4037-4081,4110,4154,4174,4218,4238,4282,4302,4346,4366,4410,4430,4474,4494,4538,4558,4602,4622,4666,4686,4730,4750,4794,4814,4858,4878,4922,4942,4986,5006,5050,5070,5114,5134,5178,5198,5242,5262,5306,5326,5370,5390,5434,5454,5498,5518,5562,5582,5626,5646,5690,5710,5754,5774,5818,5838,5882,5902,5946,5966,6010,6030,6074,6094,6138,6158,6202,6222,6266,6286,6330,6350,6394,6414,6458,6478,6522,6542,6586,6606,6650,6670,6714,6734,6778,6798,6842,6862,6906,6926,6970,6990,7034,7054,7098,7118,7162,7182,7226,7246,7290,7310,7354,7374,7418,7438,7482,7502,7546,7566,7610,7630,7674,7694-7738,7758-7802,7822-7866,7886-7930,7950-7994,8014-8058,8078-8122,8142-8186,8192-15871"
        MaskDetectors(Workspace=data_ws, DetectorList=masked_ids) #MaskedWorkspace='BASIS_MASK')
        ModeratorTzeroLinear(InputWorkspace=data_ws, OutputWorkspace=data_ws)
        LoadParameterFile(Workspace=data_ws, Filename=os.path.join(idf_dir, 'BASIS_silicon_111_Parameters.xml'))

        # Prepare the monitor spectrum: rebin, convert to wavelength and
        # apply the efficiency and scale corrections.
        LoadNexusMonitors(Filename='BSS_13387_event.nxs', OutputWorkspace=monitor_ws)
        Rebin(InputWorkspace=monitor_ws, OutputWorkspace=monitor_ws, Params='10')
        ConvertUnits(InputWorkspace=monitor_ws, OutputWorkspace=monitor_ws, Target='Wavelength')
        OneMinusExponentialCor(InputWorkspace=monitor_ws, OutputWorkspace=monitor_ws, C='0.20749999999999999', C1='0.001276')
        Scale(InputWorkspace=monitor_ws, OutputWorkspace=monitor_ws, Factor='9.9999999999999995e-07')

        # Normalise the data by the monitor and convert to energy transfer.
        ConvertUnits(InputWorkspace=data_ws, OutputWorkspace=data_ws, Target='Wavelength', EMode='Indirect')
        RebinToWorkspace(WorkspaceToRebin=data_ws, WorkspaceToMatch=monitor_ws, OutputWorkspace=data_ws)
        Divide(LHSWorkspace=data_ws, RHSWorkspace=monitor_ws, OutputWorkspace=data_ws)
        ConvertUnits(InputWorkspace=data_ws, OutputWorkspace=data_ws, Target='DeltaE', EMode='Indirect')
        CorrectKiKf(InputWorkspace=data_ws, OutputWorkspace=data_ws, EMode='Indirect')

        Rebin(InputWorkspace=data_ws, OutputWorkspace=data_ws, Params='-0.12,0.0004,0.12')
        #GroupDetectors(InputWorkspace=data_ws, OutputWorkspace=data_ws, MapFile='/SNS/BSS/shared/autoreduce/BASIS_Grouping.xml', Behaviour='Sum')
        SofQW3(InputWorkspace=data_ws, OutputWorkspace=data_ws + '_sqw', QAxisBinning='0.2,0.2,2.0', EMode='Indirect', EFixed='2.082')
        #SaveDaveGrp(Filename=dave_grp_filename, InputWorkspace=data_ws+'_sqw', ToMicroEV=True)
        #SaveNexus(Filename="basis_auto_sqw.nxs", InputWorkspace=data_ws+'_sqw')

    def validate(self):
        # Need to disable checking of the Spectra-Detector map because it isn't
        # fully saved out to the nexus file; some masked detectors should be picked
        # up with by the mask values in the spectra
        self.tolerance = 1e-7
        self.disableChecking.append('Axes')
        self.disableChecking.append('SpectraMap')
        self.disableChecking.append('Instrument')
        return 'data_ws_sqw', 'BASISAutoReduction.nxs'
The files are not included with the standard + repository & required to be accessible from any machine that wishes to run the test. +""" +import stresstesting +from mantid.simpleapi import * + +import os + +# allow for multiple locations +FILE_LOCATIONS = ["/isis/mantid/localtestdata/"]#,"d:/Data/MantidSystemTests/BigData/Dropbox/LoadSQW"] + +class BuildSQWTest(stresstesting.MantidStressTest): + + _startrun = 15058 + _endrun = 15178 + _input_data = [] + _input_location = None + _created_files = [] + + def __init__(self): + super(BuildSQWTest, self).__init__() + prefix = "MAP" + ext = ".nxspe" + # MAP*.nxspe data files + self._input_data = ["%s%d%s" % (prefix,n,ext) for n in range(self._startrun,self._endrun+1)] + + def skipTests(self): + def check_dir(loc): + for filename in self._input_data: + path = os.path.join(loc, filename) + if not os.path.exists(path): + return False + return True + # end nested function + + all_found = False + for location in FILE_LOCATIONS: + if check_dir(location): + self._input_location = location + all_found = True + break + + skip = (not all_found) + return skip + + def runTest(self): + conversion_params = {} + conversion_params['QDimensions'] = 'Q3D' + conversion_params['dEAnalysisMode'] = 'Direct' + conversion_params['Q3DFrames'] = 'HKL' + conversion_params['QConversionScales'] = 'HKL' + conversion_params['PreprocDetectorsWS'] = '_preprocessed_detectors' + conversion_params['MinValues'] = '-7,-7,-7.,-72.0' + conversion_params['MaxValues'] = '7.,7.,7.,382.0' + conversion_params['SplitInto'] = 50 + conversion_params['MaxRecursionDepth'] = 1 + conversion_params['MinRecursionDepth'] = 1 + + self._created_files = [] + for source in self._input_data: + source_path = os.path.join(self._input_location, source) + target = os.path.join(config["defaultsave.directory"], "MD" + source.rstrip(".nxspe") + ".nxs") + # Make sure the target doesn't exist from a previous test + if os.path.exists(target): + os.remove(target) + + print "Converting '%s' 
to '%s' " % (source_path,target) + _cur_spe_ws = LoadNXSPE(Filename=source_path) + SetUB(Workspace=_cur_spe_ws,a='2.87',b='2.87',c='2.87') + # rotated by proper number of degrees around axis Y + # sample log Psi should already be there + SetGoniometer(Workspace=_cur_spe_ws,Axis0='Psi,0,1,0,1') + + conversion_params['InputWorkspace'] = _cur_spe_ws + _cur_md_ws = ConvertToMD(**conversion_params) + + SaveMD(InputWorkspace=_cur_md_ws,Filename=target) + self._created_files.append(target) + DeleteWorkspace(_cur_spe_ws) + DeleteWorkspace(_cur_md_ws) + # end conversion loop + + # Do the final merge + sqw_file = os.path.join(config["defaultsave.directory"],"BuildSQWTestCurrent.nxs") + finalSQW = MergeMDFiles(",".join(self._created_files),OutputFilename=sqw_file,Parallel='0') + self._created_files.append(sqw_file) + + def validate(self): + # LoadMD is unable to load the merged output file. See ticket #8480. + # At the moment this test is useful for benchmarking the conversion so it exists purely + # for timing purposes until #8480 is fixed + return True + + def cleanup(self): + for filename in self._created_files: + try: + os.remove(filename) + except OSError,exc: + mantid.logger.warning("Unable to remove created file '%s'" % filename) + +class LoadSQW_FileBasedTest(BuildSQWTest): + """ The test checks loading MD workspace from SQW file when target file is file based""" + + def __init__(self): + + self._input_data = ["Test22meV2f.sqw","Test22meVMD.nxs"] + + def runTest(self): + + MDws_file = os.path.join(config["defaultsave.directory"],"LoadSQWTestFileBased.nxs") + sqw_file = os.path.join(self._input_location,self._input_data[0]) + + wsMD=LoadSQW(Filename=sqw_file, OutputFilename=MDws_file) + + self._created_files=MDws_file; + + + def validate(self): + """Compare file-based MD files """ + ref_file = os.path.join(self._input_location, self._input_data[1]) + Reference=LoadMD(Filename=ref_file, FileBackEnd=True, Memory=100) + rez = 
CompareMDWorkspaces(Workspace1="wsMD",Workspace2=Reference,Tolerance=1.e-5,CheckEvents=False,IgnoreBoxID=False) + + DeleteWorkspace("wsMD"); + + return rez[0]; + +class LoadSQW_MemBasedTest(BuildSQWTest): + """ The test checks loading MD workspace from SQW file when target file is file based""" + + def __init__(self): + + self._input_data = ["Test22meV2f.sqw","Test22meVMD.nxs"] + + def runTest(self): + + sqw_file = os.path.join(self._input_location,self._input_data[0]) + + wsMD=LoadSQW(Filename=sqw_file) + + self._created_files=[]; + + + def validate(self): + """Compare memory-based vs file based MD workspaces """ + ref_file = os.path.join(self._input_location, self._input_data[1]) + Reference=LoadMD(Filename=ref_file, FileBackEnd=True, Memory=100) + rez = CompareMDWorkspaces(Workspace1="wsMD",Workspace2=Reference,Tolerance=1.e-5,CheckEvents=False,IgnoreBoxID=False) + + DeleteWorkspace("wsMD"); + + return rez[0]; \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/CNCSReductionTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/CNCSReductionTest.py new file mode 100644 index 0000000000000000000000000000000000000000..60397945a8b3cb9bafc9b0a47323c2c6f1015f75 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/CNCSReductionTest.py @@ -0,0 +1,78 @@ +""" +System test for CNCS reduction +""" + +from mantid.simpleapi import * +import os +import stresstesting + +class CNCSReductionTest(stresstesting.MantidStressTest): + + def requiredFiles(self): + return ["CNCS_51936_event.nxs","CNCS_23936_event.nxs","CNCS_23937_event.nxs"] + + def requiredMemoryMB(self): + return 4000 + + def cleanup(self): + if os.path.exists(self.groupingFile): + os.remove(self.groupingFile) + if os.path.exists(self.parFile): + os.remove(self.parFile) + if os.path.exists(self.nxspeFile): + os.remove(self.nxspeFile) + if os.path.exists(self.vanFile): + os.remove(self.vanFile) + return True + + + def runTest(self): + 
"""
System test for CNCS reduction
"""

from mantid.simpleapi import *
import os
import stresstesting

class CNCSReductionTest(stresstesting.MantidStressTest):
    """Reduce summed CNCS runs with time-independent-background subtraction
    and a powder grouping, then check the processed vanadium file and the
    saved NXSPE output against a stored reference.
    """

    def __init__(self):
        super(CNCSReductionTest, self).__init__()
        # Output paths are defined up front so that cleanup() cannot fail
        # with an AttributeError when runTest() aborts before reaching the
        # assignments (the original set these inside runTest).
        savedir = config.getString('defaultsave.directory')
        self.groupingFile = os.path.join(savedir, 'CNCS_powder_group.xml')
        self.parFile = os.path.join(savedir, 'CNCS_powder_group.par')
        self.nxspeFile = os.path.join(savedir, 'CNCS_powder_group.nxspe')
        self.vanFile = os.path.join(savedir, 'van.nx5')

    def requiredFiles(self):
        return ["CNCS_51936_event.nxs", "CNCS_23936_event.nxs", "CNCS_23937_event.nxs"]

    def requiredMemoryMB(self):
        return 4000

    def cleanup(self):
        # Remove every file the test may have produced.
        for filename in [self.groupingFile, self.parFile, self.nxspeFile, self.vanFile]:
            if os.path.exists(filename):
                os.remove(filename)
        return True

    def runTest(self):
        config['default.facility'] = "SNS"
        Load(Filename='CNCS_23936-23937', OutputWorkspace='sum')
        GenerateGroupingPowder(InputWorkspace="sum", AngleStep=0.5, GroupingFilename=self.groupingFile)
        Ei = mtd['sum'].getRun()['EnergyRequest'].firstValue()
        # Suggested time-independent-background TOF window for this Ei.
        tib = SuggestTibCNCS(Ei)

        DgsReduction(
            SampleInputWorkspace="sum",
            OutputWorkspace="reduced",
            EnergyTransferRange="-0.2,0.05,2.2",
            GroupingFile=self.groupingFile,
            IncidentBeamNormalisation="ByCurrent",
            TimeIndepBackgroundSub=True,
            TibTofRangeStart=tib[0],
            TibTofRangeEnd=tib[1],
            DetectorVanadiumInputFile="CNCS_51936_event.nxs",
            UseBoundsForDetVan=True,
            DetVanIntRangeLow=52000.0,
            DetVanIntRangeHigh=53000.0,
            DetVanIntRangeUnits="TOF",
            SaveProcessedDetVan=True,
            SaveProcDetVanFilename=self.vanFile,
        )

        rotationdevice = "SERotator2"
        psi = mtd["reduced"].run().get(rotationdevice).value[0]
        SaveNXSPE(InputWorkspace="reduced", Filename=self.nxspeFile, Efixed=Ei,
                  psi=psi, KiOverKfScaling=True, ParFile=self.parFile)

    def validate(self):
        # Processed vanadium: one bin per spectrum over the full detector set.
        self.assertTrue(os.path.exists(self.vanFile))
        van = Load(self.vanFile)
        self.assertEqual(van.blocksize(), 1)
        self.assertEqual(van.getNumberHistograms(), 51200)
        DeleteWorkspace(van)
        self.assertTrue(os.path.exists(self.nxspeFile))
        nxspe = LoadNXSPE(self.nxspeFile)
        self.disableChecking.append('Instrument')

        return 'nxspe', 'CNCSReduction_TIBasEvents.nxs'
from LoadAndCheckBase import *

'''
Test File loading and basic data integrity checks of CRISP data in Mantid.
'''
class CRISPLoadingTest(LoadAndCheckBase):

    def __init__(self):
        # Name the class explicitly: super(self.__class__, self) recurses
        # infinitely if this class is ever subclassed.
        super(CRISPLoadingTest, self).__init__()
        self.disableChecking.append("Instrument")

    def get_raw_workspace_filename(self):
        """Raw file to load."""
        return "CSP85423.raw"

    def get_nexus_workspace_filename(self):
        """Equivalent NeXus file to load."""
        return "CSP85423.nxs"

    def get_expected_number_of_periods(self):
        return 2

    def get_integrated_reference_workspace_filename(self):
        """Reference result for the integrated first period."""
        return "CSP85423_1Integrated.nxs"

    def get_expected_instrument_name(self):
        return "CRISP"
import stresstesting
from mantid.simpleapi import *
from time import strftime
import os

def _skip_test():
    """Helper function to determine if we run the test"""
    import platform

    # Only runs on RHEL6 at the moment
    if "Linux" not in platform.platform():
        return True
    flavour = platform.linux_distribution()[2]
    if flavour == 'Santiago':  # Codename for RHEL6
        return False  # Do not skip
    else:
        return True

class PG3Calibration(stresstesting.MantidStressTest):
    """Run CalibrateRectangularDetectors (no cross-correlation) on a PG3
    run and compare the resulting offsets against a golden .cal file.
    """

    def cleanup(self):
        # runTest may raise before the calibration file name is recorded,
        # and the file itself may already be gone; guard both cases.
        cal_file = getattr(self, 'saved_cal_file', None)
        if cal_file and os.path.exists(cal_file):
            os.remove(cal_file)

    def skipTests(self):
        return _skip_test()

    def requiredFiles(self):
        files = ["PG3_2538_event.nxs"]
        return files

    def requiredMemoryMB(self):
        """Requires 3Gb"""
        return 3000

    def runTest(self):
        # determine where to save
        savedir = os.path.abspath(os.path.curdir)

        # run the actual code
        output = CalibrateRectangularDetectors(OutputDirectory = savedir, SaveAs = 'calibration', FilterBadPulses = True,
                       GroupDetectorsBy = 'All', DiffractionFocusWorkspace = False, Binning = '0.5, -0.0004, 2.5',
                       MaxOffset=0.01, PeakPositions = '.6866,.7283,.8185,.8920,1.0758,1.2615,2.0599',
                       CrossCorrelation = False, Instrument = 'PG3', RunNumber = '2538', Extension = '_event.nxs')

        if isinstance(output, basestring):
            self.saved_cal_file = output
        else:
            raise NotImplementedError("Output from CalibrateRectangularDetectors is NOT string for calibration file name!")

        # load saved cal file
        LoadCalFile(InputWorkspace="PG3_2538_calibrated", CalFileName=self.saved_cal_file, WorkspaceName="PG3_2538",
                    MakeGroupingWorkspace=False)
        MaskDetectors(Workspace="PG3_2538_offsets",MaskedWorkspace="PG3_2538_mask")
        # load golden cal file
        LoadCalFile(InputWorkspace="PG3_2538_calibrated", CalFileName="PG3_golden.cal", WorkspaceName="PG3_2538_golden",
                    MakeGroupingWorkspace=False)
        MaskDetectors(Workspace="PG3_2538_golden_offsets",MaskedWorkspace="PG3_2538_golden_mask")

    def validateMethod(self):
        return "ValidateWorkspaceToWorkspace"

    def validate(self):
        self.tolerance = 2.0e-4
        return ('PG3_2538_offsets','PG3_2538_golden_offsets')

class PG3CCCalibration(stresstesting.MantidStressTest):
    """Run CalibrateRectangularDetectors with cross-correlation on a PG3
    run and compare the resulting offsets against a golden .cal file.
    """

    def cleanup(self):
        # Same guard as PG3Calibration.cleanup (see comment there).
        cal_file = getattr(self, 'saved_cal_file', None)
        if cal_file and os.path.exists(cal_file):
            os.remove(cal_file)

    def skipTests(self):
        return _skip_test()

    def requiredFiles(self):
        files = ["PG3_2538_event.nxs"]
        return files

    def requiredMemoryMB(self):
        """Requires 3Gb"""
        return 3000

    def runTest(self):
        # determine where to save
        savedir = os.path.abspath(os.path.curdir)

        # run the actual code
        output = CalibrateRectangularDetectors(OutputDirectory = savedir, SaveAs = 'calibration', FilterBadPulses = True,
                       GroupDetectorsBy = 'All', DiffractionFocusWorkspace = False, Binning = '0.5, -0.0004, 2.5',
                       MaxOffset=0.01, PeakPositions = '0.7282933,1.261441',DetectorsPeaks = '17,6',
                       CrossCorrelation = True, Instrument = 'PG3', RunNumber = '2538', Extension = '_event.nxs')

        if isinstance(output, basestring):
            self.saved_cal_file = output
        else:
            raise NotImplementedError("Output from CalibrateRectangularDetectors is NOT string for calibration file name!")

        # load saved cal file
        LoadCalFile(InputWorkspace="PG3_2538_calibrated", CalFileName=self.saved_cal_file, WorkspaceName="PG3_2538",
                    MakeGroupingWorkspace=False)
        MaskDetectors(Workspace="PG3_2538_offsets",MaskedWorkspace="PG3_2538_mask")
        # load golden cal file
        LoadCalFile(InputWorkspace="PG3_2538_calibrated", CalFileName="PG3_goldenCC.cal", WorkspaceName="PG3_2538_golden",
                    MakeGroupingWorkspace=False)
        MaskDetectors(Workspace="PG3_2538_golden_offsets",MaskedWorkspace="PG3_2538_golden_mask")

    def validateMethod(self):
        return "ValidateWorkspaceToWorkspace"

    def validate(self):
        self.tolerance = 1.0e-4
        return ('PG3_2538_offsets','PG3_2538_golden_offsets')
"FindUBUsingLatticeParameters(v1)":("a", "b", "c", "alpha", "beta", "gamma"), + "IndexSXPeaks(v1)":("a", "b", "c", "alpha", "beta", "gamma", "dTolerance"), + "ModeratorTzero(v1)":("tolTOF"), + "MuscatFunc(v1)":("dQ", "dW"), + "OptimizeCrystalPlacement(v1)":("nPeaks", "nParams", "nIndexed"), + "PDFFourierTransform(v1)":("rho0"), + "PoldiAutoCorrelation(v5)":("wlenmin", "wlenmax"), + "PoldiLoadChopperSlits(v1)":("nbLoadedSlits"), + "PoldiLoadSpectra(v1)":("nbSpectraLoaded"), + "PoldiProjectRun(v1)":("wlenmin", "wlenmax"), + "PoldiRemoveDeadWires(v1)":("nbExcludedWires", "nbAuteDeadWires"), + "SaveIsawQvector(v1)":("Qx_vector", "Qy_vector", "Qz_vector"), + "SCDCalibratePanels(v1)":("a", "b", "c", "alpha", "beta", "gamma", + "useL0", "usetimeOffset", "usePanelWidth", + "usePanelHeight", "usePanelPosition", + "usePanelOrientation", "tolerance", + "MaxPositionChange_meters"), + "SetUB(v1)":("a", "b", "c", "alpha", "beta", "gamma", "u", "v"), + "ViewBOA(v1)":("CD-Distance"), + "PoldiCreatePeaksFromCell(v1)":("a", "b", "c", "alpha", "beta", "gamma") + } + +# TODO this list should be empty +FUNC_BAD_NAME = ("Muon_ExpDecayOscTest") + +# TODO this list should be empty +FUNC_BAD_PARAMS = { + "Bk2BkExpConvPV":("TOF_h"), + "CubicSpline":("y0", "y1", "y2"), + "DiffRotDiscreteCircle":("f0.Height", "f0.Radius"), + "DiffSphere":("f0.Height", "f0.Radius"), + "LatticeErrors":("p0", "p1", "p2", "p3", "p4", "p5"), + "Muon_ExpDecayOscTest":("lambda", "frequency", "phi"), + "SCDPanelErrors":("f0_detWidthScale", "f0_detHeightScale", + "f0_Xoffset", "f0_Yoffset", "f0_Zoffset", + "f0_Xrot", "f0_Yrot", "f0_Zrot", + "l0", "t0"), + "StretchedExpFT":("height", "tau", "beta") + } + +class Algorithms(stresstesting.MantidStressTest): + def verifyAlgName(self, name): + if not self.algRegExp.match(name): + print "Algorithm " + name + " has a name that violates conventions" + return False + + if bool(len(name) > MAX_ALG_LEN): + print "%s has a name that is longer than " % name, \ + "%d characters (%d 
> %d)" % (MAX_ALG_LEN, len(name), MAX_ALG_LEN) + return False + + # passed all of the checks + return True + + def verifyCategories(self, name, categories): + if len(categories) <= 0: + print name + " has no categories" + + for category in categories: + if not self.categoryRegExp.match(category): + print name + " has a bad category " + category + return False + + return True + + def checkAllowed(self, alg_descr, name): + if alg_descr not in ALG_BAD_PARAMS.keys(): + return False + + return name in ALG_BAD_PARAMS[alg_descr] + + def verifyProperty(self, alg_descr, name): + upper = name.upper() + if (upper in SPECIAL_UPPER) and (not name in SPECIAL): + index = SPECIAL_UPPER.index(upper) + print alg_descr + " property (" + name + ") has special name "\ + + "with wrong case: " + name + " should be " + SPECIAL[index] + return False + + if not self.paramRegExp.match(name): + if not self.checkAllowed(alg_descr, name): + print alg_descr + " property (" + name +") violates conventions" + return False + + # passed all of the checks + return True + + def runTest(self): + self.__ranOk = 0 + self.algRegExp = re.compile(r'^[A-Z][a-zA-Z0-9]+$') + self.paramRegExp = re.compile(r'^[A-Z][a-zA-Z0-9]*$') + self.categoryRegExp = re.compile(r'^([A-Z][a-zA-Z]+\\?)+$') + + algs = AlgorithmFactory.getRegisteredAlgorithms(True) + + for (name, versions) in algs.iteritems(): + if not self.verifyAlgName(name): + self.__ranOk += 1 + continue + for version in versions: + # get an instance + alg = mantid.AlgorithmManager.create(name, version) + alg_descr = "%s(v%d)" % (name, version) + + # verify the categories + if not self.verifyCategories(alg_descr, alg.categories()): + self.__ranOk += 1 + + # verify the properties + props = alg.getProperties() + for prop in props: + if not self.verifyProperty(alg_descr, prop.name): + self.__ranOk += 1 + + + def validate(self): + if self.__ranOk > 0: + print "Found %d errors. 
Coding conventions found at" % self.__ranOk,\ + "http://www.mantidproject.org/Mantid_Standards" + return False + + return True + +class FitFunctions(stresstesting.MantidStressTest): + def verifyFuncName(self, name): + if name in FUNC_BAD_NAME: + return True + + if not self.funcRegExp.match(name): + print "Function " + name + " has a name that violates conventions" + return False + + if bool(len(name) > MAX_ALG_LEN): + print "%s has a name that is longer than " % name, \ + "%d characters (%d > %d)" % (MAX_ALG_LEN, len(name), MAX_ALG_LEN) + return False + + # passed all of the checks + return True + + def verifyCategories(self, name, categories): + if len(categories) <= 0: + print name + " has no categories" + + for category in categories: + # TODO remove the special case + if category == "C++ User Defined": + return True + + if not self.categoryRegExp.match(category): + print name + " has a bad category " + category + return False + + return True + + def checkAllowed(self, func, name): + if func not in FUNC_BAD_PARAMS.keys(): + return False + + return name in FUNC_BAD_PARAMS[func] + + def verifyParameter(self, alg_descr, name): + + if not self.paramRegExp.match(name): + if not self.checkAllowed(alg_descr, name): + print alg_descr + " property (" + name +") violates conventions" + return False + + # passed all of the checks + return True + + def runTest(self): + self.__ranOk = 0 + self.funcRegExp = re.compile(r'^[A-Z][a-zA-Z0-9]+$') + self.paramRegExp = re.compile(r'^[A-Z][a-zA-Z0-9]*$') + self.categoryRegExp = re.compile(r'^([A-Z][a-zA-Z]+\\?)+$') + + functions = mantid.api.FunctionFactory.getFunctionNames() + for name in functions: + if not self.verifyFuncName(name): + self.__ranOk += 1 + continue + + function = mantid.api.FunctionFactory.createFunction(name) + + if not self.verifyCategories(name, function.categories()): + self.__ranOk += 1 + + for i in xrange(function.numParams()): + if not self.verifyParameter(name, function.getParamName(i)): + self.__ranOk += 1 
+ + def validate(self): + if self.__ranOk > 0: + print "Found %d errors. Coding conventions found at" % self.__ranOk,\ + "http://www.mantidproject.org/Mantid_Standards" + return False + + return True diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ConvertToMDworkflow.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ConvertToMDworkflow.py new file mode 100644 index 0000000000000000000000000000000000000000..25ccc3e58010162cc1bae7ae26588d76235496f0 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ConvertToMDworkflow.py @@ -0,0 +1,85 @@ +import stresstesting +from mantid.simpleapi import * +from mantid.api import Workspace + + +#---------------------------------------------------------------------- +class ConvertToMDworkflow(stresstesting.MantidStressTest): + """ + """ + + + + def runTest(self): + + # let's load test event workspace, which has been already preprocessed and available in Mantid Test folder + WS_Name='CNCS_7860_event' + Load(Filename=WS_Name,OutputWorkspace=WS_Name) + # this workspace has been obtained from an inelastic experiment with input energy Ei = 3. + # Usually this energy is stored in workspace + # but if it is not, we have to provide it for inelastic conversion to work. + AddSampleLog(Workspace=WS_Name,LogName='Ei',LogText='3.0',LogType='Number') + # disable multithreaded splitting as BoxID-s are assigned in random manner + # AddSampleLog(Workspace=WS_Name,LogName='NUM_THREADS',LogText='0',LogType='Number') + # + # set up target ws name and remove target workspace with the same name which can occasionally exist. 
+ RezWS = 'WS_4D' + try: + DeleteWorkspace(RezWS) + except ValueError: + print "Target ws ",RezWS," not found in analysis data service\n" + # + #---> Start loop over contributing files + for i in xrange(0,20,5): + # the following operations simulate different workspaces, obtained from experiment using rotating crystal; + # For real experiment we usually just load these workspaces from nxspe files with proper Psi values defined there + # and have to set up ub matrix + SourceWS = 'SourcePart'+str(i) + # ws emulation begin ----> + CloneWorkspace(InputWorkspace=WS_Name,OutputWorkspace=SourceWS) + # using scattering on a crystal with cubic lattice and 1,0,0 direction along the beam. + SetUB(Workspace=SourceWS,a='1.4165',b='1.4165',c='1.4165',u='1,0,0',v='0,1,0') + # rotated by proper number of degrees around axis Y + AddSampleLog(Workspace=SourceWS,LogName='Psi',LogText=str(i)+'.0',LogType='Number Series') + SetGoniometer(Workspace=SourceWS,Axis0='Psi,0,1,0,1') + # ws emulation, end --------------------------------------------------------------------------------------- + + ConvertToMD(InputWorkspace=SourceWS,OutputWorkspace=RezWS,QDimensions='Q3D',QConversionScales='HKL',\ + OverwriteExisting=0,dEAnalysisMode='Direct',MinValues='-3,-3,-3,-1',MaxValues='3,3,3,3',\ + SplitInto="20,20,1,1") + # delete source workspace from memory; + DeleteWorkspace(SourceWS) + + + def validate(self): + """Returns the name of the workspace & file to compare""" + self.tolerance = 1e-5 + #elf.disableChecking.append('SpectraMap') + #elf.disableChecking.append('Instrument') + result = 'WS_4D' + reference = "ConvertToMDSample.nxs" + + valNames = [result,reference] + from mantid.simpleapi import Load,CompareMDWorkspaces,FrameworkManager,SaveNexus + + Load(Filename=reference,OutputWorkspace=valNames[1]) + + checker = AlgorithmManager.create("CompareMDWorkspaces") + checker.setLogging(True) + checker.setPropertyValue("Workspace1",result) + checker.setPropertyValue("Workspace2",valNames[1]) + 
import stresstesting
from mantid.kernel import *
from mantid.api import *
from mantid.simpleapi import *

# NOTE: each test below runs DensityOfStates on a small phonon/castep file
# and compares the output workspace against a stored reference. The
# attribute was consistently misspelled "ouput_ws_name" in the original;
# it is purely internal to each class, so renaming it is safe.

class DOSPhononTest(stresstesting.MantidStressTest):
    """DOS from a .phonon file with default settings."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSTest.nxs'

        DensityOfStates(File=file_name, OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSPhononCrossSectionScaleTest(stresstesting.MantidStressTest):
    """DOS scaled by the incoherent scattering cross-section."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSCrossSectionScaleTest.nxs'

        DensityOfStates(File=file_name, ScaleByCrossSection='Incoherent', OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSCastepTest(stresstesting.MantidStressTest):
    """DOS from a .castep file; must match the .phonon reference."""

    def runTest(self):
        file_name = 'squaricn.castep'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSTest.nxs'

        DensityOfStates(File=file_name, OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSRamanActiveTest(stresstesting.MantidStressTest):
    """Raman-active spectrum type."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        spec_type = 'Raman_Active'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSRamanTest.nxs'

        DensityOfStates(File=file_name, SpectrumType=spec_type, OutputWorkspace=self.output_ws_name)

    def validate(self):
        self.tolerance = 1e-3
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSIRActiveTest(stresstesting.MantidStressTest):
    """IR-active spectrum type."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        spec_type = 'IR_Active'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSIRTest.nxs'

        DensityOfStates(File=file_name, SpectrumType=spec_type, OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSPartialTest(stresstesting.MantidStressTest):
    """Partial DOS for the individual H, C and O contributions."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        spec_type = 'DOS'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSPartialTest.nxs'

        DensityOfStates(File=file_name, SpectrumType=spec_type, Ions="H,C,O", OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSPartialSummedContributionsTest(stresstesting.MantidStressTest):
    """
    This test checks the reference result of the total DOS against
    the summed partial contributions of all elements. The two should be roughly
    equal to within a small degree of error.
    """

    def runTest(self):
        file_name = 'squaricn.phonon'
        spec_type = 'DOS'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSTest.nxs'
        self.tolerance = 1e-10

        DensityOfStates(File=file_name, SpectrumType=spec_type, Ions="H,C,O", SumContributions=True, OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result

#------------------------------------------------------------------------------------

class DOSPartialCrossSectionScaleTest(stresstesting.MantidStressTest):
    """Partial DOS scaled by the incoherent scattering cross-section."""

    def runTest(self):
        file_name = 'squaricn.phonon'
        spec_type = 'DOS'
        self.output_ws_name = 'squaricn'
        self.ref_result = 'II.DOSPartialCrossSectionScaleTest.nxs'

        DensityOfStates(File=file_name, SpectrumType=spec_type, Ions="H,C,O", ScaleByCrossSection='Incoherent',
                        OutputWorkspace=self.output_ws_name)

    def validate(self):
        return self.output_ws_name, self.ref_result
+ """ + + def runTest(self): + + file_name = 'squaricn.phonon' + spec_type = 'DOS' + self.ouput_ws_name = 'squaricn' + self.ref_result = 'II.DOSCrossSectionScaleTest.nxs' + self.tolerance = 1e-10 + + DensityOfStates(File=file_name, SpectrumType=spec_type, Ions="H,C,O", SumContributions=True, + ScaleByCrossSection='Incoherent', OutputWorkspace=self.ouput_ws_name) + + def validate(self): + return self.ouput_ws_name, self.ref_result diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/Diffraction_Workflow_Test.py b/Code/Mantid/Testing/SystemTests/tests/analysis/Diffraction_Workflow_Test.py new file mode 100644 index 0000000000000000000000000000000000000000..c5d8891379615bff7f094be9b9d731de5e969b44 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/Diffraction_Workflow_Test.py @@ -0,0 +1,175 @@ +""" +System test that loads TOPAZ single-crystal data, +and runs Diffraction Workflow. +""" +import stresstesting +import numpy +from mantid.simpleapi import * +from mantid.api import FileFinder + +import os + +class Diffraction_Workflow_Test(stresstesting.MantidStressTest): + + def cleanup(self): + Files = ["TOPAZ_3132.hkl", + "TOPAZ_3132FFT.hkl"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + + def requiredMemoryMB(self): + """ Require about 4GB free """ + return 4000 + + def runTest(self): + import platform + if platform.system() == "Darwin": + import resource + # Activate core dumps to try & find the reason for the crashes + resource.setrlimit(resource.RLIMIT_CORE, (-1, -1)) + + # determine where to save + import os + savedir = os.path.abspath(os.path.curdir) + + + # Basic parameters for Triphylite Crystal + #Name of the workspaces to create + ws = "TOPAZ_3132" + filename = ws+"_event.nxs" + LoadEventNexus(Filename=filename,OutputWorkspace=ws,FilterByTofMin='3000',FilterByTofMax='16000') + + # Load optimized DetCal file + 
#LoadIsawDetCal(InputWorkspace=ws,Filename="/SNS/TOPAZ/shared/Spectra/TOPAZ_8Sept11.DetCal") + + # Spherical Absorption and Lorentz Corrections + AnvredCorrection(InputWorkspace=ws,OutputWorkspace=ws,LinearScatteringCoef="0.451",LinearAbsorptionCoef="0.993",Radius="0.14") + + # Convert to Q space + ConvertToDiffractionMDWorkspace(InputWorkspace=ws,OutputWorkspace=ws+'_MD2',LorentzCorrection='0', + OutputDimensions='Q (lab frame)', SplitInto='2',SplitThreshold='150') #,Version=1 + # Find peaks (Reduced number of peaks so file comparison with reference does not fail with small differences) + FindPeaksMD(InputWorkspace=ws+'_MD2',MaxPeaks='20',OutputWorkspace=ws+'_peaksLattice') + # 3d integration to centroid peaks + CentroidPeaksMD(InputWorkspace=ws+'_MD2',CoordinatesToUse='Q (lab frame)', + PeakRadius='0.12',PeaksWorkspace=ws+'_peaksLattice',OutputWorkspace=ws+'_peaksLattice') + # Find the UB matrix using the peaks and known lattice parameters + FindUBUsingLatticeParameters(PeaksWorkspace=ws+'_peaksLattice',a='10.3522',b='6.0768',c='4.7276', + alpha='90',beta='90',gamma='90', NumInitial='20', Tolerance='0.12') + # And index to HKL + IndexPeaks(PeaksWorkspace=ws+'_peaksLattice', Tolerance='0.12') + # Integrate peaks in Q space using spheres + IntegratePeaksMD(InputWorkspace=ws+'_MD2',PeakRadius='0.12', + BackgroundOuterRadius='0.18',BackgroundInnerRadius='0.15', + PeaksWorkspace=ws+'_peaksLattice',OutputWorkspace=ws+'_peaksLattice') + # Save for SHELX + SaveHKL(InputWorkspace=ws+'_peaksLattice', Filename=savedir+'/'+ws+'.hkl') + + # Find peaks again for FFT + FindPeaksMD(InputWorkspace=ws+'_MD2',MaxPeaks='100',OutputWorkspace=ws+'_peaksFFT') + # 3d integration to centroid peaks + CentroidPeaksMD(InputWorkspace=ws+'_MD2', CoordinatesToUse='Q (lab frame)', + PeakRadius='0.12',PeaksWorkspace=ws+'_peaksFFT',OutputWorkspace=ws+'_peaksFFT') + # Find the UB matrix using FFT + FindUBUsingFFT(PeaksWorkspace=ws+'_peaksFFT',MinD=3.,MaxD=14.) 
+ + ## TODO conventional cell + + # And index to HKL + alg = IndexPeaks(PeaksWorkspace=ws+'_peaksFFT', Tolerance='0.12') + + # Integrate peaks in Q space using spheres + IntegratePeaksMD(InputWorkspace=ws+'_MD2',PeakRadius='0.12', + BackgroundOuterRadius='0.18',BackgroundInnerRadius='0.15', + PeaksWorkspace=ws+'_peaksFFT',OutputWorkspace=ws+'_peaksFFT') + # Save for SHELX + SaveHKL(InputWorkspace=ws+'_peaksFFT', Filename=savedir+'/'+ws+'FFT.hkl') + + + # Copy the UB matrix back to the original workspace + CopySample(InputWorkspace=ws+'_peaksFFT',OutputWorkspace=ws, + CopyName='0',CopyMaterial='0',CopyEnvironment='0',CopyShape='0', CopyLattice=1) + # Convert to reciprocal space, in the sample frame + + ConvertToDiffractionMDWorkspace(InputWorkspace=ws,OutputWorkspace=ws+'_HKL', + OutputDimensions='HKL',LorentzCorrection='0', SplitInto='2',SplitThreshold='150') + # Bin to a regular grid + BinMD(InputWorkspace=ws+'_HKL',AlignedDim0="[H,0,0], -20, 20, 800",AlignedDim1="[0,K,0], -5, 5, 50", + AlignedDim2="[0,0,L], -10, 10, 800",OutputWorkspace=ws+'_binned') + + + originalUB = numpy.array(mtd["TOPAZ_3132"].sample().getOrientedLattice().getUB()) + w = mtd["TOPAZ_3132"] + s = w.sample() + ol = s.getOrientedLattice() + self.assertDelta( ol.a(), 4.712, 0.01, "Correct lattice a value not found.") + self.assertDelta( ol.b(), 6.06, 0.01, "Correct lattice b value not found.") + self.assertDelta( ol.c(), 10.41, 0.01, "Correct lattice c value not found.") + self.assertDelta( ol.alpha(), 90, 0.4, "Correct lattice angle alpha value not found.") + self.assertDelta( ol.beta(), 90, 0.4, "Correct lattice angle beta value not found.") + self.assertDelta( ol.gamma(), 90, 0.4, "Correct lattice angle gamma value not found.") + + # Go to HKL + ConvertToDiffractionMDWorkspace(InputWorkspace='TOPAZ_3132',OutputWorkspace='TOPAZ_3132_HKL',OutputDimensions='HKL',LorentzCorrection='1',SplitInto='2',SplitThreshold='150') + + + # Bin to a line (H=0 to 6, L=3, K=3) + 
BinMD(InputWorkspace='TOPAZ_3132_HKL',AxisAligned='0', + BasisVector0='X,units,1,0,0',BasisVector1='Y,units,6.12323e-17,1,0',BasisVector2='2,units,-0,0,1', + Translation='-0,3,6',OutputExtents='0,6, -0.1,0.1, -0.1,0.1',OutputBins='60,1,1', + OutputWorkspace='TOPAZ_3132_HKL_line') + + # Now check the integrated bin and the peaks + w = mtd["TOPAZ_3132_HKL_line"] + self.assertLessThan( w.signalAt(1), 1e4, "Limited background signal" ) + self.assertDelta( w.signalAt(10), 140.824, 1, "Peak 1") #self.assertDelta( w.signalAt(10), 1110.86, 10, "Peak 1") + self.assertDelta( w.signalAt(20), 36.25, 1, "Peak 2") #self.assertDelta( w.signalAt(20), 337.71, 10, "Peak 2") + self.assertDelta( w.signalAt(30), 26.53, 1, "Peak 3") #self.assertDelta( w.signalAt(30), 195.548, 10, "Peak 3") + + # Now do the same peak finding with Q in the sample frame + + + ConvertToDiffractionMDWorkspace(InputWorkspace='TOPAZ_3132',OutputWorkspace='TOPAZ_3132_QSample',OutputDimensions='Q (sample frame)',LorentzCorrection='1',SplitInto='2',SplitThreshold='150') + FindPeaksMD(InputWorkspace='TOPAZ_3132_QSample',PeakDistanceThreshold='0.12',MaxPeaks='200',OutputWorkspace='peaks_QSample') + FindUBUsingFFT(PeaksWorkspace='peaks_QSample',MinD='2',MaxD='16') + CopySample(InputWorkspace='peaks_QSample',OutputWorkspace='TOPAZ_3132',CopyName='0',CopyMaterial='0',CopyEnvironment='0',CopyShape='0') + + # Index the peaks and check + results = IndexPeaks(PeaksWorkspace='peaks_QSample') + indexed = results[0] + if indexed < 100: + raise Exception("Expected at least 100 of 100 peaks to be indexed. Only indexed %d!" 
% indexed) + + # Check the UB matrix + w = mtd["TOPAZ_3132"] + s = w.sample() + ol = s.getOrientedLattice() + self.assertDelta( ol.a(), 4.714, 0.01, "Correct lattice a value not found.") + self.assertDelta( ol.b(), 6.06, 0.01, "Correct lattice b value not found.") + self.assertDelta( ol.c(), 10.42, 0.01, "Correct lattice c value not found.") + self.assertDelta( ol.alpha(), 90, 0.4, "Correct lattice angle alpha value not found.") + self.assertDelta( ol.beta(), 90, 0.4, "Correct lattice angle beta value not found.") + self.assertDelta( ol.gamma(), 90, 0.4, "Correct lattice angle gamma value not found.") + + # Compare new and old UBs + newUB = numpy.array(mtd["TOPAZ_3132"].sample().getOrientedLattice().getUB()) + # UB Matrices are not necessarily the same, some of the H,K and/or L sign can be reversed + diff = abs(newUB) - abs(originalUB) < 0.001 + for c in xrange(3): + # This compares each column, allowing old == new OR old == -new + if not (numpy.all(diff[:,c]) ): + raise Exception("More than 0.001 difference between UB matrices: Q (lab frame):\n%s\nQ (sample frame):\n%s" % (originalUB, newUB) ) + + # load output hkl file and the golden one + LoadHKL(Filename="TOPAZ_3132.hkl", OutputWorkspace="TOPAZ_3132") + LoadHKL(Filename=os.path.join(os.path.dirname(__file__), 'ReferenceResults','TOPAZ_3132_reference.hkl'), + OutputWorkspace="TOPAZ_3132_golden") + + def validateMethod(self): + return "ValidateWorkspaceToWorkspace" + + def validate(self): + return ('TOPAZ_3132','TOPAZ_3132_golden') diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic.py b/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic.py new file mode 100644 index 0000000000000000000000000000000000000000..9262949c94743fc4b519aaf279f1761667f14022 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic.py @@ -0,0 +1,67 @@ +from stresstesting import MantidStressTest +from mantid.simpleapi import MaskDetectors, mtd, config 
+import Direct.DirectEnergyConversion as reduction
+import os
+
+class DirectInelasticDiagnostic(MantidStressTest):
+
+    def requiredMemoryMB(self):
+        """Requires 4Gb"""
+        return 4000
+
+    def runTest(self):
+        white = 'MAP17186.raw'
+        sample = 'MAP17269.raw'
+
+        # Libisis values to check against
+        tiny=1e-10
+        huge=1e10
+
+        v_out_lo = 0.01
+        v_out_hi = 100.
+
+        vv_lo = 0.1
+        vv_hi = 2.0
+        vv_sig = 0.0
+
+        sv_sig = 3.3
+        sv_hi = 1.5
+        sv_lo = 0.0
+        s_zero = True
+
+        reducer = reduction.setup_reducer('MAPS')
+        # parameters which explicitly affect diagnostics
+        #
+        reducer.prop_man.wb_integr_range = [20,300]
+        reducer.prop_man.bkgd_range=[12000,18000]
+        diag_mask = reducer.diagnose(white, sample, tiny=tiny, huge=huge,
+                                     van_out_lo=v_out_lo, van_out_hi=v_out_hi,
+                                     van_lo=vv_lo, van_hi=vv_hi, van_sig=vv_sig,
+                                     samp_lo=sv_lo, samp_hi=sv_hi, samp_sig=sv_sig, samp_zero=s_zero,hard_mask_file=None)
+
+        sample = reducer.get_run_descriptor(sample)
+        sample_ws = sample.get_workspace()
+        MaskDetectors(Workspace=sample_ws, MaskedWorkspace=diag_mask)
+
+        # Save the masked spectra numbers to a simple ASCII file for comparison
+        self.saved_diag_file = os.path.join(config['defaultsave.directory'], 'CurrentDirectInelasticDiag.txt')
+        handle = file(self.saved_diag_file, 'w')
+        for index in range(sample_ws.getNumberHistograms()):
+            if sample_ws.getDetector(index).isMasked():
+                spec_no = sample_ws.getSpectrum(index).getSpectrumNo()
+                handle.write(str(spec_no) + '\n')
+        # Must actually CALL close() (was a bare 'handle.close' attribute access,
+        # which is a no-op) so the file is flushed before validate() reads it.
+        handle.close()
+
+    def cleanup(self):
+        if os.path.exists(self.saved_diag_file):
+            if self.succeeded():
+                os.remove(self.saved_diag_file)
+            else:
+                os.rename(self.saved_diag_file, os.path.join(config['defaultsave.directory'], 'DirectInelasticDiag-Mismatch.txt'))
+
+    def validateMethod(self):
+        return 'validateASCII'
+
+    def validate(self):
+        return self.saved_diag_file, \
+            os.path.join(os.path.dirname(__file__), 'ReferenceResults','DirectInelasticDiagnostic.txt')
diff --git
a/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic2.py new file mode 100644 index 0000000000000000000000000000000000000000..238d5c266cdc9c02a394ee65279867fb139d14ae --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/DirectInelasticDiagnostic2.py @@ -0,0 +1,91 @@ +from stresstesting import MantidStressTest +from mantid.simpleapi import * +from mantid.kernel import PropertyManager +from mantid import config +import os + +def MAX_DBL(): + import sys + return sys.float_info[0]/2 + +def getNamedParameter(ws, name): + return ws.getInstrument().getNumberParameter(name)[0] + +class DirectInelasticDiagnostic2(MantidStressTest): + + def requiredMemoryMB(self): + """Requires 4Gb""" + return 4000 + + + def runTest(self): + red_man = PropertyManager() + red_man_name = "__dgs_reduction_properties" + pmds[red_man_name] = red_man + + if 'detvan' in mtd: + detvan = mtd['detvan'] + else: + detvan = Load('MAP17186.raw') + if 'sample' in mtd: + sample = mtd['sample'] + else: + sample = Load('MAP17269.raw') + + # Libisis values to check against + # All PropertyManager properties need to be set + red_man["LowCounts"] = 1e-10 + red_man["HighCounts"] = 1e10 + red_man["LowOutlier"] = 0.01 + red_man["HighOutlier"] = 100. + red_man["ErrorBarCriterion"] = 0.0 + red_man["MedianTestLow"] = 0.1 + red_man["MedianTestHigh"] = 2.0 + red_man["SamBkgMedianTestLow"] = 0.0 + red_man["SamBkgMedianTestHigh"] = 1.5 + red_man["SamBkgErrorbarCriterion"] = 3.3 + red_man["RejectZeroBackground"] = True + # Things needed to run vanadium reduction + red_man["IncidentBeamNormalisation"] = "ToMonitor" + red_man["DetVanIntRangeUnits"] = "Energy" + # properties affecting diagnostics: + + #reducer.wb_integr_range = [20,300] + red_man["DetVanIntRangeLow"] = 20. + red_man["DetVanIntRangeHigh"] = 300. + red_man["BackgroundCheck"] = True + red_man["BackgroundTofStart"]=12000. 
+        red_man["BackgroundTofEnd"]=18000.
+        #reducer.bkgd_range=[12000,18000]
+
+
+        diag_mask = DgsDiagnose(DetVanWorkspace=detvan, SampleWorkspace=sample,
+                                ReductionProperties=red_man_name)
+
+        MaskDetectors(sample, MaskedWorkspace=diag_mask)
+        # Save the masked spectra numbers to a simple ASCII file for comparison
+        self.saved_diag_file = os.path.join(config['defaultsave.directory'],
+                                            'CurrentDirectInelasticDiag2.txt')
+        handle = file(self.saved_diag_file, 'w')
+        for index in range(sample.getNumberHistograms()):
+            if sample.getDetector(index).isMasked():
+                spec_no = sample.getSpectrum(index).getSpectrumNo()
+                handle.write(str(spec_no) + '\n')
+        # Must actually CALL close() (was a bare 'handle.close' attribute access,
+        # which is a no-op) so the file is flushed before validate() reads it.
+        handle.close()
+
+    def cleanup(self):
+        if os.path.exists(self.saved_diag_file):
+            if self.succeeded():
+                os.remove(self.saved_diag_file)
+            else:
+                os.rename(self.saved_diag_file,
+                          os.path.join(config['defaultsave.directory'],
+                                       'DirectInelasticDiag2-Mismatch.txt'))
+
+    def validateMethod(self):
+        return 'validateASCII'
+
+    def validate(self):
+        return self.saved_diag_file, \
+            os.path.join(os.path.dirname(__file__),
+                         'ReferenceResults', 'DirectInelasticDiagnostic.txt')
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSBeamCenterAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSBeamCenterAPIv2.py
new file mode 100644
index 0000000000000000000000000000000000000000..11b5526b50482858a47b485eca2e6509e3c1dd76
--- /dev/null
+++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSBeamCenterAPIv2.py
@@ -0,0 +1,75 @@
+import stresstesting
+import mantid
+from mantid.simpleapi import *
+from reduction_workflow.instruments.sans.sns_command_interface import *
+from mantid.api import *
+
+import os
+
+def do_cleanup():
+    absfile = FileFinder.getFullPath("EQSANS_4061_event_reduction.log")
+    if os.path.exists(absfile):
+        os.remove(absfile)
+    return True
+
+class EQSANSBeamCenter(stresstesting.MantidStressTest):
+
+    def cleanup(self):
+        do_cleanup()
+        return True
+
+    def runTest(self):
+        config = 
ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_4061_event.nxs") + NoSolidAngle() + IndependentBinning(False) + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetTransmission(1.0, 0.0) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamCenter("EQSANS_1466_event.nxs") + Reduce() + # Scale up to match correct scaling. The reference data is off by a factor 10.0 + Scale(InputWorkspace="EQSANS_4061_event_frame2_Iq", Factor=10.0, + Operation='Multiply', OutputWorkspace="EQSANS_4061_event_frame2_Iq") + Scale(InputWorkspace="EQSANS_4061_event_frame2_Iq", Factor=277.781, + Operation='Multiply', OutputWorkspace="EQSANS_4061_event_frame2_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. + self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_4061_event_frame2_Iq", 'EQSANSBeamCenter.nxs' + +class EQSANSBeamCenterEvent(EQSANSBeamCenter): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + AppendDataFile("EQSANS_4061_event.nxs") + NoSolidAngle() + IndependentBinning(False) + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetTransmission(1.0, 0.0) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamCenter("EQSANS_1466_event.nxs") + Reduce() + # Scale up to match correct scaling. 
The reference data is off by a factor 10.0 + Scale(InputWorkspace="EQSANS_4061_event_frame2_Iq", Factor=10.0, + Operation='Multiply', OutputWorkspace="EQSANS_4061_event_frame2_Iq") + Scale(InputWorkspace="EQSANS_4061_event_frame2_Iq", Factor=277.781, + Operation='Multiply', OutputWorkspace="EQSANS_4061_event_frame2_Iq") diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSDarkCurrentAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSDarkCurrentAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..daa6d4bb8c9103520af4671ac0996f8408ec4e6e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSDarkCurrentAPIv2.py @@ -0,0 +1,50 @@ +import stresstesting +import mantid +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import * + +import os + +class EQSANSDarkCurrent(stresstesting.MantidStressTest): + + def cleanup(self): + absfile = FileFinder.getFullPath("EQSANS_1466_event_reduction.log") + if os.path.exists(absfile): + os.remove(absfile) + return True + + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + SolidAngle() + SetBeamCenter(96.29, 126.15) + PerformFlightPathCorrection(False) + UseConfig(False) + UseConfigTOFTailsCutoff(False) + SetTOFTailsCutoff(low_cut=0.00, high_cut=0.00) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetTransmission(1.0,0.0, False) + DarkCurrent("EQSANS_4061_event.nxs") + AppendDataFile("EQSANS_1466_event.nxs") + Reduce1D() + # Scale up to match correct scaling. 
+ Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + self.tolerance = 1.0 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + + return "EQSANS_1466_event_Iq", 'EQSANSDarkCurrent.nxs' + \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSEffAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSEffAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..60c2f0b67a7f3a7060b4b7bb2bc5dcb86d131d57 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSEffAPIv2.py @@ -0,0 +1,47 @@ +import stresstesting +import mantid +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import FileFinder + +import os + +class EQSANSEff(stresstesting.MantidStressTest): + + def cleanup(self): + absfile = FileFinder.getFullPath("EQSANS_1466_event_reduction.log") + if os.path.exists(absfile): + os.remove(absfile) + return True + + def runTest(self): + """ + System test for sensitivity correction + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + SetTransmission(1.0, 0.0) + TotalChargeNormalization(normalize_to_beam=False) + SensitivityCorrection("EQSANS_4061_event.nxs", min_sensitivity=0.5, max_sensitivity=1.5, dark_current=None, use_sample_dc=False) + Reduce1D() + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=277.781, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. 
+ # The following tolerance check the errors up to the third digit. + mtd["EQSANS_1466_event_Iq"].dataE(0)[0]=8.13907 + self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSEff.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSFlatTestAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSFlatTestAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..fe4ac6a3f4ca7bf74ca17dc0d9477eb5389962d6 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSFlatTestAPIv2.py @@ -0,0 +1,65 @@ +import stresstesting +import mantid +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * + +FILE_LOCATION = "/SNS/EQSANS/IPTS-5636/data/" + +class EQSANSFlatTest(stresstesting.MantidStressTest): + def requiredFiles(self): + files = [] + files.append(FILE_LOCATION+"EQSANS_5704_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5734_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5732_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5738_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5729_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5737_event.nxs") + files.append(FILE_LOCATION+"EQSANS_5703_event.nxs") + files.append("bl6_flux_at_sample") + return files + + def runTest(self): + """ + System test for EQSANS. + This test is meant to be run at SNS and takes a long time. + It is used to verify that the complete reduction chain works + and reproduces reference results. 
+ """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + SolidAngle() + DarkCurrent(FILE_LOCATION+"EQSANS_5704_event.nxs") + TotalChargeNormalization(beam_file="bl6_flux_at_sample") + AzimuthalAverage(n_bins=100, n_subpix=1, log_binning=False) + IQxQy(nbins=100) + UseConfigTOFTailsCutoff(True) + PerformFlightPathCorrection(True) + UseConfigMask(True) + SetBeamCenter(89.6749, 129.693) + SensitivityCorrection(FILE_LOCATION+'EQSANS_5703_event.nxs', + min_sensitivity=0.5, + max_sensitivity=1.5, use_sample_dc=True) + DirectBeamTransmission(FILE_LOCATION+"EQSANS_5734_event.nxs", + FILE_LOCATION+"EQSANS_5738_event.nxs", beam_radius=3) + ThetaDependentTransmission(False) + AppendDataFile([FILE_LOCATION+"EQSANS_5729_event.nxs"]) + CombineTransmissionFits(True) + + Background(FILE_LOCATION+"EQSANS_5732_event.nxs") + BckDirectBeamTransmission(FILE_LOCATION+"EQSANS_5737_event.nxs", + FILE_LOCATION+"EQSANS_5738_event.nxs", beam_radius=3) + BckThetaDependentTransmission(False) + BckCombineTransmissionFits(True) + SaveIqAscii(process='None') + SetAbsoluteScale(277.781) + Reduce1D() + + def validate(self): + self.tolerance = 0.3 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_5729_event_frame1_Iq", 'EQSANSFlatTest.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSIQOutputAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSIQOutputAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..69da2f6824d5a9db39f322aeee4afd1d4810c8e6 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSIQOutputAPIv2.py @@ -0,0 +1,275 @@ +import stresstesting +import math +import mantid +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import * + +import os + +def do_cleanup(): + Files = 
["EQSANS_4061_event_reduction.log", + "EQSANS_1466_event_reduction.log"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class EQSANSIQOutput(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS() + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_1466_event.nxs") + NoSolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + Reduce1D() + # Scale up to match correct scaling. + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + self.tolerance = 0.2 + mtd["EQSANS_1466_event_Iq"].dataY(0)[0] = 269.687 + mtd["EQSANS_1466_event_Iq"].dataE(0)[0] = 16.4977 + mtd["EQSANS_1466_event_Iq"].dataE(0)[1] = 6.78 + mtd["EQSANS_1466_event_Iq"].dataY(0)[2] = 11.3157 + mtd["EQSANS_1466_event_Iq"].dataE(0)[2] = 1.23419 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSIQOutput.nxs' + +class EQSANSBeamMonitor(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS() + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_1466_event.nxs") + NoSolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + 
BeamMonitorNormalization('SANSBeamFluxCorrectionMonitor.nxs') + Reduce1D() + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSBeamMonitor.nxs' + +class EQSANSDQPositiveOutput(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the Q resolution output of is correct + """ + + def runTest(self): + """ + Check that the Q resolution calculation returns positive values + even when background is larger than signal and I(q) is negative. + (Non-physical value that's an experimental edge case) + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS() + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_1466_event.nxs") + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetTransmission(1.0,0.0, False) + Background("EQSANS_4061_event.nxs") + Resolution() + Reduce1D() + + def validate(self): + dq = mtd['EQSANS_1466_event_Iq'].dataDx(0) + for x in dq: + if x<0: + return False + return True + +class EQSANSDQOutput(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the Q resolution output of is correct + """ + + def runTest(self): + """ + Check that the Q resolution calculation returns positive values + even when background is larger than signal and I(q) is negative. 
+ (Non-physical value that's an experimental edge case) + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS() + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_1466_event.nxs") + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetTransmission(1.0, 0.0, False) + Background("EQSANS_4061_event.nxs") + Resolution(10) + Reduce1D() + + def validate(self): + """ + Reference values were generate using the event-by-event method + and are slightly different than the ones generated using + the histogram method. + The event-by-event method processes each event one-by-one, + computes dQ for each of them, and averages those dQ for each + Q bin of the I(Q) distribution. + """ + dq_ref = [0.00178823,0.0014458,0.00144805,0.00155836,0.00150908, + 0.00163262,0.00158216,0.00160879,0.00165932,0.00164304, + 0.00165549,0.00163676,0.00167581,0.0016957,0.00167898, + 0.00172297,0.00169375,0.00174938,0.00173394,0.00180498, + 0.00188825,0.00184747,0.00181396,0.00185052,0.00191187, + 0.00192331,0.00196536,0.00196182,0.00202844,0.00205516, + 0.00208013,0.00210195,0.00212621,0.00217228,0.00217713, + 0.002243,0.00225329,0.00229956,0.00234733,0.00234773, + 0.00239551,0.00243152,0.0024392,0.00248026,0.00249286, + 0.00252012,0.00253674,0.00257043,0.00257755,0.00261695, + 0.00263961,0.00268499,0.0026836,0.00273043,0.00272828, + 0.00279073,0.00279924,0.00284322,0.00283794,0.00288332, + 0.00289423,0.00291934,0.00294244,0.00295239,0.00297587, + 0.00300671,0.00299071,0.00307836,0.00304013,0.00307726, + 0.00312929,0.00314636,0.00315895,0.00312642,0.00322729, + 0.00325368,0.00326916,0.00328936,0.00331894,0.00328319, + 0.00337098,0.00335638,0.00335586,0.00340926,0.00343972, + 0.00349148,0.003528,0.00352863,0.0035665,0.0036791, + 0.00360243,0.00364245,0.003671,0,0,0,0.00375495,0,0,0,0] + dq = mtd['EQSANS_1466_event_Iq'].readDx(0) + diff = [math.fabs(dq_ref[i]-dq[i])<0.0001 for i in range(7,100)] 
+ output = reduce(lambda x,y:x and y, diff) + if not output: + for i in range(len(dq)): + print i, dq[i], dq_ref[i], math.fabs(dq_ref[i]-dq[i])<0.0001 + return output + +class EQSANSDQOutput_FS(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the Q resolution output of is correct + """ + + def runTest(self): + """ + Check that the Q resolution calculation returns positive values + even when background is larger than signal and I(q) is negative. + (Non-physical value that's an experimental edge case) + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS() + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_4061_event.nxs") + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetTransmission(1.0,0.0, False) + Resolution(12) + Reduce1D() + + def validate(self): + """ + Reference values were generate using the event-by-event method + and are slightly different than the ones generated using + the histogram method. + The event-by-event method processes each event one-by-one, + computes dQ for each of them, and averages those dQ for each + Q bin of the I(Q) distribution. 
+ """ + dq_ref = [0.00255107356133, 0.00215833578128, 0.00208718785908, + 0.00258510271064, 0.00293816108702, 0.00247205866985, + 0.00243935430286, 0.00239444669495, 0.00222146661565, + 0.00218605712485, 0.00219528175558, 0.0022064529384, + 0.00222261319274, 0.00224172877526, 0.00225796674563, + 0.00228220728003, 0.00230427122347, 0.00232713464119, + 0.00235408216185, 0.00238474827119, 0.00240595507163, + 0.00243366105712, 0.00246093985138, 0.00248828126962, + 0.00251992966389, 0.00255373215231, 0.00259127844171, + 0.00263727405994, 0.00268617120932, 0.00273367187508, + 0.00277746568962, 0.00282377112768, 0.00287707862012, + 0.00292488071673, 0.00297083402995, 0.00302034443396, + 0.00306791149356, 0.00311128530472, 0.00315886049123, + 0.0032012867282, 0.00324181579199, 0.00328255488894, + 0.00332106647848, 0.00336006110389, 0.00339953376057, + 0.00343507183824, 0.00347168225631, 0.00350947714109, + 0.00354374653283, 0.00357867641742, 0.00361759403268, + 0.00365056833748, 0.00368612178547, 0.00372126622111, + 0.00375568496126, 0.00378827338665, 0.00382102059653, + 0.00386208119997, 0.00389527759712, 0.00392382196507, + 0.00395898855656, 0.00399254216973, 0.00402263239642, + 0.00405571908096, 0.0040850426166, 0.004115066991, + 0.00414251925121, 0.00417373849783, 0.00420187672507, + 0.00422580041865, 0.00425450461041, 0.00428409252891, + 0.0043057691751, 0.00434121835718, 0.00437168838538, + 0.00439831287327, 0.00443009051949, 0.00446383617502, + 0.00448646538796, 0.00452524116438, 0.00455891945975, + 0.00458584606578, 0.00461675547089, 0.00465411973842, + 0.00468084439834, 0.00470294856029, 0.0047424262336, + 0.00478414058644, 0.00481411031777, 0.00482401661572, + 0.00486137558128, 0.0049171158478, 0.00494417232844, + 0.00496567444129, 0.0049866092171, 0.00500861857974, + 0.00503217184255, 0.0, 0.0, 0.0, 0.0] + + + + dq = mtd['EQSANS_4061_event_frame1_Iq'].readDx(0) + diff = [math.fabs(dq_ref[i]-dq[i])<0.0001 for i in range(7,100)] + output = reduce(lambda x,y:x and 
y, diff) + + if not output: + for i in range(len(dq)): + print i, dq[i], dq_ref[i], math.fabs(dq_ref[i]-dq[i])<0.0001 + return output \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSNormalisationAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSNormalisationAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..f11e20d9589bdb514a10d33a5a4cea47beaeccd9 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSNormalisationAPIv2.py @@ -0,0 +1,150 @@ +import stresstesting +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import * +import os + +class EQSANSNormalisationNoFlux(stresstesting.MantidStressTest): + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + ws = "__eqsans_normalisation_test" + + EQSANSLoad(Filename="EQSANS_1466_event.nxs", OutputWorkspace=ws, + PreserveEvents=False, LoadMonitors=False) + EQSANSNormalise(InputWorkspace=ws, NormaliseToBeam=False, + OutputWorkspace=ws) + SumSpectra(InputWorkspace=ws, OutputWorkspace="eqsans_no_flux") + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + + return "eqsans_no_flux", 'EQSANSNormalisation_NoFlux.nxs' + +class EQSANSNormalisationDefault(stresstesting.MantidStressTest): + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + ws = "__eqsans_normalisation_test" + + EQSANSLoad(Filename="EQSANS_1466_event.nxs", 
OutputWorkspace=ws, + PreserveEvents=False, LoadMonitors=False) + EQSANSNormalise(InputWorkspace=ws,NormaliseToBeam=True, + OutputWorkspace=ws) + SumSpectra(InputWorkspace=ws, OutputWorkspace="eqsans_default_flux") + + def validate(self): + # This test only makes sense if /SNS is not available, + # otherwise we will end up using the actual beam file, + # which may not produce the same output. This test + # is meant to exercise the functionality to find the + # beam profile and will only produce the correct results + # on a system that is not hooked up to real instrument files. + if os.path.isdir('/SNS/EQSANS'): + return True + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + + return "eqsans_default_flux", 'EQSANSNormalisation_DefaultFlux.nxs' + +class EQSANSNormalisationInputFlux(stresstesting.MantidStressTest): + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + ws = "__eqsans_normalisation_test" + spectrum_file = "eqsans_beam_flux.txt" + + EQSANSLoad(Filename="EQSANS_1466_event.nxs", OutputWorkspace=ws, + PreserveEvents=False, LoadMonitors=False) + EQSANSNormalise(InputWorkspace=ws,NormaliseToBeam=True, + BeamSpectrumFile=spectrum_file, + OutputWorkspace=ws) + SumSpectra(InputWorkspace=ws, OutputWorkspace="eqsans_input_flux") + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + + return "eqsans_input_flux", 'EQSANSNormalisation_InputFlux.nxs' + +class EQSANSNormalisationBeamFlux(stresstesting.MantidStressTest): + """ + Analysis Tests for EQSANS + """ + + def runTest(self): + """ + Check that EQSANSTofStructure 
returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + self.prop_mng = "eqsans_normalise_options" + self.data_ws = "eqsans_normalise_data_ws" + + EQSANSLoad(Filename="EQSANS_3293_event.nxs", + NoBeamCenter=True, + ReductionProperties=self.prop_mng, + OutputWorkspace=self.data_ws) + + EQSANSNormalise(InputWorkspace=self.data_ws, + BeamSpectrumFile='SANSBeamFluxCorrectionMonitor.nxs', + NormaliseToMonitor=True, + ReductionProperties=self.prop_mng, + OutputWorkspace=self.data_ws) + + def validate(self): + ref_values = [9.66631788e-08, 1.99540011e-08, 0.00000000e+00, 2.84897084e-08, + 2.58802935e-08, 0.00000000e+00, 3.43023370e-08, 1.11017160e-08, + 3.22199520e-08, 8.31598470e-08, 3.05866692e-08, 3.00540473e-08, + 2.97218143e-08, 5.92981344e-08, 2.92735276e-08, 1.91616696e-08, + 4.63637972e-08, 8.94602703e-09, 4.34305480e-08, 1.71487695e-08, + 2.51816301e-08, 3.24283000e-08, 2.40811371e-08, 3.20081242e-08, + 8.03994116e-09, 3.23002602e-08, 2.43204630e-08, 7.99166600e-09, + 2.40009985e-08, 8.04082934e-09, 1.61818559e-08, 2.44975746e-08, + 0.00000000e+00, 2.49096583e-08, 0.00000000e+00, 8.48764614e-09, + 8.59073435e-09, 0.00000000e+00, 8.77853612e-09, 0.00000000e+00, + 3.69158961e-08, 2.16789982e-08, 1.41834793e-08] + + output_y = mtd[self.data_ws].readY(0) + if output_y[0]-ref_values[0] > 0.000006: + return False + if output_y[5]-ref_values[5] > 0.000006: + return False + if output_y[10]-ref_values[10] > 0.000006: + return False + if output_y[25]-ref_values[25] > 0.000006: + return False + + return True + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSProcessedEffAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSProcessedEffAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..eb9ec24a140e1def8b3104a40e11c2a8efb310bb --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSProcessedEffAPIv2.py @@ -0,0 +1,45 @@ +import stresstesting +import mantid 
+from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import FileFinder + +import os + +class EQSANSProcessedEff(stresstesting.MantidStressTest): + + def cleanup(self): + absfile = FileFinder.getFullPath("EQSANS_1466_event_reduction.log") + if os.path.exists(absfile): + os.remove(absfile) + return True + + def runTest(self): + """ + System test for sensitivity correction + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + SetTransmission(1.0, 0.0) + TotalChargeNormalization(normalize_to_beam=False) + SensitivityCorrection("EQSANS_sensitivity.nxs") + Reduce1D() + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=277.781, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSProcessedEff.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSSolidAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSSolidAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..429bfe2cd110e373be4e735d59cbd094ec24c5b5 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSSolidAPIv2.py @@ -0,0 +1,85 @@ +import stresstesting +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import * + +import os + +def do_cleanup(): + absfile = FileFinder.getFullPath("EQSANS_1466_event_reduction.log") + if os.path.exists(absfile): + os.remove(absfile) + print "cleaned" + return True + +class EQSANSSolid(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetBeamCenter(96.29, 126.15) + SetTransmission(1.0,0.0, False) + Reduce1D() + # Scale up to match correct scaling. 
+ Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + self.tolerance = 0.2 + mtd["EQSANS_1466_event_Iq"].dataY(0)[0] = 269.688 + mtd["EQSANS_1466_event_Iq"].dataE(0)[0] = 13.8013 + mtd["EQSANS_1466_event_Iq"].dataY(0)[2] = 11.3167 + + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + + return "EQSANS_1466_event_Iq", 'EQSANSSolid.nxs' + +class EQSANSSolidEvent(EQSANSSolid): + + def cleanup(self): + do_cleanup() + return True + """ + Analysis Tests for EQSANS + Testing that the I(Q) output of is correct + """ + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetBeamCenter(96.29, 126.15) + SetTransmission(1.0,0.0, False) + Reduce1D() + # Scale up to match correct scaling. 
+ Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSTransAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSTransAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..b6bc10883c35176b795f3dad23ba0dedcb60bb27 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EQSANSTransAPIv2.py @@ -0,0 +1,233 @@ +import stresstesting +import mantid +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.sns_command_interface import * +from mantid.api import * + +import os + +def do_cleanup(): + Files = ["EQSANS_4061_event_reduction.log", + "EQSANS_1466_event_reduction.log"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class EQSANSTransmission(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + CombineTransmissionFits(True) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamTransmission("EQSANS_1466_event.nxs", "EQSANS_4061_event.nxs", beam_radius=3) + ThetaDependentTransmission(True) + Reduce1D() + # Scale up to match correct scaling. + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSTrans.nxs' + +class EQSANSTransmissionEvent(EQSANSTransmission): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamTransmission("EQSANS_1466_event.nxs", "EQSANS_4061_event.nxs", beam_radius=3) + ThetaDependentTransmission(True) + Reduce1D() + # Scale up to match correct scaling. + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSTransEvent.nxs' + + +class EQSANSTransmissionDC(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + DarkCurrent("EQSANS_4061_event.nxs") + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamTransmission("EQSANS_1466_event.nxs", "EQSANS_1466_event.nxs", beam_radius=3) + ThetaDependentTransmission(True) + Reduce1D() + # Scale up to match correct scaling. + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSTransmissionDC.nxs' + +class EQSANSTransmissionCompatibility(EQSANSTransmission): + + def cleanup(self): + do_cleanup() + return True + + """ + Analysis Tests for EQSANS + Check that the transmission correction can be applied if the + sample run and transmission runs don't have the same binning + """ + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(True) + AppendDataFile("EQSANS_1466_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + SetBeamCenter(96.29, 126.15) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamTransmission("EQSANS_4061_event.nxs", "EQSANS_4061_event.nxs", beam_radius=3) + ThetaDependentTransmission(True) + Reduce1D() + # Scale up to match correct scaling. + Scale(InputWorkspace="EQSANS_1466_event_Iq", Factor=2777.81, + Operation='Multiply', OutputWorkspace="EQSANS_1466_event_Iq") + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.1 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_1466_event_Iq", 'EQSANSTransmissionCompatibility.nxs' + +class EQSANSTransmissionFS(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_4061_event.nxs") + SolidAngle() + UseConfig(False) + UseConfigTOFTailsCutoff(False) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + SetTransmission(0.5, 0.1) + ThetaDependentTransmission(False) + Reduce1D() + + def validate(self): + self.tolerance = 0.000001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_4061_event_frame1_Iq", 'EQSANSTransmissionFS.nxs' + +class EQSANSDirectTransFS(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + Check that EQSANSTofStructure returns the correct workspace + """ + config = ConfigService.Instance() + config["facilityName"]='SNS' + EQSANS(False) + SetBeamCenter(96.29, 126.15) + AppendDataFile("EQSANS_4061_event.nxs") + UseConfig(False) + SetTOFTailsCutoff(500, 500) + UseConfigMask(False) + TotalChargeNormalization(normalize_to_beam=False) + DirectBeamTransmission("EQSANS_4061_event.nxs", "EQSANS_4061_event.nxs", beam_radius=3) + ThetaDependentTransmission(False) + NoIQxQy() + Reduce1D() + Scale(InputWorkspace="EQSANS_4061_event_frame1_Iq", Factor=2.0, + Operation='Multiply', OutputWorkspace="EQSANS_4061_event_frame1_Iq") + + def validate(self): + # Relax the tolerance since the reference data is not for that 
exact + # scenario but for one that's very close to it. + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "EQSANS_4061_event_frame1_Iq", 'EQSANSDirectTransFS.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EllipsoidIntegr.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EllipsoidIntegr.py new file mode 100644 index 0000000000000000000000000000000000000000..30bc5c2ce8937ea69850fb92535f29e2d123e5c0 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EllipsoidIntegr.py @@ -0,0 +1,72 @@ +# File: EllipsoidIntegr.py +# +# Integrates a run using the ellipsoid technique + +import os +import sys +import shutil +import time + +import stresstesting +import numpy + + +from mantid.api import * +#sys.path.append("/home/ruth/GIT_MantidBuild/bin/") +from mantid.simpleapi import * + +class EllipsoidIntegr( stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """ Require about 12GB free """ + return 2000 + + def runTest(self): + # expected results with size determined + # automatically from projected event sigmas + inti_auto = [ 88, 99, 23, 33, 8, 8, 4 ] + sigi_auto = [ 13.784, 18.1384, 13.1529, 9.94987, 5.83095, 10.2956, 10.2956] + # expected results with fixed size + # ellipsoids + inti_fixed = [ 87.541, 95.3934, 21.3607, 33.4262, 7.36066, 9.68852, 3.54098 ] + sigi_fixed = [ 13.9656, 18.4523, 13.4335, 10.1106, 5.94223, 10.5231, 10.5375 ] + + # first, load peaks into a peaks workspace + + + peaks_file = "TOPAZ_3007.peaks" + peaks_ws_name="TOPAZ_3007_peaks" + LoadIsawPeaks( Filename=peaks_file,OutputWorkspace = peaks_ws_name) + + + # next, load events into an event workspace + event_file="TOPAZ_3007_bank_37_20_sec.nxs" + event_ws_name="TOPAZ_3007_events" + + LoadNexus(Filename=event_file, OutputWorkspace=event_ws_name) + # configure and test the algorithm + # using automatically 
determined + # ellipsoid sizes + IntegrateEllipsoids(event_ws_name, peaks_ws_name,".25","0",".2",".2",".25",OutputWorkspace=peaks_ws_name) + + peaks_ws = mtd[peaks_ws_name] + for i in range( 13, 20) : + + self.assertDelta( peaks_ws.getPeak(i).getIntensity(), inti_auto[i-13], 0.1 ) + self.assertDelta( peaks_ws.getPeak(i).getSigmaIntensity(), sigi_auto[i-13], 0.1 ) + + # configure and test the algorithm + # using fixed ellipsoid sizes + peaks_ws=IntegrateEllipsoids( event_ws_name,peaks_ws_name,.25,1,.2,.2,.25,OutputWorkspace=peaks_ws_name) + peaks_ws = mtd[peaks_ws_name] + + for i in range( 13,20 ): + self.assertDelta(peaks_ws.getPeak(i).getIntensity(), inti_fixed[i-13], 0.1 ) + self.assertDelta( peaks_ws.getPeak(i).getSigmaIntensity(), sigi_fixed[i-13], 0.1 ) + + def validate(self): + return True + + def requiredFiles(self): + + return ["TOPAZ_3007_bank_37_20_sec.nxs","TOPAZ_3007.peaks"] \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/EnginXCalibrateTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/EnginXCalibrateTest.py new file mode 100644 index 0000000000000000000000000000000000000000..eae4d552dd9dc4a99b95c2765456e4df7b36db29 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/EnginXCalibrateTest.py @@ -0,0 +1,31 @@ +import platform +import stresstesting +from mantid.simpleapi import * + +class EnginXCalibrateTest(stresstesting.MantidStressTest): + + def runTest(self): + positions = EnginXCalibrateFull(Filename = 'ENGINX00193749.nxs', + Bank = 1, + ExpectedPeaks = '1.3529, 1.6316, 1.9132') + + (self.difc, self.zero) = EnginXCalibrate(Filename = 'ENGINX00193749.nxs', + Bank = 1, + ExpectedPeaks = '2.7057,1.9132,1.6316,1.5621,1.3528,0.9566', + DetectorPositions = positions) + + def validate(self): + import sys + if sys.platform == "darwin": + # Mac fitting tests produce differences for some reason. 
+ self.assertDelta(self.difc, 18405.4, 0.1) + if int(platform.release().split('.')[0]) < 13: + self.assertDelta(self.zero, 3.53, 0.01) + else: + self.assertDelta(self.zero, 3.51, 0.01) + else: + self.assertDelta(self.difc, 18404.522, 0.001) + self.assertDelta(self.zero, 4.426, 0.001) + + def cleanup(self): + mtd.remove('positions') diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/FilteredLoadvsLoadThenFilter.py b/Code/Mantid/Testing/SystemTests/tests/analysis/FilteredLoadvsLoadThenFilter.py new file mode 100644 index 0000000000000000000000000000000000000000..23b8dbeedd39fd5c845aac8fd8fbcad844af76e0 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/FilteredLoadvsLoadThenFilter.py @@ -0,0 +1,18 @@ +import stresstesting +from mantid.simpleapi import * + +'''Tests that filtering with LoadEventNexus gives the same answer as loading the whole file and then filtering''' +class FilteredLoadvsLoadThenFilter(stresstesting.MantidStressTest): + + def runTest(self): + filteredLoad = LoadEventNexus("CNCS_7860_event.nxs",FilterByTimeStart=60.0,FilterByTimeStop=120.0,FilterByTofMin=-1e10,FilterByTofMax=1e10) + loadAll = LoadEventNexus("CNCS_7860_event.nxs",FilterByTimeStart=-1e10,FilterByTimeStop=1e10,FilterByTofMin=-1e10,FilterByTofMax=1e10) + loadAndFilter = FilterByTime(loadAll,StartTime=60.0,StopTime=120.0) + # This next step is needed otherwise the X boundaries are different causing CheckWorkspacesMatch to fail + loadAndFilter = RebinToWorkspace(WorkspaceToRebin=loadAndFilter,WorkspaceToMatch=filteredLoad) + + def validateMethod(self): + return "ValidateWorkspaceToWorkspace" + + def validate(self): + return 'filteredLoad','loadAndFilter' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/GEMTests.py b/Code/Mantid/Testing/SystemTests/tests/analysis/GEMTests.py new file mode 100644 index 0000000000000000000000000000000000000000..8f7bb3ec9c03fd66cdbe6564ab6e4795b6ab393f --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/GEMTests.py @@ -0,0 +1,185 @@ +import stresstesting +import os +from mantid.simpleapi import * + +class GEMTest(stresstesting.MantidStressTest): + + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.gss_file = '' + self.ref_gss_file = 'GEM58654.gss' + self.xye_tof_files = [] + self.ref_xye_tof_files = ['GEM58654_b1_TOF.dat','GEM58654_b2_TOF.dat','GEM58654_b3_TOF.dat','GEM58654_b4_TOF.dat','GEM58654_b5_TOF.dat','GEM58654_b6_TOF.dat'] + self.xye_d_files = [] + self.ref_xye_d_files = ['GEM58654_b1_D.dat','GEM58654_b2_D.dat','GEM58654_b3_D.dat','GEM58654_b4_D.dat','GEM58654_b5_D.dat','GEM58654_b6_D.dat'] + self.file_index = 0 + self.new_cal_file = '' + + def runTest(self): + # do something + LoadRaw(Filename=r'GEM59378.raw',OutputWorkspace='Vanadium',LoadLogFiles='0') + CreateSingleValuedWorkspace(OutputWorkspace='totuamps',DataValue='450.02215576200001') + Divide(LHSWorkspace='Vanadium',RHSWorkspace='totuamps',OutputWorkspace='Vanadium') + SolidAngle(InputWorkspace='Vanadium',OutputWorkspace='Corr') + CreateSingleValuedWorkspace(OutputWorkspace='Sc',DataValue='100') + Multiply(LHSWorkspace='Corr',RHSWorkspace='Sc',OutputWorkspace='Corr') + Divide(LHSWorkspace='Vanadium',RHSWorkspace='Corr',OutputWorkspace='Vanadium') + ConvertUnits(InputWorkspace='Vanadium',OutputWorkspace='Vanadium',Target='Wavelength') + Integration(InputWorkspace='Vanadium',OutputWorkspace='Vanadium',RangeLower='1.3999999999999999',RangeUpper='3') + Multiply(LHSWorkspace='Corr',RHSWorkspace='Vanadium',OutputWorkspace='Corr') + DeleteWorkspace('Vanadium') + CreateSingleValuedWorkspace(OutputWorkspace='Sc',DataValue='100000') + Divide(LHSWorkspace='Corr',RHSWorkspace='Sc',OutputWorkspace='Corr') + + self.new_cal_file = os.path.join(config['defaultsave.directory'],'offsets_2011_cycle111b_new.cal') + MaskDetectorsIf(InputWorkspace='Corr',Mode='DeselectIf',InputCalFile=r'offsets_2011_cycle111b.cal',OutputCalFile=self.new_cal_file) 
+ # load precompiled vanadium files + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-0.nxs',OutputWorkspace='Vanadium-1') + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-1.nxs',OutputWorkspace='Vanadium-2') + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-2.nxs',OutputWorkspace='Vanadium-3') + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-3.nxs',OutputWorkspace='Vanadium-4') + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-4.nxs',OutputWorkspace='Vanadium-5') + LoadNexusProcessed(Filename=r'van_gem59378_benchmark-5.nxs',OutputWorkspace='Vanadium-6') + # load data + LoadRaw(Filename=r'GEM58654.raw',OutputWorkspace='sample',LoadLogFiles='0') + LoadRaw(Filename=r'GEM58654.raw',OutputWorkspace='sampleadd',LoadLogFiles='0') + Plus(LHSWorkspace='sampleadd',RHSWorkspace='sample',OutputWorkspace='sample') + DeleteWorkspace('sampleadd') + CreateSingleValuedWorkspace(OutputWorkspace='totuamps',DataValue='600.05676269499997') + Divide(LHSWorkspace='sample',RHSWorkspace='totuamps',OutputWorkspace='sample') + + LoadRaw(Filename=r'GEM59381.raw',OutputWorkspace='Sempty',LoadLogFiles='0') + CreateSingleValuedWorkspace(OutputWorkspace='totuamps',DataValue='400.04138183600003') + Divide(LHSWorkspace='Sempty',RHSWorkspace='totuamps',OutputWorkspace='Sempty') + Minus(LHSWorkspace='sample',RHSWorkspace='Sempty',OutputWorkspace='sample') + DeleteWorkspace('Sempty') + AlignDetectors(InputWorkspace='sample',OutputWorkspace='sample',CalibrationFile=r'offsets_2011_cycle111b.cal') + Divide(LHSWorkspace='sample',RHSWorkspace='Corr',OutputWorkspace='sample') + DeleteWorkspace('Corr') + CreateSingleValuedWorkspace(OutputWorkspace='scale',DataValue='1') + Multiply(LHSWorkspace='sample',RHSWorkspace='scale',OutputWorkspace='sample') + ConvertUnits(InputWorkspace='sample',OutputWorkspace='sample',Target='Wavelength') + 
CylinderAbsorption(InputWorkspace='sample',OutputWorkspace='SampleTrans',AttenuationXSection='0.5',ScatteringXSection='1',SampleNumberDensity='1',NumberOfWavelengthPoints='100',CylinderSampleHeight='4',CylinderSampleRadius='0.40000000000000002',NumberOfSlices='10',NumberOfAnnuli='10') + Divide(LHSWorkspace='sample',RHSWorkspace='SampleTrans',OutputWorkspace='sample') + ConvertUnits(InputWorkspace='sample',OutputWorkspace='sample',Target='dSpacing') + DiffractionFocussing(InputWorkspace='sample',OutputWorkspace='sample',GroupingFileName=self.new_cal_file) + + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-1',EndWorkspaceIndex='0') + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-2',StartWorkspaceIndex='1',EndWorkspaceIndex='1') + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-3',StartWorkspaceIndex='2',EndWorkspaceIndex='2') + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-4',StartWorkspaceIndex='3',EndWorkspaceIndex='3') + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-5',StartWorkspaceIndex='4',EndWorkspaceIndex='4') + CropWorkspace(InputWorkspace='sample',OutputWorkspace='sample-6',StartWorkspaceIndex='5',EndWorkspaceIndex='5') + DeleteWorkspace('sample') + Divide(LHSWorkspace='sample-1',RHSWorkspace='Vanadium-1',OutputWorkspace='ResultD-1') + Divide(LHSWorkspace='sample-2',RHSWorkspace='Vanadium-2',OutputWorkspace='ResultD-2') + Divide(LHSWorkspace='sample-3',RHSWorkspace='Vanadium-3',OutputWorkspace='ResultD-3') + Divide(LHSWorkspace='sample-4',RHSWorkspace='Vanadium-4',OutputWorkspace='ResultD-4') + Divide(LHSWorkspace='sample-5',RHSWorkspace='Vanadium-5',OutputWorkspace='ResultD-5') + Divide(LHSWorkspace='sample-6',RHSWorkspace='Vanadium-6',OutputWorkspace='ResultD-6') + Rebin(InputWorkspace='ResultD-1',OutputWorkspace='ResultD-1',Params='0.559211,-0.004,37.6844') + Rebin(InputWorkspace='ResultD-2',OutputWorkspace='ResultD-2',Params='0.348675,-0.002,14.5631') + 
Rebin(InputWorkspace='ResultD-3',OutputWorkspace='ResultD-3',Params='0.169661,-0.0011546,8.06311') + Rebin(InputWorkspace='ResultD-4',OutputWorkspace='ResultD-4',Params='0.108284,-0.00111682,4.25328') + Rebin(InputWorkspace='ResultD-5',OutputWorkspace='ResultD-5',Params='0.0818697,-0.00109142,2.82906') + Rebin(InputWorkspace='ResultD-6',OutputWorkspace='ResultD-6',Params='0.0661098,-0.00105175,1.87008') + ConvertUnits(InputWorkspace='ResultD-1',OutputWorkspace='ResultTOF-1',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-1',OutputWorkspace='ResultD-1',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-1',OutputWorkspace='ResultTOF-1',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ConvertUnits(InputWorkspace='ResultD-2',OutputWorkspace='ResultTOF-2',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-2',OutputWorkspace='ResultD-2',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-2',OutputWorkspace='ResultTOF-2',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ConvertUnits(InputWorkspace='ResultD-3',OutputWorkspace='ResultTOF-3',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-3',OutputWorkspace='ResultD-3',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-3',OutputWorkspace='ResultTOF-3',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ConvertUnits(InputWorkspace='ResultD-4',OutputWorkspace='ResultTOF-4',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-4',OutputWorkspace='ResultD-4',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-4',OutputWorkspace='ResultTOF-4',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + 
ConvertUnits(InputWorkspace='ResultD-5',OutputWorkspace='ResultTOF-5',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-5',OutputWorkspace='ResultD-5',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-5',OutputWorkspace='ResultTOF-5',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ConvertUnits(InputWorkspace='ResultD-6',OutputWorkspace='ResultTOF-6',Target='TOF') + ReplaceSpecialValues(InputWorkspace='ResultD-6',OutputWorkspace='ResultD-6',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + ReplaceSpecialValues(InputWorkspace='ResultTOF-6',OutputWorkspace='ResultTOF-6',NaNValue='0',InfinityValue='0',BigNumberThreshold='99999999.999999985') + + # group and save + GroupWorkspaces(InputWorkspaces='ResultTOF-1,ResultTOF-2,ResultTOF-3,ResultTOF-4,ResultTOF-5,ResultTOF-6',OutputWorkspace='ResultTOFgrp') + + self.gss_file = os.path.join(config['defaultsave.directory'],'GEM58654_new.gss') + append=False + for i in range(1,7): + if i > 1: + append=True + SaveGSS(InputWorkspace='ResultTOF-%d' % i,Filename=self.gss_file,SplitFiles=False,Append=append,Bank=i) + + filename= os.path.join(config['defaultsave.directory'],r'GEM58654_b%d_TOF.dat' % i) + SaveFocusedXYE(InputWorkspace='ResultTOF-%d' % i,Filename=filename,SplitFiles=False,IncludeHeader='0') + self.xye_tof_files.append(filename) + + filename= os.path.join(config['defaultsave.directory'],r'GEM58654_b%d_D.dat' % i) + SaveFocusedXYE(InputWorkspace='ResultD-%d' % i,Filename=filename,SplitFiles=False,IncludeHeader='0') + self.xye_d_files.append(filename) + + def cleanup(self): + '''Remove temporary files''' + if os.path.exists(self.gss_file): + os.remove(self.gss_file) + if os.path.exists(self.new_cal_file): + os.remove(self.new_cal_file) + for file in self.xye_tof_files: + if os.path.exists(file): + os.remove(file) + for file in self.xye_d_files: + if os.path.exists(file): + os.remove(file) + + 
def doValidation(self): + '''Override doValidation to vaildate two things at the same time''' + self.disableChecking.append('Instrument') + # reset validate() method to call validateNexus() instead + self.validate = self.validateNexus + res = self.validateWorkspaceToNeXus() + if not res: + return False + # reset validate() method to call validateGSS() + self.validate = self.validateGSS + res = self.validateASCII() + if not res: + return False + # reset validate() method to call validateTOFXYE() + self.validate = self.validateTOFXYE + self.file_index = 0 + # file_index is incremented after each call to validateASCII() + res = self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() + if not res: + return False + # reset validate() method to call validateTOFXYE() + self.validate = self.validateDXYE + self.file_index = 0 + # file_index is incremented after each call to validateASCII() + res = self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() and self.validateASCII() + return res + + def validateNexus(self): + '''Compare the result of reduction with the reference nexus file''' + return 'ResultTOFgrp','GEM58654.nxs' + + def validateGSS(self): + '''Validate the created gss file''' + from mantid.api import FileFinder + return self.gss_file, FileFinder.getFullPath(self.ref_gss_file) + + def validateTOFXYE(self): + '''Validate the created gss file''' + from mantid.api import FileFinder + i = self.file_index + self.file_index += 1 + return self.xye_tof_files[i], FileFinder.getFullPath(self.ref_xye_tof_files[i]) + + def validateDXYE(self): + '''Validate the created gss file''' + from mantid.api import FileFinder + i = self.file_index + self.file_index += 1 + return self.xye_d_files[i], FileFinder.getFullPath(self.ref_xye_d_files[i]) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRBackgroundAPIv2.py 
b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRBackgroundAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..f047afcd211e0c050947f19a257c5da6e1571db0 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRBackgroundAPIv2.py @@ -0,0 +1,176 @@ +import stresstesting +import mantid +from mantid.api import FileFinder +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.hfir_command_interface import * + +import os + +def do_cleanup(): + Files = ["BioSANS_test_data_reduction.log", + "BioSANS_test_data_Iq.xml", + "BioSANS_test_data_Iq.txt", + "BioSANS_test_data_Iqxy.dat"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class HFIRBackground(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + SetBeamCenter(16, 95) + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackground.nxs' + +class HFIRBackgroundTransmission(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + SetBckTransmission(0.55, 0.1) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + 
self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackgroundTransmission.nxs' + +class HFIRBackgroundDirectBeamTrans(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml", + beam_radius=10.0) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackgroundDirectBeamTrans.nxs' + +class HFIRBackgroundBeamSpreaderTrans(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + BckBeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + AzimuthalAverage(binning="0.01,0.001,0.11") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackgroundBeamSpreaderTrans.nxs' + +class HFIRBackgroundTransDarkCurrent(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + 
def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml", + beam_radius=10.0) + BckTransmissionDarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackgroundTransDarkCurrent.nxs' + +class HFIRBackgroundDirectBeamTransDC(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + AppendDataFile("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml") + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml", + beam_radius=10.0) + BckTransmissionDarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRBackgroundDirectBeamTransDC.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HFIREffAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIREffAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..f8a80cfcea5188f2deebe6b260fbf75cd24c93d5 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIREffAPIv2.py @@ -0,0 +1,109 @@ +import stresstesting +import mantid +from 
mantid.api import FileFinder +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.hfir_command_interface import * + +import os + +def do_cleanup(): + Files = ["BioSANS_test_data_reduction.log", + "BioSANS_test_data_Iq.xml", + "BioSANS_test_data_Iq.txt", + "BioSANS_test_data_Iqxy.dat"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class HFIREffAPIv2(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + System test for sensitivity correction + """ + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIREff.nxs' + +class HFIRSensitivityDirectBeamCenter(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + System test for sensitivity correction + """ + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + SensitivityCorrection("BioSANS_flood_data.xml", + dark_current="BioSANS_dark_current.xml") + SensitivityDirectBeamCenter("BioSANS_empty_trans.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + 
self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRSensitivityDirectBeamCenter.nxs' + +class HFIRSensitivityScatteringBeamCenter(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + """ + System test for sensitivity correction + """ + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + SensitivityCorrection("BioSANS_flood_data.xml", + dark_current="BioSANS_dark_current.xml") + SensitivityScatteringBeamCenter("BioSANS_test_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRSensitivityScatteringBeamCenter.nxs' + + + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRReductionAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRReductionAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..17a7a32a2a9251169c771506d43c3f0ab8e0e6c8 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRReductionAPIv2.py @@ -0,0 +1,106 @@ +import stresstesting +import mantid +from mantid.api import FileFinder +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.hfir_command_interface import * + +import os + +def do_cleanup(): + Files = ["BioSANS_test_data_reduction.log", + "BioSANS_test_data_Iq.xml", + "BioSANS_test_data_Iq.txt", + "BioSANS_test_data_Iqxy.dat"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class 
HFIRReductionAPIv2(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + """ + Simple reduction example + """ + + def runTest(self): + + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + SensitivityCorrection("BioSANS_flood_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce() + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", "HFIRReduction.nxs" + +class HFIRAbsoluteScalingReference(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + """ + Test absolute scaling using a reference data set + """ + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + SolidAngle(detector_tubes=True) + MonitorNormalization() + AzimuthalAverage(binning="0.01,0.001,0.2") + SetBeamCenter(16.39, 95.53) + SetDirectBeamAbsoluteScale('BioSANS_empty_trans.xml') + AppendDataFile(["BioSANS_test_data.xml"]) + Reduce() + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", "HFIRAbsoluteScalingReference.nxs" + +class HFIRAbsoluteScalingValue(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + """ + Test absolute scaling using a reference data set + """ + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + SolidAngle(detector_tubes=True) + MonitorNormalization() + AzimuthalAverage(binning="0.01,0.001,0.2") + SetBeamCenter(16.39, 95.53) + SetAbsoluteScale(1.680537663117948) + 
AppendDataFile(["BioSANS_test_data.xml"]) + Reduce() + + def validate(self): + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", "HFIRAbsoluteScalingReference.nxs" + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTestsAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTestsAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..05e25ea38f739cbfaa79e0657a845943ee66b6c3 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTestsAPIv2.py @@ -0,0 +1,732 @@ +""" + System tests for HFIR SANS reduction. + + The following tests were converted from the unittest framework + that is part of python to the stresstesting framework used in Mantid. +""" +import stresstesting +from mantid.api import * +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.hfir_command_interface import * +import types +import traceback +import math +import os + +# Set directory containing the test data, relative to the Mantid release directory. +TEST_DIR = "." 
+data_search_dirs = ConfigService.Instance()["datasearch.directories"].split(';') +for item in data_search_dirs: + if item.endswith("SANS2D/"): + TEST_DIR = item +if len(TEST_DIR)==0: + raise RuntimeError, "Could not locate test data directory: [...]/Data/SANS2D" + +def _diff_iq(x,y): return x-y +def _add(x,y): return x+y + +def _read_IGOR(filepath): + """ + Read in an HFIR IGOR output file with reduced data + @param filepath: path of the file to be read + """ + data = [] + with open(filepath) as f: + # Skip first header line + f.readline() + for line in f: + toks = line.split() + try: + q = float(toks[0]) + iq = float(toks[1]) + diq = float(toks[2]) + data.append([q, iq, diq]) + except: + print "_read_IGOR:", sys.exc_value + return data + +def _check_result(ws, test_file, tolerance=1e-6): + """ + Compare the data in two reduced data files. + @param reduced_file: path of the Mantid-reduced file + @param test_file: path of the IGOR-reduced file + """ + passed = True + + # Read mantid data + x = ws.dataX(0)[:len(ws.dataX(0))] + y = ws.dataY(0) + e = ws.dataE(0) + data_mantid = zip(x,y,e) + + # Read the test data to compare with + data_igor = _read_IGOR(test_file) + + # Check length + if not len(data_mantid)==len(data_igor): + print "Incompatible data lengths" + return False + + # Utility methods for manipulating the lists + def _diff_chi2(x,y): return (x[1]-y[1])*(x[1]-y[1])/(x[2]*x[2]) + def _diff_iq(x,y): return x[1]-y[1] + def _diff_err(x,y): return x[2]-y[2] + def _add(x,y): return x+y + + # Check that I(q) is the same for both data sets + deltas = map(_diff_iq, data_mantid, data_igor) + delta = reduce(_add, deltas)/len(deltas) + if math.fabs(delta)>tolerance or math.isnan(delta): + passed = False + print "Sum of I(q) deltas is outside tolerance: %g > %g" % (math.fabs(delta), tolerance) + + # Then compare the errors + deltas = map(_diff_err, data_mantid, data_igor) + delta_err = reduce(_add, deltas)/len(deltas) + if math.fabs(delta_err)>tolerance or 
math.isnan(delta): + passed = False + print "Sum of dI(q) deltas is outside tolerance: %g > %g" % (math.fabs(delta_err), tolerance) + + # Compute chi2 of our result relative to IGOR + deltas = map(_diff_chi2, data_mantid, data_igor) + chi2 = reduce(_add, deltas)/len(data_igor) + if chi2>10.0*tolerance or math.isnan(delta): + passed= False + print "Chi2 is outside tolerance: %g > %g" % (chi2, 10.0*tolerance) + + return passed + +def do_cleanup(): + Files = ["GPSANS_reduction.log", + "BioSANS_exp61_scan0004_0001_Iq.txt", + "BioSANS_exp61_scan0004_0001_Iq.xml", + "BioSANS_exp61_scan0004_0001_Iqxy.dat", + "BioSANS_exp61_scan0004_0001_reduction.log", + "BioSANS_test_data_Iq.txt", + "BioSANS_test_data_Iq.xml", + "BioSANS_test_data_Iqxy.dat", + "BioSANS_test_data_reduction.log", + "test_data_Iq.txt", + "test_data_Iq.xml", + "test_data_Iqxy.dat", + "test_data_reduction.log"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class HFIRTestsAPIv2(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def assertTrue(self, condition): + if not condition: + raise RuntimeError, "Condition failed" + + def assertEqual(self, a, b): + if not a == b: + raise RuntimeError, "%s != %s" % (a, b) + + def _assertAlmostEqual(self, first, second, places=None, msg=None, delta=None, rel_delta=None): + return self.assertAlmostEqual(first, second, places, msg, delta, rel_delta) + + def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None, rel_delta=None): + if not assertAlmostEqual(first, second, places, msg, delta, rel_delta): + if msg is None: + msg = "Failed condition" + raise RuntimeError, msg + + def _cleanup(self): + ws_list = AnalysisDataService.getObjectNames() + for ws in ws_list: + AnalysisDataService.remove(ws) + + def runTest(self): + + class TestStub(object): + def __init__(self, test_method): + self._test_method = test_method + self._passed = True + 
+ def run_test(self): + # Set up the test + ReductionSingleton.clean() + # Execute the test + try: + print self._test_method.__name__ + return self._test_method() + except: + print traceback.format_exc() + return False + + self.all_passed = True + self.n_tests = 0 + self.n_passed = 0 + self.failed_tests = [] + for item in dir(self): + m = getattr(self, item) + if item.startswith("test_") and type(m)==types.MethodType: + self.n_tests += 1 + t = TestStub(m) + result = t.run_test() + self._cleanup() + if result is None or result==True: + self.n_passed += 1 + else: + self.failed_tests.append(item) + self.all_passed = False + + def test_data_path(self): + self.assertEqual(ReductionSingleton()._data_path, '.') + #any path that definitely exists on a computer with Mantid installed + test_path = os.path.normcase(ConfigService.Instance()['instrumentDefinition.directory']) + DataPath(test_path) + self.assertEqual(ReductionSingleton()._data_path, test_path) + + def test_set_detector_distance(self): + GPSANS() + DataPath(TEST_DIR) + AppendDataFile("BioSANS_test_data.xml") + SetSampleDetectorDistance(2500.0) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + sdd = ws.getRun().getProperty("sample_detector_distance").value + self.assertEqual(sdd, 2500.0) + + def test_set_detector_offset(self): + GPSANS() + DataPath(TEST_DIR) + AppendDataFile("BioSANS_test_data.xml") + SetSampleDetectorOffset(500.0) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + sdd = ws.getRun().getProperty("sample_detector_distance").value + self.assertEqual(sdd, 6500.0) + + def test_set_distance_and_detector_offset(self): + """ + If both detector distance and offset are set, use only the distance + """ + GPSANS() + DataPath(TEST_DIR) + AppendDataFile("BioSANS_test_data.xml") + SetSampleDetectorDistance(2500.0) + SetSampleDetectorOffset(500.0) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + sdd = 
ws.getRun().getProperty("sample_detector_distance").value + self.assertEqual(sdd, 2500.0) + + def test_set_wavelength(self): + GPSANS() + DataPath(TEST_DIR) + AppendDataFile("BioSANS_test_data.xml") + SetWavelength(5.0, 1.2) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + v_x = ws.dataX(0) + self.assertEqual(v_x[0], 4.4) + self.assertEqual(v_x[1], 5.6) + + def test_direct_beam_center(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + Reduce() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + center_x = ws.getRun().getProperty("beam_center_x").value + center_y = ws.getRun().getProperty("beam_center_y").value + self.assertAlmostEqual(center_x, 16.6038, delta=0.0001) + self.assertAlmostEqual(center_y, 96.771, delta=0.0001) + + propmng_name = ReductionSingleton().get_reduction_table_name() + p = PropertyManagerDataService.retrieve(propmng_name) + center_x = p.getProperty("LatestBeamCenterX").value + center_y = p.getProperty("LatestBeamCenterY").value + self.assertAlmostEqual(center_x, 16.6038, delta=0.0001) + self.assertAlmostEqual(center_y, 96.771, delta=0.0001) + + def test_hand_beam_center(self): + GPSANS() + SetBeamCenter(1.1, 2.2) + Reduce() + + propmng_name = ReductionSingleton().get_reduction_table_name() + p = PropertyManagerDataService.retrieve(propmng_name) + + center_x = p.getProperty("LatestBeamCenterX").value + center_y = p.getProperty("LatestBeamCenterY").value + + self.assertAlmostEqual(center_x, 1.1, delta=0.0001) + self.assertAlmostEqual(center_y, 2.2, delta=0.0001) + + def test_load_run(self): + GPSANS() + DataPath(TEST_DIR) + self.assertEqual(len(ReductionSingleton()._data_files), 0) + AppendDataFile("BioSANS_test_data.xml") + self.assertEqual(len(ReductionSingleton()._data_files), 1) + + def test_to_steps(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + 
DarkCurrent("BioSANS_dark_current.xml") + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + sdd = ws.getRun().getProperty("sample_detector_distance").value + self.assertEqual(sdd, 6000.0) + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + self.assertTrue(_check_result(ws, TEST_DIR+"reduced_center_calculated.txt", tolerance=1e-4)) + + def test_reduction_1(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SensitivityCorrection("BioSANS_flood_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + check = [0.19472,0.204269,0.215354,0.230114,0.238961,0.237201,0.247843,0.248424,0.253676,0.254327,0.254366,0.252931,0.258339,0.259297,0.257155,0.254059,0.252383,0.252826,0.256604,0.256754,0.255592,0.256813,0.248569,0.25331,0.251032,0.246424,0.249477,0.250939,0.251959,0.24925,0.250372,0.246148,0.250478,0.244621,0.247428,0.246431,0.245041,0.241647,0.24307,0.240096,0.242797,0.238182,0.237548,0.239789,0.241477,0.23456,0.237372,0.233715,0.233789,0.232262,0.231589,0.230986,0.231646,0.231331,0.230484,0.2277,0.226819,0.224341,0.227239,0.223228,0.221232,0.222011,0.224747,0.219533,0.216973,0.218734,0.21668,0.218366,0.214926,0.213985,0.214469,0.210473,0.209867,0.209066,0.208965,0.207498,0.204505,0.205786,0.202186,0.200442,0.200485,0.200554,0.200499,0.198152,0.193945,0.192082,0.193783,0.193787,0.190557,0.190471,0.186827,0.190088,0.188204,0.187547,0.182206,0.181384,0.180358,0.182663,0.178844,0.176556] + + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + self.assertTrue(math.fabs(delta)<0.00001) + + def test_no_solid_angle(self): + GPSANS() + DataPath(TEST_DIR) + 
DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + NoSolidAngle() + SensitivityCorrection("BioSANS_flood_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], 0.1948464330517794, delta=0.00001) + self.assertAlmostEqual(data[10], 0.25088976280978281, delta=0.00001) + self.assertAlmostEqual(data[20], 0.252098592791137, delta=0.00001) + + def test_reduction_2(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + DarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + check = [0.268942,0.272052,0.269806,0.27129,0.273852,0.271301,0.271732,0.271103,0.270996,0.269677,0.27098,0.266802,0.26789,0.268222,0.266125,0.262736,0.262752,0.263827,0.26315,0.262775,0.261541,0.260818,0.258955,0.257675,0.255908,0.254088,0.256778,0.256883,0.253568,0.25636,0.252323,0.251833,0.251914,0.252298,0.249375,0.247718,0.247768,0.244636,0.245604,0.243996,0.244332,0.244363,0.242985,0.242234,0.241118,0.241411,0.24084,0.239293,0.2392,0.236565,0.234557,0.233974,0.232905,0.231898,0.231085,0.229586,0.22862,0.227001,0.226783,0.225837,0.224835,0.223807,0.222296,0.221557,0.220464,0.219139,0.217611,0.217049,0.21606,0.215739,0.216233,0.213467,0.213141,0.213275,0.219695,0.216121,0.215502,0.21792,0.209364,0.209368,0.2064,0.205844,0.20431,0.203443,0.202442,0.200195,0.199408,0.19853,0.195654,0.195514,0.193086,0.193388,0.19137,0.190122,0.189119,0.18864,0.185473,0.184958,0.183981,0.182581] + + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + self.assertTrue(math.fabs(delta)<0.00001) + + def test_straight_Q1D(self): + GPSANS() + DataPath(TEST_DIR) + 
DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + check = [0.269037,0.272176,0.269917,0.271416,0.273988,0.271432,0.271857,0.271232,0.271118,0.269797,0.271095,0.266912,0.268015,0.268356,0.266256,0.26287,0.262888,0.263964,0.263281,0.262905,0.261669,0.26094,0.259081,0.257802,0.256029,0.254228,0.256913,0.257021,0.253692,0.256491,0.252454,0.251969,0.25204,0.252423,0.249516,0.247844,0.247895,0.24476,0.245734,0.244125,0.244474,0.244491,0.243126,0.242359,0.241239,0.24154,0.240976,0.239421,0.23933,0.236688,0.234685,0.234105,0.233034,0.232036,0.231208,0.229714,0.228749,0.227122,0.226918,0.225969,0.22497,0.223933,0.222426,0.221684,0.2206,0.219277,0.217739,0.217173,0.216193,0.215869,0.216354,0.213597,0.213271,0.213407,0.219829,0.216259,0.215635,0.218058,0.209499,0.209503,0.206529,0.205981,0.20445,0.203577,0.202577,0.200334,0.199544,0.198663,0.195786,0.195653,0.19322,0.193537,0.191503,0.190253,0.189253,0.188771,0.1856,0.185099,0.184111,0.182717] + + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + self.assertTrue(math.fabs(delta)<0.00001) + + def test_transmission(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + check = 
[0.514758,0.520759,0.516451,0.51932,0.524206,0.519275,0.520125,0.518997,0.518729,0.516198,0.518718,0.51072,0.512816,0.513449,0.509453,0.502968,0.503003,0.505098,0.503835,0.503088,0.500716,0.499304,0.495777,0.49332,0.489926,0.486497,0.491656,0.491858,0.48546,0.490808,0.483111,0.482176,0.482359,0.483098,0.477528,0.474279,0.474485,0.468472,0.470305,0.467228,0.467934,0.467971,0.465358,0.463885,0.461762,0.462352,0.461285,0.458322,0.458118,0.453064,0.44927,0.448151,0.446129,0.444207,0.442629,0.439792,0.437958,0.434826,0.434443,0.432655,0.430731,0.428771,0.425893,0.424477,0.422421,0.419886,0.416942,0.415876,0.414037,0.41339,0.414353,0.409062,0.408431,0.408712,0.419282,0.412833,0.41062,0.414427,0.400056,0.400141,0.394724,0.393821,0.390721,0.38932,0.387497,0.383062,0.381603,0.380016,0.374635,0.374214,0.369733,0.370353,0.366464,0.364109,0.362184,0.361299,0.355246,0.354339,0.352412,0.349748] + + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + self.assertTrue(math.fabs(delta)<0.001) + + def test_spreader_transmission(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + BeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + data = mtd["BioSANS_test_data_Iq"].dataY(0) + self.assertAlmostEqual(data[0], 0.00418831, delta=0.00001) + self.assertAlmostEqual(data[10], 0.0042193, delta=0.00001) + + def test_transmission_by_hand(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + property_manager = 
PropertyManagerDataService.retrieve(ReductionSingleton().get_reduction_table_name()) + p=property_manager.getProperty("TransmissionAlgorithm") + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + self.assertTrue(_check_result(ws, TEST_DIR+"reduced_transmission.txt", 0.0001)) + + def test_center_by_hand(self): + GPSANS() + DataPath(TEST_DIR) + SetBeamCenter(16, 95) + AppendDataFile("BioSANS_test_data.xml") + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + DarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + self.assertTrue(_check_result(ws, TEST_DIR+"reduced_center_by_hand.txt", 0.0001)) + + def test_background(self): + GPSANS() + DataPath(TEST_DIR) + SetBeamCenter(16, 95) + AppendDataFile("BioSANS_test_data.xml") + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], 0.0,10) + self.assertAlmostEqual(data[10], 0.0,10) + self.assertAlmostEqual(data[20], 0.0,10) + + def test_background_multiple_files(self): + """ + Subtracting background using multiple files should properly take + into account the normalization. 
+ """ + GPSANS() + DataPath(TEST_DIR) + SetBeamCenter(16, 95) + AppendDataFile("BioSANS_test_data.xml") + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + Background("BioSANS_test_data.xml,BioSANS_test_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], 0.0,10) + self.assertAlmostEqual(data[10], 0.0,10) + self.assertAlmostEqual(data[20], 0.0,10) + + def test_bck_w_transmission(self): + GPSANS() + DataPath(TEST_DIR) + SetBeamCenter(16, 95) + AppendDataFile("BioSANS_test_data.xml", "test_data") + SensitivityCorrection("BioSANS_flood_data.xml", dark_current="BioSANS_dark_current.xml") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + SetTransmission(0.6,0.1) + SetBckTransmission(0.6,0.1) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], 0.0,10) + self.assertAlmostEqual(data[10], 0.0,10) + self.assertAlmostEqual(data[20], 0.0,10) + + def test_transmission_by_hand_w_sensitivity(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml") + SetTransmission(0.51944, 0.011078) + SensitivityCorrection("BioSANS_flood_data.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_test_data_Iq") + data = ws.dataY(0) + check = 
[0.374914,0.393394,0.414756,0.443152,0.460175,0.456802,0.477264,0.478456,0.488523,0.489758,0.489871,0.487127,0.497585,0.499346,0.49526,0.489273,0.486082,0.486923,0.494208,0.494531,0.492264,0.494608,0.478766,0.487872,0.48357,0.474654,0.48052,0.483367,0.485269,0.480079,0.482254,0.47413,0.48245,0.471207,0.476589,0.474701,0.472014,0.465479,0.468236,0.462524,0.46773,0.458851,0.457653,0.461929,0.465216,0.451887,0.45733,0.450281,0.45045,0.447508,0.446209,0.445063,0.446328,0.445735,0.444096,0.438758,0.43707,0.432302,0.437903,0.430176,0.426317,0.427858,0.433131,0.423087,0.418146,0.421584,0.417606,0.420891,0.414255,0.412448,0.413393,0.405706,0.404541,0.403016,0.402806,0.400023,0.394248,0.396725,0.389808,0.386475,0.386525,0.386674,0.386575,0.382081,0.373986,0.370391,0.37367,0.373686,0.367479,0.36732,0.36031,0.366588,0.362994,0.361712,0.351433,0.349867,0.3479,0.352355,0.344987,0.340605] + + # Check that I(q) is the same for both data sets + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + self.assertTrue(math.fabs(delta)<0.00001) + + def test_SampleGeometry_functions(self): + print "SKIPPING test_SampleGeometry_functions()" + return + GPSANS() + DataPath(TEST_DIR) + AppendDataFile("BioSANS_test_data.xml") + SampleGeometry('cuboid') + SampleThickness(2.0) + SampleHeight(3.0) + SampleWidth(5.0) + + # we don't need to do a full reduction for this test, do a partial reduction + ReductionSingleton().pre_process() + ReductionSingleton()._reduction_steps[0].execute(ReductionSingleton(), "BioSANS_test_data") + ReductionSingleton().geometry_correcter.execute(ReductionSingleton(), "BioSANS_test_data") + + ws = AnalysisDataService.retrieve("BioSANS_test_data") + data = [ws.dataY(0)[0], ws.dataY(1)[0], ws.dataY(2)[0], ws.dataY(3)[0], ws.dataY(4)[0], ws.dataY(5)[0]] + + check = [500091.0,60.0,40.8333,13.6333, 13.4667,13.6667] + # Check that I(q) is the same for both data sets + deltas = map(_diff_iq, data, check) + delta = reduce(_add, deltas)/len(deltas) + 
self.assertTrue(math.fabs(delta)<0.1) + + def test_noDC_eff_with_DC(self): + ref = [28.06525, 136.94662, -16.20412, 0.00000, 147.79915, 146.42713, 302.00869, 0.00000, 0.00000,-1869.20724,-2190.89681,-1892.14939,-2140.79608,-1980.60037,-2096.75974,-2221.30118,-2263.51541,-2264.89989,-2364.83528,-2420.58152,-2444.51906,-2418.28886,-2606.16991,-2556.93660,-2623.71380,-2547.79671,-2670.60962,-2714.35237,-2717.01692,-2730.84974,-2768.92925,-2753.96396,-2732.66316,-2795.89687,-2780.37320,-2755.38910,-2814.88120,-2830.74081,-2803.42030,-2815.33244,-2754.70444,-2718.55136,-2740.03811,-2754.60415,-2815.96387,-2754.62039,-2781.54596,-2765.26282,-2676.04665,-2762.33751,-2722.94832,-2707.74990,-2730.50371,-2721.71272,-2682.02439,-2703.36446,-2679.47677,-2658.57573,-2669.41871,-2618.90655,-2638.41601,-2614.69128,-2583.29713,-2589.39730,-2567.19209,-2535.09328,-2539.43296,-2489.60117,-2500.76844,-2456.22248,-2444.13734,-2392.68589,-2410.98591,-2348.68064,-2334.84651,-2310.41426,-2250.24085,-2220.02192,-2184.65990,-2154.19638,-2099.56797,-2058.51585,-2004.05601,-1966.52356,-1910.47283,-1876.72098,-1817.69045,-1768.62167,-1721.56444,-1666.47199,-1608.86707,-1544.26178,-1492.78389,-1438.69256,-1358.60437,-1299.34476,-1221.57010,-1080.69421,-609.77891, -77.72765] + BIOSANS() + SetSampleDetectorOffset(837.9) + #SolidAngle() # name clash with SolidAngle algorithm + MonitorNormalization() + AzimuthalAverage(n_bins=100, n_subpix=1, log_binning=True) + #IQxQy(nbins=100) + DirectBeamCenter("BioSANS_empty_cell.xml") + SensitivityCorrection('BioSANS_flood_data.xml', min_sensitivity=0.5, max_sensitivity=1.5, dark_current='BioSANS_empty_trans.xml', use_sample_dc=False) + DivideByThickness(1) + SetTransmission(1, 0) + ThetaDependentTransmission(True) + DataPath(TEST_DIR) + AppendDataFile(["BioSANS_exp61_scan0004_0001.xml"]) + Background("BioSANS_test_data.xml") + SetBckTransmission(1, 0) + BckThetaDependentTransmission(True) + Reduce1D() + + ws = 
AnalysisDataService.retrieve("BioSANS_exp61_scan0004_0001_Iq") + res = ws.dataY(0) + for i in range(len(res)): + self._assertAlmostEqual(res[i], ref[i], delta=0.01, + rel_delta=0.001, + msg="result point %d: %g, found %g" % (i, ref[i], res[i])) + + def test_DC_eff_with_DC(self): + #ref = [8328.70241,8506.01586,5118.44441, 0.00000,7774.69442,8455.91783,14509.24224, 0.00000, 0.00000,-27551.42890,-34835.52157,-28076.35417,-32645.28731,-29923.90302,-32544.89749,-34519.58590,-35354.19282,-35242.21670,-37201.40137,-38547.80168,-38708.50152,-38339.04967,-41672.21115,-40898.80246,-41881.33026,-40789.34624,-43124.60460,-43846.74602,-43608.61731,-44050.49270,-44607.80184,-44662.71286,-44125.45576,-45197.75580,-45086.38543,-44502.49049,-45552.66509,-45678.42736,-45347.87980,-45613.96643,-44424.82296,-43888.62587,-44292.95665,-44465.13383,-45647.14865,-44450.82619,-44951.69404,-44597.94666,-43277.63573,-44605.52402,-44004.61793,-43774.86031,-44169.38692,-43970.30050,-43316.88231,-43786.96873,-43355.97746,-42952.99756,-43062.07976,-42184.58157,-42578.47214,-42199.41403,-41700.43004,-41780.97621,-41386.94893,-40865.71000,-40932.98886,-40036.67895,-40214.90469,-39471.74497,-39278.21830,-38383.80488,-38728.91704,-37705.78298,-37327.89414,-36943.11807,-35906.89550,-35399.21901,-34751.80556,-34209.49716,-33271.20006,-32530.08744,-31561.29164,-30906.03234,-29895.47664,-29278.16621,-28248.29021,-27341.79392,-26549.84441,-25476.57298,-24453.63444,-23305.85255,-22332.01538,-21306.01200,-19867.21655,-18795.14216,-17317.28374,-14745.54556,-6037.28367,4125.05228] + ref = 
[28.0476,136.906,-16.3079,0,147.757,146.403,301.982,0,0,-1869.21,-2190.93,-1892.16,-2140.81,-1980.62,-2096.79,-2221.34,-2263.55,-2264.93,-2364.87,-2420.61,-2444.56,-2418.32,-2606.21,-2556.98,-2623.75,-2547.84,-2670.66,-2714.39,-2717.06,-2730.89,-2768.96,-2754.01,-2732.7,-2795.93,-2780.41,-2755.42,-2814.92,-2830.79,-2803.46,-2815.38,-2754.75,-2718.6,-2740.08,-2754.65,-2816.01,-2754.66,-2781.59,-2765.3,-2676.09,-2762.38,-2722.99,-2707.8,-2730.55,-2721.76,-2682.07,-2703.41,-2679.52,-2658.62,-2669.46,-2618.95,-2638.46,-2614.74,-2583.34,-2589.44,-2567.23,-2535.14,-2539.48,-2489.64,-2500.81,-2456.26,-2444.18,-2392.73,-2411.03,-2348.73,-2334.89,-2310.46,-2250.28,-2220.07,-2184.7,-2154.24,-2099.61,-2058.56,-2004.1,-1966.57,-1910.52,-1876.76,-1817.73,-1768.67,-1721.61,-1666.51,-1608.91,-1544.31,-1492.83,-1438.74,-1358.65,-1299.39,-1221.61,-1080.73,-609.821,-77.7712] + BIOSANS() + SetSampleDetectorOffset(837.9) + #SolidAngle() + DarkCurrent("BioSANS_dark_current.xml") + MonitorNormalization() + AzimuthalAverage(n_bins=100, n_subpix=1, log_binning=True) + #IQxQy(nbins=100) + DirectBeamCenter("BioSANS_empty_cell.xml") + SensitivityCorrection('BioSANS_flood_data.xml', min_sensitivity=0.5, max_sensitivity=1.5, dark_current='BioSANS_empty_trans.xml', use_sample_dc=False) + DivideByThickness(1) + SetTransmission(1, 0) + ThetaDependentTransmission(True) + DataPath(TEST_DIR) + AppendDataFile(["BioSANS_exp61_scan0004_0001.xml"]) + Background("BioSANS_test_data.xml") + SetBckTransmission(1, 0) + BckThetaDependentTransmission(True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_exp61_scan0004_0001_Iq") + res = ws.dataY(0) + for i in range(len(res)): + self._assertAlmostEqual(res[i], ref[i], delta=0.01, + rel_delta=0.001, + msg="result point %d: %g, found %g" % (i, ref[i], res[i])) + + def test_DC_eff_noDC(self): + #ref = [7164.60565,7752.68818,5711.05627, 0.00000,5900.87667,8062.67404, 0.00000, 
0.00000,-24761.10043,-23989.79632,-27228.05671,-27520.90826,-28702.43297,-30016.08164,-31857.27731,-32831.96025,-33274.36135,-33765.95318,-35208.90831,-37330.42544,-38283.00967,-38157.84654,-40398.13178,-40807.56861,-40981.56490,-40010.58202,-42502.81591,-43001.82289,-42582.26700,-43857.23377,-44163.99857,-44732.14970,-43799.50312,-44791.12989,-44777.68791,-43985.74941,-45468.56174,-45452.90859,-45309.47499,-45759.04142,-43969.71697,-43854.45515,-44260.09016,-44420.83533,-45370.71500,-44500.35745,-45047.70688,-44404.89711,-43526.84357,-44566.97107,-43693.66349,-43741.61517,-44045.48712,-43860.53110,-43371.59488,-43623.05598,-43456.87922,-42905.84855,-42947.82849,-42114.29792,-42493.59647,-41998.37587,-41635.60470,-41808.27092,-41359.04234,-40774.21357,-40842.43155,-40073.84107,-40151.59039,-39504.86741,-39166.91772,-38472.64978,-38668.95577,-37731.30203,-37416.76227,-36798.92809,-35971.80065,-35477.59413,-34782.44503,-34089.54104,-33225.67613,-32520.31544,-31591.39201,-30937.42531,-29962.72283,-29241.95009,-28269.99833,-27317.23101,-26561.76975,-25533.91747,-24418.32912,-23309.34592,-22383.49546,-21298.00468,-19889.28546,-18800.07365,-17315.89420,-14744.66783,-6047.10832,4171.62004] + ref = 
[10.4139,124.814,25.0443,0,38.3413,133.417,0,0,-1733.56,-1627.57,-1811.38,-1851.58,-1888.38,-1957.07,-2056.47,-2117.52,-2139.32,-2176.94,-2239.91,-2350.65,-2417.75,-2406.99,-2525.48,-2551.45,-2566.83,-2499.38,-2632.35,-2662.17,-2653.14,-2718.65,-2740.78,-2758.94,-2712,-2771.35,-2761.38,-2724.05,-2809.97,-2815.92,-2801.25,-2824.54,-2726.76,-2716.63,-2737.83,-2752.06,-2798.95,-2757.7,-2787.58,-2753.12,-2691.47,-2759.93,-2703.94,-2705.55,-2722.64,-2714.75,-2685.28,-2693.49,-2685.75,-2655.65,-2662.42,-2614.47,-2633.12,-2602.29,-2579.4,-2591.17,-2565.28,-2529.61,-2533.85,-2491.87,-2496.78,-2458.25,-2437.25,-2398.16,-2407.29,-2350.32,-2340.43,-2301.5,-2254.37,-2224.97,-2186.64,-2146.73,-2096.71,-2058.12,-2006.2,-1968.6,-1914.93,-1874.31,-1819.05,-1767.14,-1722.35,-1670.38,-1606.61,-1544.51,-1496.24,-1438.21,-1360.12,-1299.68,-1221.61,-1080.91,-610.638,-71.9557] + BIOSANS() + SetSampleDetectorOffset(837.9) + #SolidAngle() + DarkCurrent("BioSANS_dark_current.xml") + MonitorNormalization() + AzimuthalAverage(n_bins=100, n_subpix=1, log_binning=True) + #IQxQy(nbins=100) + DirectBeamCenter("BioSANS_empty_cell.xml") + SensitivityCorrection('BioSANS_flood_data.xml', min_sensitivity=0.5, max_sensitivity=1.5, use_sample_dc=False) + DivideByThickness(1) + SetTransmission(1, 0) + ThetaDependentTransmission(True) + DataPath(TEST_DIR) + AppendDataFile(["BioSANS_exp61_scan0004_0001.xml"]) + Background("BioSANS_test_data.xml") + SetBckTransmission(1, 0) + BckThetaDependentTransmission(True) + Reduce1D() + + ws = AnalysisDataService.retrieve("BioSANS_exp61_scan0004_0001_Iq") + res = ws.dataY(0) + for i in range(len(res)): + self._assertAlmostEqual(res[i], ref[i], delta=0.01, + rel_delta=0.001, + msg="result point %d: %g, found %g" % (i, ref[i], res[i])) + + def test_transmission_beam_center(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml", "test_data") + SensitivityCorrection("BioSANS_flood_data.xml", 
dark_current="BioSANS_dark_current.xml") + DarkCurrent("BioSANS_dark_current.xml") + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml", + beam_radius=10.0) + SetTransmissionBeamCenter(100,15) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], 0.195821, delta=0.00001) + self.assertAlmostEqual(data[10], 0.256210, delta=0.00001) + self.assertAlmostEqual(data[20], 0.257666, delta=0.00001) + + def test_bck_transmission_default_beam_center(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml", "test_data") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + SetTransmission(0.6,0.1) + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", empty_file="BioSANS_empty_trans.xml", beam_radius=10.0) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + ws = AnalysisDataService.retrieve("test_data_Iq") + data = ws.dataY(0) + self.assertAlmostEqual(data[0], -0.0682723, delta=0.00001) + self.assertAlmostEqual(data[10], -0.068800, delta=0.00001) + self.assertAlmostEqual(data[20], -0.066403, delta=0.00001) + + def test_bck_transmission_set_beam_center(self): + GPSANS() + DataPath(TEST_DIR) + DirectBeamCenter("BioSANS_empty_cell.xml") + AppendDataFile("BioSANS_test_data.xml", "test_data") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + SetTransmission(0.6,0.1) + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", empty_file="BioSANS_empty_trans.xml", beam_radius=10.0) + SetBckTransmissionBeamCenter(100,15) + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + data = mtd["test_data_Iq"].dataY(0) + self.assertAlmostEqual(data[0], 0.1787709, delta=0.00001) + 
self.assertAlmostEqual(data[10], 0.1801518, delta=0.00001) + self.assertAlmostEqual(data[20], 0.1738586, delta=0.00001) + + def test_bck_transmission_direct_beam_center(self): + GPSANS() + DataPath(TEST_DIR) + #DirectBeamCenter("BioSANS_empty_cell.xml") + SetBeamCenter(100,15) + AppendDataFile("BioSANS_test_data.xml", "test_data") + DarkCurrent("BioSANS_dark_current.xml") + Background("BioSANS_test_data.xml") + SetTransmission(0.6,0.1) + BckDirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", empty_file="BioSANS_empty_trans.xml", beam_radius=10.0) + BckTransmissionDirectBeamCenter("BioSANS_empty_cell.xml") + AzimuthalAverage(binning="0.01,0.001,0.11", error_weighting=True) + Reduce1D() + + data = mtd["test_data_Iq"].dataY(0) + self.assertAlmostEqual(data[0], -0.046791, delta=0.00001) + self.assertAlmostEqual(data[10], -0.047874, delta=0.00001) + self.assertAlmostEqual(data[20], -0.047785, delta=0.00001) + + def validate(self): + print "HFIRTests: %d / %d tests passed" % (self.n_passed, self.n_tests) + for item in self.failed_tests: + print item + return self.all_passed + +def assertAlmostEqual(first, second, places=None, msg=None, delta=None, rel_delta=None): + """ + Simple test to compare two numbers + @return: True of the two numbers agree within tolerance + """ + if first == second: + # shortcut + return True + + if delta is not None and places is not None: + raise TypeError("specify delta or places not both") + + if delta is not None: + if abs(first - second) <= delta: + return True + elif abs(first - second)/abs(second)<rel_delta: + print '\n-----> %s != %s but within %s percent' % (str(first), + str(second), + str(rel_delta*100.0)) + return True + + standardMsg = '%s != %s within %s delta' % (str(first), + str(second), + str(delta)) + else: + if places is None: + places = 7 + + if round(abs(second-first), places) == 0: + return True + + standardMsg = '%s != %s within %r places' % (str(first), + str(second), + places) + print standardMsg + return 
False + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTransAPIv2.py b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTransAPIv2.py new file mode 100644 index 0000000000000000000000000000000000000000..dd7e3aedb779fb87adfc0b0919a27d344e1b704e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HFIRTransAPIv2.py @@ -0,0 +1,277 @@ +import stresstesting +import mantid +from mantid.api import FileFinder +from mantid.simpleapi import * +from reduction_workflow.instruments.sans.hfir_command_interface import * + +import os + +def do_cleanup(): + Files = ["BioSANS_test_data_reduction.log", + "BioSANS_test_data_Iq.xml", + "BioSANS_test_data_Iq.txt", + "BioSANS_test_data_Iqxy.dat"] + for file in Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class HFIRTrans(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml") + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTrans.nxs' + +class HFIRTrans(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + SetTransmission(0.522296, 0.009134) + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() 
+ + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTrans.nxs' + +class HFIRTransmissionDarkCurrent(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml") + TransmissionDarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionDarkCurrent.nxs' + +class HFIRTransmissionDirectBeamCenter(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml") + TransmissionDirectBeamCenter("BioSANS_empty_trans.xml") + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionDirectBeamCenter.nxs' + +class 
HFIRTransmissionBeamCenter(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + DirectBeamTransmission(sample_file="BioSANS_sample_trans.xml", + empty_file="BioSANS_empty_trans.xml") + SetTransmissionBeamCenter(16.389123399465063, + 95.530251864359087) + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionDirectBeamCenter.nxs' + +class HFIRTransmissionBeamSpreader(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + BeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionBeamSpreader.nxs' + +class HFIRTransmissionBeamSpreaderDC(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + 
config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + BeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + TransmissionDarkCurrent("BioSANS_dark_current.xml") + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionBeamSpreaderDC.nxs' + +class HFIRTransmissionBeamSpreaderDBC(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + BeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + TransmissionDirectBeamCenter("BioSANS_empty_trans.xml") + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionBeamSpreaderDBC.nxs' + +class HFIRTransmissionBeamSpreaderBC(stresstesting.MantidStressTest): + + def cleanup(self): + do_cleanup() + return True + + def runTest(self): + config = 
ConfigService.Instance() + config["facilityName"]='HFIR' + GPSANS() + DirectBeamCenter("BioSANS_empty_cell.xml") + TimeNormalization() + BeamSpreaderTransmission(sample_spreader="BioSANS_test_data.xml", + direct_spreader="BioSANS_empty_cell.xml", + sample_scattering="BioSANS_test_data.xml", + direct_scattering="BioSANS_empty_cell.xml", + spreader_transmission=0.5, + spreader_transmission_err=0.1) + SetTransmissionBeamCenter(16.389123399465063, + 95.530251864359087) + AzimuthalAverage(binning="0.01,0.001,0.11") + AppendDataFile("BioSANS_test_data.xml") + Reduce1D() + + def validate(self): + self.tolerance = 0.00001 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "BioSANS_test_data_Iq", 'HFIRTransmissionBeamSpreaderDBC.nxs' + + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/HYSPECReductionTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/HYSPECReductionTest.py new file mode 100644 index 0000000000000000000000000000000000000000..697b16639a5f80e803aea8e3ecf720d6e768d495 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/HYSPECReductionTest.py @@ -0,0 +1,52 @@ +""" +System test for HYSPEC reduction +""" + +from mantid.simpleapi import * +import os +import stresstesting + +class HYSPECReductionTest(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + return 5000 + + def requiredFiles(self): + return ['HYS_13656_event.nxs','HYS_13657_event.nxs','HYS_13658_event.nxs'] + + def cleanup(self): + if os.path.exists(self.groupingFile): + os.remove(self.groupingFile) + return True + + def runTest(self): + Load(Filename='HYS_13656-13658',OutputWorkspace='sum') + FilterByLogValue(InputWorkspace='sum',OutputWorkspace='sum1',LogName='s1',MinimumValue='0',MaximumValue='24.5',LogBoundary='Left') + DeleteWorkspace('sum') + 
GenerateEventsFilter(InputWorkspace='sum1',OutputWorkspace='splboth',InformationWorkspace='info',UnitOfTime='Nanoseconds',LogName='s1',MaximumLogValue='24.5',LogValueInterval='3') + FilterEvents(InputWorkspace='sum1',OutputWorkspaceBaseName='split',InformationWorkspace='info',SplitterWorkspace='splboth',FilterByPulseTime='1',GroupWorkspaces='1') + DeleteWorkspace('split_unfiltered') + DeleteWorkspace("splboth") + DeleteWorkspace("info") + DeleteWorkspace('sum1') + CompressEvents('split',0.1,OutputWorkspace='splitc') + DeleteWorkspace('split') + self.groupingFile=os.path.join(config.getString('defaultsave.directory'),'group4x2.xml') + GenerateGroupingSNSInelastic(AlongTubes="4",AcrossTubes="2",Instrument="HYSPEC",Filename=self.groupingFile) + config['default.facility']="SNS" + DgsReduction(SampleInputWorkspace='splitc',IncidentBeamNormalisation='ByCurrent',OutputWorkspace='reduced',GroupingFile=self.groupingFile,TimeIndepBackgroundSub ='1',TibTofRangeStart =10400,TibTofRangeEnd =12400,IncidentEnergyGuess=50) + DeleteWorkspace('splitc') + SetGoniometer('reduced',Axis0="s1,0,1,0,1") + SetUB('reduced',5.823,6.475,3.186,90,90,90,'0,1,0','0,0,1') + ConvertToMD(InputWorkspace='reduced',OutputWorkspace='md',QDimensions='Q3D',QConversionScales='HKL',MinValues='-0.5,-3,-5,-10',MaxValues='0.5,6,2,45') + DeleteWorkspace('reduced') + MergeMD(InputWorkspaces='md',OutputWorkspace='merged') + DeleteWorkspace("md") + BinMD(InputWorkspace='merged',AxisAligned='0',BasisVector0='[H,0,0],in 1.079 A^-1,1,0,0,0',BasisVector1='[0,K,0],in 0.97 A^-1,0,1,0,0',BasisVector2='[0,0,L],in 1.972 A^-1,0,0,1,0',BasisVector3='DeltaE,DeltaE,0,0,0,1',OutputExtents='-3,3,-2,6,-4,-1.5,-3,3',OutputBins='1,100,100,1',Parallel='1',OutputWorkspace='slice') + DeleteWorkspace("merged") + DeleteWorkspace("PreprocessedDetectorsWS") + + def validate(self): + self.tolerance = 1e-8 + return 'slice','HYSPECReduction_TIBasEvents.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD2BTest.py 
b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD2BTest.py new file mode 100644 index 0000000000000000000000000000000000000000..12c63c0b86cf39436d1f3a810e6d21780a34705a --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD2BTest.py @@ -0,0 +1,64 @@ +import stresstesting + +from mantid.api import mtd, IMDEventWorkspace +from mantid.simpleapi import LoadILLAscii + +import unittest + +class ILLD2BLoadTest(unittest.TestCase): + + ws_name = "d2b_ws" + prefix = "D2B" + dataFile = "ILL/ILL_D2B_121459.txt" + + def tearDown(self): + for wsName in mtd.getObjectNames(): + if wsName.startswith(self.prefix): + mtd.remove(wsName) + + #================== Success cases ================================ + def test_load_single_file(self): + self._run_load(self.dataFile) + + # Check some data + wsOut = mtd[self.ws_name] + self.assertEqual(wsOut.getNEvents(), 409600) + + + + def _run_load(self, dataFile): + """ + ILL Loader + """ + LoadILLAscii(Filename=dataFile,OutputWorkspace=self.ws_name) + self._do_ads_check(self.ws_name) + + def _do_ads_check(self, name): + self.assertTrue(name in mtd) + self.assertTrue(type(mtd[name]) == IMDEventWorkspace) + + + +#==================================================================================== + +class ILLD2BTest(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """Set a limit of 2.5Gb to avoid 32-bit environment""" + return 2500 + + def runTest(self): + self._success = False + # Custom code to create and run this single test suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(ILLD2BLoadTest, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD33Test.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD33Test.py new file mode 100644 index 
0000000000000000000000000000000000000000..24531122016bf66f605507069ffe9c98a6658609 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLD33Test.py @@ -0,0 +1,115 @@ +import stresstesting + +from mantid.api import mtd +from mantid.simpleapi import SetupILLD33Reduction, SANSReduction,Rebin,SANSAzimuthalAverage1D + +import unittest + +class ILLD33SANSTest(unittest.TestCase): + + prefix = "D33" + + def tearDown(self): + for wsName in mtd.getObjectNames(): + if wsName.startswith(self.prefix): + mtd.remove(wsName) + + def test_all(self): + + SetupILLD33Reduction( + # Beam center shouldn't work + #BeamCenterMethod="None", + MaskedDetectorList=[14709,14710,14711,14712,14713,14714,14715,14716,14717,14718,14719, + 14720,14721,14722,14723,14724,14725,14726,14727,14728,14729,14730, + 14731,14732,14733,14734,14735,14965,14966,14967,14968,14969,14970, + 14971,14972,14973,14974,14975,14976,14977,14978,14979,14980,14981, + 14982,14983,14984,14985,14986,14987,14988,14989,14990,14991,15221, + 15222,15223,15224,15225,15226,15227,15228,15229,15230,15231,15232, + 15233,15234,15235,15236,15237,15238,15239,15240,15241,15242,15243, + 15244,15245,15246,15247,15477,15478,15479,15480,15481,15482,15483, + 15484,15485,15486,15487,15488,15489,15490,15491,15492,15493,15494, + 15495,15496,15497,15498,15499,15500,15501,15502,15503,15733,15734, + 15735,15736,15737,15738,15739,15740,15741,15742,15743,15744,15745, + 15746,15747,15748,15749,15750,15751,15752,15753,15754,15755,15756, + 15757,15758,15759,15989,15990,15991,15992,15993,15994,15995,15996, + 15997,15998,15999,16000,16001,16002,16003,16004,16005,16006,16007, + 16008,16009,16010,16011,16012,16013,16014,16015,16245,16246,16247, + 16248,16249,16250,16251,16252,16253,16254,16255,16256,16257,16258, + 16259,16260,16261,16262,16263,16264,16265,16266,16267,16268,16269, + 16270,16271,16501,16502,16503,16504,16505,16506,16507,16508,16509, + 16510,16511,16512,16513,16514,16515,16516,16517,16518,16519,16520, + 
16521,16522,16523,16524,16525,16526,16527,16757,16758,16759,16760, + 16761,16762,16763,16764,16765,16766,16767,16768,16769,16770,16771, + 16772,16773,16774,16775,16776,16777,16778,16779,16780,16781,16782, + 16783,17013,17014,17015,17016,17017,17018,17019,17020,17021,17022, + 17023,17024,17025,17026,17027,17028,17029,17030,17031,17032,17033, + 17034,17035,17036,17037,17038,17039,17269,17270,17271,17272,17273, + 17274,17275,17276,17277,17278,17279,17280,17281,17282,17283,17284, + 17285,17286,17287,17288,17289,17290,17291,17292,17293,17294,17295, + 17525,17526,17527,17528,17529,17530,17531,17532,17533,17534,17535, + 17536,17537,17538,17539,17540,17541,17542,17543,17544,17545,17546, + 17547,17548,17549,17550,17551], + BeamCenterMethod="DirectBeam", + BeamCenterFile='ILL/001427.nxs', + Normalisation="Timer", + DarkCurrentFile= 'ILL/001420.nxs', + TransmissionMethod="DirectBeam", + TransmissionSampleDataFile= 'ILL/001431.nxs', + TransmissionEmptyDataFile= 'ILL/001427.nxs', + BckTransmissionEmptyDataFile= 'ILL/001427.nxs', + TransmissionBeamRadius = 3, + TransmissionUseSampleDC=False, + BackgroundFiles='ILL/001422.nxs', + BckTransmissionSampleDataFile='ILL/001428.nxs', + DoAzimuthalAverage=False, + Do2DReduction=False, + ComputeResolution=True, + ReductionProperties=self.prefix + "props") + + output=SANSReduction(Filename='ILL/001425.nxs', ReductionProperties=self.prefix + "props", + OutputWorkspace=self.prefix + "out") + Rebin(InputWorkspace=self.prefix + 'out',OutputWorkspace=self.prefix + 'out_rebin', + Params='4,0.1,15') + SANSAzimuthalAverage1D(InputWorkspace=self.prefix + 'out_rebin',Binning='0.001,0.0002,0.03', + OutputWorkspace=self.prefix + 'final') + + # Check some data + wsOut = mtd[self.prefix + 'out'] + self.assertEqual(wsOut.getNumberHistograms(), 65538) + wsOut = mtd[self.prefix + 'out_rebin'] + self.assertEqual(wsOut.getNumberHistograms(), 65538) + wsOut = mtd[self.prefix + 'final'] + self.assertEqual(wsOut.getNumberHistograms(), 1) + + + + 
#================== Failure cases ================================ + + # TODO + + + + +#==================================================================================== + +class ILLD33Test(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """Set a limit of 2.5Gb to avoid 32-bit environment""" + return 2500 + + def runTest(self): + self._success = False + # Custom code to create and run this single test suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(ILLD33SANSTest, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN4Test.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN4Test.py new file mode 100644 index 0000000000000000000000000000000000000000..d64e9ec7c2fdbf673c459c33c1ff6dc1594f3e5e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN4Test.py @@ -0,0 +1,75 @@ +import stresstesting + +from mantid.api import MatrixWorkspace, mtd +from mantid.simpleapi import LoadILL +from mantid.kernel import V3D + +import unittest + +DIFF_PLACES = 12 + +class ILLIN4Tests(unittest.TestCase): + + ws_name = "in4_ws" + dataFile = "ILL/ILLIN4_074252.nxs" + + def tearDown(self): + if self.ws_name in mtd: + mtd.remove(self.ws_name) + + #================== Success cases ================================ + def test_load_file(self): + self._run_load(self.dataFile) + + # Check some data + wsOut = mtd[self.ws_name] + self.assertEqual(wsOut.getNumberHistograms(), 397) + + # Check is the two detectors have the same theta + samplePos = wsOut.getInstrument().getSample().getPos() + beamDirection = V3D(0,0,1) + det9 = wsOut.getDetector(9) + det209 = wsOut.getDetector(209) + self.assertEqual(det9.getTwoTheta(samplePos, beamDirection), + det209.getTwoTheta(samplePos, beamDirection)) + + # 
Same mirror position + self.assertEqual(det9.getPos().getX(),det209.getPos().getX()) + self.assertEqual(det9.getPos().getZ(),det209.getPos().getZ()) + self.assertEqual(det9.getPos().getY(),-det209.getPos().getY()) + + #================== Failure cases ================================ + + # TODO + + + def _run_load(self, dataFile): + """ + ILL Loader + """ + LoadILL(Filename=dataFile,OutputWorkspace=self.ws_name) + self._do_ads_check(self.ws_name) + + def _do_ads_check(self, name): + self.assertTrue(name in mtd) + self.assertTrue(type(mtd[name]) == MatrixWorkspace) + +#==================================================================================== + +class LoadILLIN4Test(stresstesting.MantidStressTest): + + def runTest(self): + self._success = False + # Custom code to create and run this single test suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(ILLIN4Tests, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN5Test.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN5Test.py new file mode 100644 index 0000000000000000000000000000000000000000..9255b1eedb9253b4d3ee5416823c97eafbd02fde --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ILLIN5Test.py @@ -0,0 +1,89 @@ +import stresstesting + +from mantid.api import MatrixWorkspace, mtd +from mantid.simpleapi import LoadILL + +import unittest + +DIFF_PLACES = 12 + +class ILLIN5Tests(unittest.TestCase): + + wsData_name = "in5_ws_data" + wsVana_name = "in5_ws_vana" + dataDispersionFile = "ILL/ILLIN5_Sample_096003.nxs" + vanadiumFile = "ILL/ILLIN5_Vana_095893.nxs" + + + def tearDown(self): + if self.wsData_name in mtd: + mtd.remove(self.wsData_name) + if self.wsVana_name in mtd: + mtd.remove(self.wsVana_name) + + #================== 
Success cases ================================ + def test_load_single_file(self): + self._run_load(self.dataDispersionFile) + + # Check some data + wsOut = mtd[self.wsData_name] + self.assertEqual(wsOut.getNumberHistograms(), 98305) + + def test_load_dispersion_file_and_vanadium_file(self): + self._run_load(self.dataDispersionFile,self.vanadiumFile) + + # Check some data + wsOut = mtd[self.wsData_name] + self.assertEqual(wsOut.getNumberHistograms(), 98305) + + def test_load_dispersion_file_and_vanadium_workspace(self): + + self._run_load(self.vanadiumFile,outWSName=self.wsVana_name) + # Check some data + wsVana = mtd[self.wsVana_name] + self.assertEqual(wsVana.getNumberHistograms(), 98305) + + + self._run_load(self.dataDispersionFile,vanaFile=None,vanaWS=self.wsVana_name,outWSName=self.wsData_name) + + # Check some data + wsData = mtd[self.wsData_name] + self.assertEqual(wsData.getNumberHistograms(), 98305) + + #================== Failure cases ================================ + + # TODO + + #================== Private methods ================================ + + + def _run_load(self, dataFile, vanaFile=None,vanaWS=None,outWSName=wsData_name): + """ + ILL Loader + """ + LoadILL(Filename=dataFile,FilenameVanadium=None,WorkspaceVanadium=None,OutputWorkspace=outWSName) + self._do_ads_check(outWSName) + + def _do_ads_check(self, name): + self.assertTrue(name in mtd) + self.assertTrue(type(mtd[name]) == MatrixWorkspace) + +#==================================================================================== + +class LoadILLIN5Test(stresstesting.MantidStressTest): + + def runTest(self): + self._success = False + # Custom code to create and run this single test suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(ILLIN5Tests, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success 
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/INTERLoadingTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/INTERLoadingTest.py new file mode 100644 index 0000000000000000000000000000000000000000..8cc5a9c7feede2c51fa7789e43d0c9e92de02278 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/INTERLoadingTest.py @@ -0,0 +1,17 @@ +from LoadAndCheckBase import * + +''' +Test File loading and basic data integrity checks of INTER data in Mantid. +''' +class INTERLoadingTest(LoadAndCheckBase): + def get_raw_workspace_filename(self): + return "INTER00007709.raw" + + def get_nexus_workspace_filename(self): + return "INTER00007709.nxs" + + def get_integrated_reference_workspace_filename(self): + return "INTER00007709Integrated.nxs" + + def get_expected_instrument_name(self): + return "INTER" \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py new file mode 100644 index 0000000000000000000000000000000000000000..889c2128fd7d59685b47b43338b68133c0f6bc77 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectInelastic.py @@ -0,0 +1,403 @@ +import stresstesting +from mantid.simpleapi import * +from mantid.api import Workspace + +from abc import ABCMeta, abstractmethod +from Direct.PropertyManager import PropertyManager + + +#---------------------------------------------------------------------- +class ISISDirectInelasticReduction(stresstesting.MantidStressTest): + """A base class for the ISIS direct inelastic tests + + The workflow is defined in the runTest() method, simply + define an __init__ method and set the following properties + on the object + - instr_name: A string giving the instrument name for the test + - sample_run: An integer run number of the sample or a a workspace + - incident_energy: A float value for the Ei guess + - bins: A list of rebin parameters + - white_beam: An integer 
giving a white_beam_file or a workspace + - mono_van: An integer giving a mono-vanadium run or a workspace or None + - map_file: An optional string pointing to a map file + - sample_mass: A float value for the sample mass or None + - sample_rmm: A float value for the sample rmm or None + - hard_mask: An hard mask file or None + """ + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_reference_file(self): + """Returns the name of the reference file to compare against""" + raise NotImplementedError("Implement get_reference_file to return " + "the name of the file to compare against.") + + @abstractmethod + def get_result_workspace(self): + """Returns the result workspace to be checked""" + + @abstractmethod + def runTest(self): + """Defines the workflow for the test""" + # rename workspace to the name expected by unit test framework + + + + def validate(self): + """Returns the name of the workspace & file to compare""" + self.tolerance = 1e-6 + self.tolerance_is_reller=True + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + result = self.get_result_workspace() + reference = self.get_reference_file() + return result, reference + + def _is_numeric(self, obj): + """Returns true if the object is an int or float, false otherwise""" + if type(obj) != float or type(obj) != int: + return True + else: + return False + + def _is_workspace(self, obj): + """ Returns True if the object is a workspace""" + return isinstance(obj, Workspace) + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + # this is temporary parameter + self.scale_to_fix_abf=1 + +#------------------------- MARI tests ------------------------------------------------- + +class MARIReductionFromFile(ISISDirectInelasticReduction): + + def __init__(self): + ISISDirectInelasticReduction.__init__(self) + + from ISIS_MariReduction import ReduceMARIFromFile + + self.red = ReduceMARIFromFile() 
+ self.red.def_advanced_properties() + self.red.def_main_properties() + # temporary fix to account for different monovan integral + self.scale_to_fix_abf = 0.997979227566217 + + def runTest(self): + outWS = self.red.reduce() + outWS*=self.scale_to_fix_abf + + + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + def get_reference_file(self): + return "MARIReduction.nxs" + +class MARIReductionFromWorkspace(ISISDirectInelasticReduction): + + def __init__(self): + ISISDirectInelasticReduction.__init__(self) + + from ISIS_MariReduction import ReduceMARIFromWorkspace + + self.red = ReduceMARIFromWorkspace() + self.red.def_advanced_properties() + self.red.def_main_properties() + + self.scale_to_fix_abf = 0.997979227566217 + + + def runTest(self): + """Defines the workflow for the test""" + + outWS=self.red.reduce() + # temporary fix to account for different monovan integral + outWS*=self.scale_to_fix_abf + + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + + def get_reference_file(self): + return "MARIReduction.nxs" + +class MARIReductionMon2Norm(ISISDirectInelasticReduction): + + def __init__(self): + ISISDirectInelasticReduction.__init__(self) + + from ISIS_MariReduction import ReduceMARIMon2Norm + + self.red = ReduceMARIMon2Norm() + self.red.def_advanced_properties() + self.red.def_main_properties() + + def runTest(self): + """Defines the workflow for the test""" + + outWS=self.red.reduce() + # temporary fix to account for different monovan integral + outWS*=0.989834962505304 + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + + def get_reference_file(self): + return "MARIReduction.nxs" + + def validate(self): + result,reference = super(MARIReductionMon2Norm,self).validate() + self.tolerance = 1e-3 + return result,reference + + +class MARIReductionMonSeparate(ISISDirectInelasticReduction): + + def 
__init__(self): + ISISDirectInelasticReduction.__init__(self) + # This test has not been run properly so reference file is kind-of + # arbitrary. It just checks that this reduction works. + # Mari reduction masks are not correct for monitors loaded separately, + # This explains all the difference encountered. + from ISIS_MariReduction import ReduceMARIMonitorsSeparate + + self.red = ReduceMARIMonitorsSeparate() + self.red.def_advanced_properties() + self.red.def_main_properties() + + def runTest(self): + """Defines the workflow for the test""" + # temporary fix cross-influence of tests for MARI. changes to nex ticket make this unnecessary + PropertyManager.mono_correction_factor.set_cash_mono_run_number(None) + outWS=self.red.reduce() + # temporary fix to account for different monovan integral + outWS*=0.997966051169129 + + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + + def get_reference_file(self): + # monitor separate for MARI needs new maps and masks so, it is easier to redefine + # reference file for the time being + return "MARIReductionMonSeparate.nxs" + +class MARIReductionSum(ISISDirectInelasticReduction): + + def __init__(self): + + ISISDirectInelasticReduction.__init__(self) + from ISIS_MariReduction import MARIReductionSum + + self.red = MARIReductionSum() + self.red.def_advanced_properties() + self.red.def_main_properties() + + def runTest(self): + """Defines the workflow for the test + It verifies operation on summing two files on demand. 
No absolute units + """ + outWS=self.red.reduce() + #outWS*=1.00001556766686 + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + + def get_reference_file(self): + return "MARIReductionSum.nxs" + +#------------------------- MAPS tests ------------------------------------------------- + +class MAPSDgreduceReduction(ISISDirectInelasticReduction): + + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. Requires 10Gb""" + return 10000 + + def __init__(self): + ISISDirectInelasticReduction.__init__(self) + + from ISIS_MAPS_DGSReduction import ReduceMAPS + + self.red = ReduceMAPS() + self.red.def_advanced_properties() + self.red.def_main_properties() + + def runTest(self): + + outWS=self.red.reduce() + #New WBI value 0.02720959162181584 + #Old WBI Value 0.027209867107187088 + # fix old system test. + #outWS*=0.02720959162181584/0.027209867107187088 + + # rename workspace to the name expected by unit test framework + #RenameWorkspace(InputWorkspace=outWS,OutputWorkspace=wsName) + self.ws_name = 'outWS' + + + def get_reference_file(self): + return "MAPSDgreduceReduction.nxs" + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return self.ws_name + + +#------------------------- MERLIN tests ------------------------------------------------- + +class MERLINReduction(ISISDirectInelasticReduction): + + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. 
Requires 16Gb""" + return 16000 + + def __init__(self): + ''' Test relies on MERLIN_Parameters.xml file introduced in July 2014 + ''' + ISISDirectInelasticReduction.__init__(self) + + from ISIS_MERLINReduction import ReduceMERLIN + + self.red = ReduceMERLIN() + self.red.def_advanced_properties() + self.red.def_main_properties() + + def runTest(self): + outWS = self.red.reduce() + + def get_reference_file(self): + return "MERLINReduction.nxs" + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return "outWS" + + def validate(self): + self.tolerance = 1e-6 + self.tolerance_is_reller=True + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + result = self.get_result_workspace() + reference = self.get_reference_file() + return result, reference + +#------------------------- LET tests ------------------------------------------------- +# + +class LETReduction(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. Requires 2Gb""" + return 2000 + + def runTest(self): + """ + Run the LET reduction with event NeXus files + + Relies on LET_Parameters.xml file from June 2013 + """ + from ISIS_LETReduction import ReduceLET_OneRep + red = ReduceLET_OneRep() + red.def_main_properties() + red.def_advanced_properties() + + outWS=red.reduce() + + + def validate(self): + self.tolerance = 1e-6 + self.tolerance_is_reller=True + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + + return "outWS", "LETReduction.nxs" + +class LETReductionEvent2014Multirep(stresstesting.MantidStressTest): + """ + written in a hope that most of the stuff find here will eventually find its way into main reduction routines + """ + + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. 
Requires 20Gb""" + return 20000 + + def runTest(self): + """ + Run the LET reduction with event NeXus files + + Relies on LET_Parameters.xml file from June 2013 + """ + from ISIS_LETReduction import ReduceLET_MultiRep2014 + red = ReduceLET_MultiRep2014() + + red.def_advanced_properties() + red.def_main_properties() + + + out_ws_list=red.reduce() + + #mults =[41.178539329370217/41.178300987983413,72.235863046309746/72.231475173892022] + #New normalization for 3.4 meV: 41.178539329370217 + #Old normalization for 3.4 meV: 41.178300987983413 + #New normalization for 8 meV: 72.235863046309746 + #Old normalization for 8 meV: 72.231475173892022 + #for ind,ws in enumerate(out_ws_list): + # ws *=mults[ind] + + + + + + def validate(self): + self.tolerance = 1e-6 + self.tolerance_is_reller=False + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + + return "LETreducedEi3.4","LET14305_3_4mev.nxs","LETreducedEi8.0", "LET14305_8_0mev.nxs" + +class LETReductionEvent2015Multirep(stresstesting.MantidStressTest): + """ + written in a hope that most of the stuff find here will eventually find its way into main reduction routines + """ + + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. 
Requires 20Gb""" + return 20000 + + def runTest(self): + """ + Run the LET reduction with event NeXus files + + Relies on LET_Parameters.xml file from June 2013 + """ + from ISIS_LETReduction import ReduceLET_MultiRep2015 + red = ReduceLET_MultiRep2015() + + red.def_advanced_properties() + red.def_main_properties() + + + out_ws_list=red.reduce() + + #for ind,ws in enumerate(out_ws_list): + # ws *=mults[ind] + + + + + + def validate(self): + self.tolerance = 1e-6 + self.tolerance_is_reller=False + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + + return "LETreducedEi3.4","LET14305_3_4meV2015.nxs","LETreducedEi8.0", "LET14305_8_0meV2015.nxs" + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectReductionComponents.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectReductionComponents.py new file mode 100644 index 0000000000000000000000000000000000000000..6c4ce777dcf31e0bd4980a5336ccbe4a34fb5244 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISDirectReductionComponents.py @@ -0,0 +1,253 @@ +import os,sys +import stresstesting +from mantid.simpleapi import * +from mantid.api import Workspace,IEventWorkspace + +from Direct.PropertyManager import PropertyManager +from Direct.RunDescriptor import RunDescriptor +import ISIS_MariReduction as mr + +#---------------------------------------------------------------------- +class ISIS_ReductionWebLike(stresstesting.MantidStressTest): + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + + # prepare reduction variable + self.rd = mr.ReduceMARIFromFile() + self.rd.def_main_properties() + self.rd.def_advanced_properties() + + save_folder = config['defaultsave.directory'] + + self.rd.save_web_variables(os.path.join(save_folder,'reduce_vars.py')) + + + def runTest(self): + # run reduction using saved variables like web variables + web_var_folder = config['defaultsave.directory'] + sys.path.insert(0,web_var_folder) + reload(mr) 
+ + # change these variables to save result as nxs workspace + mr.web_var.advanced_vars['save_format']='nxs' + # web services currently needs input file to be defined + input_file = 'MAR11001.RAW' + rez = mr.main(input_file,web_var_folder) + + # verify if result was indeed written + self.rd.reducer.sample_run = input_file + saveFileName = self.rd.reducer.save_file_name + oputputFile = os.path.join(web_var_folder,saveFileName+'.nxs') + + self.assertTrue(os.path.exists(oputputFile)) + + web_var_file = os.path.join(web_var_folder,'reduce_vars') + if os.path.exists(web_var_file+'.py'): + os.remove(web_var_file+'.py') + if os.path.exists(web_var_file+'.pyc'): + os.remove(web_var_file+'.pyc') + + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + saveFileName = self.rd.reducer.save_file_name + outWS = Load(Filename=saveFileName+'.nxs') + outWS *= 0.997979227566217 + return "outWS" + def get_reference_file(self): + return "MARIReduction.nxs" + + def validate(self): + """Returns the name of the workspace & file to compare""" + self.tolerance = 1e-6 + self.tolerance_is_reller=True + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + result = self.get_result_workspace() + reference = self.get_reference_file() + return result, reference + +class ISIS_ReductionWrapperValidate(stresstesting.MantidStressTest): + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.result = False + + + def runTest(self): + # prepare reduction variable + rd = mr.ReduceMARIFromFile() + rd.def_main_properties() + rd.def_advanced_properties() + + self.result,message = rd.validate_result() + if not self.result: + print "*** Validation failed: {0}".format(message) + + + + def validate(self): + """Returns the name of the workspace & file to compare""" + return self.result + + +#---------------------------------------------------------------------- +class 
ISISLoadFilesRAW(stresstesting.MantidStressTest): + + + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.valid = False + + def runTest(self): + propman = PropertyManager('MAR') + + propman.sample_run = 11001 + propman.load_monitors_with_workspace = True + + mon_ws = PropertyManager.sample_run.get_monitors_ws() + ws = PropertyManager.sample_run.get_workspace() + + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),922) + + DeleteWorkspace(ws) + + propman.load_monitors_with_workspace = False + propman.sample_run = 11001 + ws = PropertyManager.sample_run.get_workspace() + mon_ws = PropertyManager.sample_run.get_monitors_ws() + + self.assertEqual(ws.getNumberHistograms(),919) + self.assertEqual(mon_ws.getNumberHistograms(),3) + wsName = ws.name() + self.assertEqual(wsName,PropertyManager.sample_run.get_ws_name()) + + # + propman = PropertyManager('MAPS') + propman.sample_run = 17186 + propman.load_monitors_with_workspace = False + + mon_ws = PropertyManager.sample_run.get_monitors_ws() + ws = PropertyManager.sample_run.get_workspace() + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),41472) + self.assertEqual(mon_ws.getNumberHistograms(),4) + # + self.valid = True + + def validate(self): + return self.valid + +class ISISLoadFilesMER(stresstesting.MantidStressTest): + + + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.valid = False + + def runTest(self): + # + propman = PropertyManager('MER') + propman.sample_run = 6398 # (raw file) + propman.det_cal_file = 6399 + propman.load_monitors_with_workspace = False + + mon_ws = PropertyManager.sample_run.get_monitors_ws() + self.assertTrue(not(mon_ws is None)) + + ws = PropertyManager.sample_run.get_workspace() + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),69632) + self.assertEqual(mon_ws.getNumberHistograms(),9) + + # test load together + propman.sample_run = 
None # (clean things up) + propman.load_monitors_with_workspace = True + propman.sample_run = 6398 + + mon_ws = PropertyManager.sample_run.get_monitors_ws() + self.assertTrue(not(mon_ws is None)) + ws = PropertyManager.sample_run.get_workspace() + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),69641) + self.assertEqual(mon_ws.getNumberHistograms(),69641) + + + propman.sample_run = 18492 # (histogram nxs file ) + propman.det_cal_file = None + mon_ws = PropertyManager.sample_run.get_monitors_ws() + self.assertTrue(not(mon_ws is None)) + ws = PropertyManager.sample_run.get_workspace() + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),69641) + self.assertEqual(mon_ws.getNumberHistograms(),69641) + + + self.valid = True + return + # enable when bug #10980 is fixed + propman.sample_run = 18492 # (histogram nxs file ) + propman.det_cal_file = None + mon_ws = PropertyManager.sample_run.get_monitors_ws() + self.assertTrue(not(mon_ws is None)) + + ws = PropertyManager.sample_run.get_workspace() + self.assertTrue(isinstance(ws,Workspace)) + self.assertEqual(ws.getNumberHistograms(),69632) + self.assertEqual(mon_ws.getNumberHistograms(),9) + + + self.valid = True + + + def validate(self): + return self.valid + +class ISISLoadFilesLET(stresstesting.MantidStressTest): + + + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.valid = False + + def runTest(self): + + # + propman = PropertyManager('LET') + + + propman.sample_run = 6278 #event nexus file + propman.load_monitors_with_workspace = False + + # Here we have known problem of propman loading new IDF, and + # workspace is written using old IDF. New IDF has mon1_norm_spec =73729 + # and ei_mon1_spec=73734 (on January 2015) and old + # IDF -- mon1_norm_spec =40961 and 40966 (forever) + # Normalized by monitor-1. 
-- need monitor1 and ei needs ei_mon1_spec + # This problem is hopefully fixed in reduction now, but here + # we have to specify these values manually to guard against + # changes in a future + propman.normalise_method='monitor-1' + propman.mon1_norm_spec=40961 + propman.ei_mon1_spec =40966 + + mon_ws = PropertyManager.sample_run.get_monitors_ws() + self.assertTrue(not(mon_ws is None)) + ws = PropertyManager.sample_run.get_workspace() + + self.assertTrue(isinstance(ws,IEventWorkspace)) + self.assertEqual(ws.getNumberHistograms(),40960) + self.assertTrue(isinstance(mon_ws,Workspace)) + # + self.assertEqual(mon_ws.getNumberHistograms(),9) + + + self.valid = True + + + def validate(self): + return self.valid + +if __name__=="__main__": + ISISLoadFilesMER.runTest() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAbsCorTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAbsCorTest.py new file mode 100644 index 0000000000000000000000000000000000000000..2d3e85724613f25a8f44b59c3d2615daa5ddaf5a --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAbsCorTest.py @@ -0,0 +1,237 @@ +import stresstesting +from mantid.simpleapi import * +from IndirectImport import is_supported_f2py_platform +import os + +#==================================================================================================== + + +class CylAbsTest(stresstesting.MantidStressTest): + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + import IndirectAbsCor as Main + + sname = 'irs26176_graphite002_red' + LoadNexusProcessed(Filename=sname, OutputWorkspace=sname) + + beam = [3.0, 1.0, -1.0, 2.0, -2.0, 0.0, 3.0, 0.0, 3.0] + size = [0.2, 0.25, 0.26, 0.0] + density = [0.1, 0.1, 0.1] + sigs = [5.0, 0.1, 0.1] + siga = [0.0, 5.0, 5.0] + avar = 0.002 + saveOp = False + Main.AbsRun(sname, 'cyl', beam, 2, size, density, + sigs, siga, avar, saveOp) + + def validate(self): + self.tolerance = 1e-3 + return 
'irs26176_graphite002_cyl_Abs', 'ISISIndirectAbsCor_CylAbsTest.nxs' + +#==================================================================================================== + + +class FltAbsTest(stresstesting.MantidStressTest): + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + import IndirectAbsCor as Main + + sname = 'irs26176_graphite002_red' + LoadNexusProcessed(Filename=sname, OutputWorkspace=sname) + + beam = '' + size = [0.1, 0.01, 0.01] + density = [0.1, 0.1, 0.1] + sigs = [5.0, 0.1, 0.1] + siga = [0.0, 5.0, 5.0] + avar = 45.0 + saveOp = False + Main.AbsRun(sname, 'flt', beam, 2, size, density, + sigs, siga, avar, saveOp) + + def validate(self): + self.tolerance = 1e-3 + return 'irs26176_graphite002_flt_Abs', 'ISISIndirectAbsCor_FltAbsTest.nxs' + + +#==================================================================================================== + + +class FltAbsTSecCloseTo90Test(stresstesting.MantidStressTest): + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + import IndirectAbsCor as Main + + sname = 'irs59330_graphite002_red' + LoadNexusProcessed(Filename=sname, OutputWorkspace=sname) + + beam = '' + size = [0.1, 0.01, 0.01] + density = [0.05, 0.5, 0.5] + sigs = [5.0, 0.1, 0.1] + siga = [0.0, 5.0, 5.0] + avar = 45.0 + saveOp = False + Main.AbsRun(sname, 'flt', beam, 2, size, density, + sigs, siga, avar, saveOp) + + def validate(self): + self.tolerance = 1e-3 + return 'iris59330_graphite002_flt_Abs', 'ISISIndirectAbsCor_FltAbsTSecCloseTo90Test.nxs' + +#==================================================================================================== + + +class AbsRunFeederTest(stresstesting.MantidStressTest): + """ + Test AbsRunFeeder with given values for scattering and absorption cross sections + for both sample and can. 
+ """ + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + from IndirectAbsCor import AbsRunFeeder + + # H20 sample + inputWS = 'irs26176_graphite002_red' + # cylindrical Vanadium can + canWS = 'irs26173_graphite002_red' + + Load(inputWS + '.nxs', OutputWorkspace=inputWS) + Load(canWS + '.nxs', OutputWorkspace=canWS) + + geom = 'cyl' + ncan = 2 + size = [0.2, 0.25, 0.26, 0.0] + sigs = [5.0, 0.1, 0.1] + siga = [0.0, 5.0, 5.0] + avar = 0.002 + density = [0.1, 0.1, 0.1] + beam_width = 4.0 + AbsRunFeeder(inputWS, canWS, geom, ncan, size, avar, density, beam_width=beam_width, sigs=sigs, siga=siga) + + def validate(self): + self.tolerance = 1e-3 + return 'irs26176_graphite002_cyl_Abs', 'ISISIndirectAbsCor_AbsRunFeederTest.nxs' + +#==================================================================================================== + + +class AbsRunFeederChemicalFormulaTest(stresstesting.MantidStressTest): + """ + Test AbsRunFeeder with chemical formula input for scattering and absorption cross sections + for both sample and can. 
+ """ + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + from IndirectAbsCor import AbsRunFeeder + + # H20 sample + inputWS = 'irs26176_graphite002_red' + # cylindrical Vanadium can + canWS = 'irs26173_graphite002_red' + + Load(inputWS + '.nxs', OutputWorkspace=inputWS) + Load(canWS + '.nxs', OutputWorkspace=canWS) + + geom = 'cyl' + ncan = 2 + size = [0.2, 0.25, 0.26, 0.0] + avar = 0.002 + density = [0.1, 0.1, 0.1] + beam_width = 4.0 + sampleFormula = 'H2-O' + canFormula = 'V' + AbsRunFeeder(inputWS, canWS, geom, ncan, size, avar, density, beam_width=beam_width, sample_formula=sampleFormula, can_formula=canFormula, sigs=[0,0,0], siga=[0,0,0]) + + def validate(self): + self.tolerance = 1e-3 + return 'irs26176_graphite002_cyl_Abs', 'ISISIndirectAbsCor_ChemicalFormulaTest.nxs' + +#==================================================================================================== + + +class AbsRunFeederDefaultBeamWidthTest(stresstesting.MantidStressTest): + """ + Test AbsRunFeeder with given values for scattering and absorption cross sections + for both sample and can and the beam width taken from the IPF. 
+ """ + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + from IndirectAbsCor import AbsRunFeeder + + # H20 sample + inputWS = 'irs26176_graphite002_red' + # cylindrical Vanadium can + canWS = 'irs26173_graphite002_red' + + Load(inputWS + '.nxs', OutputWorkspace=inputWS) + path = os.path.join(config['instrumentDefinition.directory'], 'IRIS_Parameters.xml') + LoadParameterFile(inputWS, Filename=path) + Load(canWS + '.nxs', OutputWorkspace=canWS) + + geom = 'cyl' + ncan = 2 + size = [0.2, 0.25, 0.26, 0.0] + sigs = [5.0, 0.1, 0.1] + siga = [0.0, 5.0, 5.0] + avar = 0.002 + density = [0.1, 0.1, 0.1] + AbsRunFeeder(inputWS, canWS, geom, ncan, size, avar, density, sigs=sigs, siga=siga) + + def validate(self): + self.tolerance = 1e-3 + return 'irs26176_graphite002_cyl_Abs', 'ISISIndirectAbsCor_DefaultBeamWidthTest.nxs' + +#==================================================================================================== + + +class AbsRunFeederDiffractionTest(stresstesting.MantidStressTest): + """ + Test AbsRunFeeder with sample and can material formulas for a diffraction run. 
+ """ + + def skipTests(self): + return not is_supported_f2py_platform() + + def runTest(self): + from IndirectAbsCor import AbsRunFeeder + + # H20 sample + inputWS = 'irs26176_diffspec_red' + # cylindrical Vanadium can + canWS = 'irs26173_diffspec_red' + + Load(inputWS + '.nxs', OutputWorkspace=inputWS) + Load(canWS + '.nxs', OutputWorkspace=canWS) + + geom = 'cyl' + ncan = 2 + size = [0.2, 0.25, 0.26, 0.0] + avar = 0.002 + density = [0.1, 0.1, 0.1] + beam_width = 4.0 + sampleFormula = 'H2-O' + canFormula = 'V' + AbsRunFeeder(inputWS, canWS, geom, ncan, size, avar, density, beam_width=beam_width, sample_formula=sampleFormula, can_formula=canFormula, sigs=[0,0,0], siga=[0,0,0]) + + def validate(self): + self.tolerance = 1e-3 + return 'irs26176_diffspec_cyl_Abs', 'ISISIndirectAbsCor_AbsRunFeederDiffractionTest.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAnalysisTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAnalysisTest.py new file mode 100644 index 0000000000000000000000000000000000000000..c5410ba8b05fd71907abf5518c27b94270921684 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectAnalysisTest.py @@ -0,0 +1,31 @@ +import stresstesting +import os +from mantid.simpleapi import * +from IndirectImport import is_supported_f2py_platform + + +class ElasticWindowMultipleTest(stresstesting.MantidStressTest): + + def runTest(self): + Load(Filename='osi92762_graphite002_red.nxs,osi92763_graphite002_red.nxs', + OutputWorkspace='__ElWinMulti_InputWS') + + ElasticWindowMultiple( + InputWorkspaces='__ElWinMulti_InputWS', + Range1Start=-0.2, + Range1End=0.2, + Range2Start='-0.24', + Range2End='-0.22', + OutputInQ='eq', + OutputInQSquared='eq2', + OutputELF='elf', + OutputELT='elt') + + GroupWorkspaces(InputWorkspaces=['elf', 'elt'], + OutputWorkspace='__ElWinMulti_OutputWS') + + SaveNexus(Filename='__ElWinMulti_OutputWS', InputWorkspace='__ElWinMulti_OutputWS') + + def validate(self): + 
self.tolerance = 1e-10 + return '__ElWinMulti_OutputWS', 'II.AnalysisElwinMulti.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectBayesTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectBayesTest.py new file mode 100644 index 0000000000000000000000000000000000000000..437cc801abbd6326285b6ea7893b04f50cf74576 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectBayesTest.py @@ -0,0 +1,388 @@ +import stresstesting +import os +from mantid.simpleapi import * +from IndirectImport import is_supported_f2py_platform + +def _cleanup_files(dirname, filenames): + """ + Attempts to remove each filename from + the given directory + """ + for filename in filenames: + path = os.path.join(dirname, filename) + try: + os.remove(path) + except OSError: + pass + +class QLresTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + nbins = ['1', '1'] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_res' + rsname = '' + wfile = '' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', False, False] #elastic, background, width, resnorm + loopOp = False + plotOp = False + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.QLRun('QL',sname,rname,rsname,erange,nbins,fitOp,wfile,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-4 + return 'irs26176_graphite002_QLr_Workspace_0','ISISIndirectBayes_QlresTest.nxs' + + def cleanup(self): + filenames = ['irs26176_graphite002_QLr.lpt','irs26176_graphite002_QLr.ql1', + 'irs26176_graphite002_QLr.ql2','irs26176_graphite002_QLr.ql3', + 'irs26176_graphite002_QLr_Parameters.nxs'] + 
_cleanup_files(config['defaultsave.directory'], filenames) + +#======================================================================== +class ResNormTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + nbin = '1' + vname = 'irs26173_graphite002_red' + rname = 'irs26173_graphite002_res' + erange = [-0.2, 0.2] + plotOp = False + saveOp = False + + vpath = vname+'.nxs' # path name for van nxs file + LoadNexusProcessed(Filename=vpath, OutputWorkspace=vname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.ResNormRun(vname,rname,erange,nbin,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-4 + self.disableChecking.append("SpectraMap") + return 'irs26173_graphite002_ResNorm_Fit','ISISIndirectBayes_ResNormTest.nxs' + + def cleanup(self): + filenames = ['irs26173_graphite002_resnrm.lpt'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#========================================================================= +class QuestTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + nbins = [1, 1] + nbs = [50, 30] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_res' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', False, False] #elastic, background, width, resnorm + loopOp = False + plotOp = 'None' + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.QuestRun(sname,rname,nbs,erange,nbins,fitOp,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-1 + return 
'irs26176_graphite002_Qst_Fit','ISISIndirectBayes_QuestTest.nxs' + + def cleanup(self): + filenames = ['irs26176_graphite002_Qst.lpt','irs26176_graphite002_Qss.ql2', + 'irs26176_graphite002_Qsb.ql1'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#============================================================================= +class QSeTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + nbins = ['1', '1'] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_res' + rsname = '' + wfile = '' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', False, False] #elastic, background, width, resnorm + loopOp = False + plotOp = False + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.QLRun('QSe',sname,rname,rsname,erange,nbins,fitOp,wfile,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-1 + return 'irs26176_graphite002_QSe_Workspace_0','ISISIndirectBayes_QSeTest.nxs' + + def cleanup(self): + filenames = ['irs26176_graphite002_QSe_Parameters.nxs', 'irs26176_graphite002_Qse.qse', + 'irs26176_graphite002_Qse.lpt'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#============================================================================= +class QLDataTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + nbins = ['1', '1'] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_red' + rsname = '' + wfile = '' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', False, False] #elastic, background, width, resnorm + loopOp = False + 
plotOp = False + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.QLRun('QL',sname,rname,rsname,erange,nbins,fitOp,wfile,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-4 + return 'irs26176_graphite002_QLd_Workspace_0','ISISIndirectBayes_QLDataTest.nxs' + + def cleanup(self): + filenames = ['irs26176_graphite002_QLd.lpt','irs26176_graphite002_QLd.ql1', + 'irs26176_graphite002_QLd.ql2','irs26176_graphite002_QLd.ql3', + 'irs26176_graphite002_QLd_Parameters.nxs'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#============================================================================= +class QLResNormTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + + nbins = ['1', '1'] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_res' + rsname = 'irs26173_graphite002_ResNorm' + wfile = '' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', False, True] #elastic, background, width, resnorm + loopOp = True + plotOp = False + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + rspath = rsname+'_Paras.nxs' # path name for resNorm nxs file + LoadNexusProcessed(Filename=rspath, OutputWorkspace=rsname) + Main.QLRun('QL',sname,rname,rsname,erange,nbins,fitOp,wfile,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-1 + return 'irs26176_graphite002_QLr_Workspaces','ISISIndirectBayes_QLr_ResNorm_Test.nxs' + + def cleanup(self): + filenames = 
['irs26176_graphite002_QLd.lpt','irs26176_graphite002_QLd.ql1', + 'irs26176_graphite002_QLd.ql2','irs26176_graphite002_QLd.ql3', + 'irs26176_graphite002_QLd_Parameters.nxs'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#============================================================================= +class QLWidthTest(stresstesting.MantidStressTest): + + def skipTests(self): + if is_supported_f2py_platform(): + return False + else: + return True + + def runTest(self): + import IndirectBayes as Main + + nbins = ['1', '1'] + sname = 'irs26176_graphite002_red' + rname = 'irs26173_graphite002_res' + rsname = '' + wfile = 'irs26176_graphite002_width_water.dat' + erange = [-0.5, 0.5] + fitOp = [True, 'Sloping', True, False] #elastic, background, width, resnorm + loopOp = False + plotOp = False + saveOp = False + + spath = sname+'.nxs' # path name for sample nxs file + LoadNexusProcessed(Filename=spath, OutputWorkspace=sname) + rpath = rname+'.nxs' # path name for res nxs file + LoadNexusProcessed(Filename=rpath, OutputWorkspace=rname) + Main.QLRun('QL',sname,rname,rsname,erange,nbins,fitOp,wfile,loopOp,plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-1 + return 'irs26176_graphite002_QLr_Workspace_0','ISISIndirectBayes_QLr_width_Test.nxs' + + def cleanup(self): + filenames = ['irs26176_graphite002_QLd.lpt','irs26176_graphite002_QLd.ql1', + 'irs26176_graphite002_QLd.ql2','irs26176_graphite002_QLd.ql3', + 'irs26176_graphite002_QLd_Parameters.nxs'] + _cleanup_files(config['defaultsave.directory'], filenames) + +#============================================================================= + +class JumpCETest(stresstesting.MantidStressTest): + + def runTest(self): + sname = 'irs26176_graphite002_QLr_Workspace' + qrange = [0.6, 1.705600] + plotOp = False + saveOp = False + + filename = sname + '.nxs' # path name for nxs file + LoadNexusProcessed(Filename=filename, OutputWorkspace=sname) + + # Data must be in HWHM + Scale(InputWorkspace=sname, 
Factor=0.5, OutputWorkspace=sname) + + JumpFit(InputWorkspace=sname, + Function='ChudleyElliot', + Width=2, + QMin=qrange[0], + QMax=qrange[1], + Plot=plotOp, + Save=saveOp) + + def validate(self): + self.tolerance = 1e-5 + return 'irs26176_graphite002_QLr_ChudleyElliot_fit_Workspace','ISISIndirectBayes_JumpCETest.nxs' + +#============================================================================= +class JumpHallRossTest(stresstesting.MantidStressTest): + + def runTest(self): + sname = 'irs26176_graphite002_QLr_Workspace' + qrange = [0.6, 1.705600] + plotOp = False + saveOp = False + + path = sname+'.nxs' # path name for nxs file + LoadNexusProcessed(Filename=path, OutputWorkspace=sname) + + # Data must be in HWHM + Scale(InputWorkspace=sname, Factor=0.5, OutputWorkspace=sname) + + JumpFit(InputWorkspace=sname, + Function='HallRoss', + Width=2, + QMin=qrange[0], + QMax=qrange[1], + Plot=plotOp, + Save=saveOp) + + def validate(self): + self.tolerance = 1e-5 + return 'irs26176_graphite002_QLr_HallRoss_fit_Workspace','ISISIndirectBayes_JumpHallRossTest.nxs' + +#============================================================================= +class JumpFickTest(stresstesting.MantidStressTest): + + def runTest(self): + sname = 'irs26176_graphite002_QLr_Workspace' + qrange = [0.6, 1.705600] + plotOp = False + saveOp = False + + path = sname+'.nxs' # path name for nxs file + LoadNexusProcessed(Filename=path, OutputWorkspace=sname) + + # Data must be in HWHM + Scale(InputWorkspace=sname, Factor=0.5, OutputWorkspace=sname) + + JumpFit(InputWorkspace=sname, + Function='FickDiffusion', + Width=2, + QMin=qrange[0], + QMax=qrange[1], + Plot=plotOp, + Save=saveOp) + + def validate(self): + self.tolerance = 5e-4 + return 'irs26176_graphite002_QLr_FickDiffusion_fit_Workspace','ISISIndirectBayes_JumpFickTest.nxs' + +#============================================================================= +class JumpTeixeiraTest(stresstesting.MantidStressTest): + + def runTest(self): + sname = 
'irs26176_graphite002_QLr_Workspace' + qrange = [0.6, 1.705600] + plotOp = False + saveOp = False + + path = sname+'.nxs' # path name for nxs file + LoadNexusProcessed(Filename=path, OutputWorkspace=sname) + + # Data must be in HWHM + Scale(InputWorkspace=sname, Factor=0.5, OutputWorkspace=sname) + + JumpFit(InputWorkspace=sname, + Function='TeixeiraWater', + Width=2, + QMin=qrange[0], + QMax=qrange[1], + Plot=plotOp, + Save=saveOp) + + def validate(self): + self.tolerance = 1e-2 + return 'irs26176_graphite002_QLr_TeixeiraWater_fit_Workspace','ISISIndirectBayes_JumpTeixeiraTest.nxs' + +#============================================================================= diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py new file mode 100644 index 0000000000000000000000000000000000000000..30a6b8ac06ec97c7c894a3c67a51e20781f1aed8 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectInelastic.py @@ -0,0 +1,1366 @@ +import stresstesting +import os +import platform +from abc import ABCMeta, abstractmethod + +from mantid.simpleapi import * + +# For debugging only. +from mantid.api import FileFinder + +# Import our workflows. +from inelastic_indirect_reducer import IndirectReducer +from inelastic_indirect_reduction_steps import CreateCalibrationWorkspace +from IndirectDataAnalysis import msdfit, furyfitSeq, furyfitMult, confitSeq, abscorFeeder + +''' +- TOSCA only supported by "Reduction" (the Energy Transfer tab of C2E). +- OSIRIS/IRIS supported by all tabs / interfaces. +- VESUVIO is not supported by any interface as of yet. 
+ +For diagrams on the intended work flow of the IDA and Indirect parts of the +C2E interface, please see: + +- http://www.mantidproject.org/IDA +- http://www.mantidproject.org/Indirect + +System test class hierarchy as shown below: + +stresstesting.MantidStressTest + | + +--ISISIndirectInelasticBase + | + +--ISISIndirectInelasticReduction + | | + | +--TOSCAReduction + | +--IRISReduction + | +--OSIRISReduction + | + +--ISISIndirectInelasticCalibratrion + | | + | +--IRISCalibratrion + | +--OSIRISCalibratrion + | + +--ISISIndirectInelasticResolution + | | + | +--IRISResolution + | +--OSIRISResolution + | + +--ISISIndirectInelasticDiagnostics + | | + | +--IRISDiagnostics + | +--OSIRISDiagnostics + | + +--ISISIndirectInelasticMoments + | | + | +--IRISMoments + | +--OSIRISMoments + | + +--ISISIndirectInelasticElwinAndMSDFit + | | + | +--IRISElwinAndMSDFit + | +--OSIRISElwinAndMSDFit + | + +--ISISIndirectInelasticFuryAndFuryFit + | | + | +--IRISFuryAndFuryFit + | +--OSIRISFuryAndFuryFit + | + +--ISISIndirectInelasticFuryAndFuryFitMulti + | | + | +--IRISFuryAndFuryFitMulti + | +--OSIRISFuryAndFuryFitMulti + | + +--ISISIndirectInelasticConvFit + | | + | +--IRISConvFit + | +--OSIRISConvFit + | +''' + + +class ISISIndirectInelasticBase(stresstesting.MantidStressTest): + '''A common base class for the ISISIndirectInelastic* base classes. 
+ ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_reference_files(self): + '''Returns the name of the reference files to compare against.''' + raise NotImplementedError("Implmenent get_reference_files to return " + "the names of the files to compare against.") + + @abstractmethod + def _run(self): + raise NotImplementedError("Implement _run.") + + def validate_results_and_references(self): + if type(self.get_reference_files()) != list: + raise RuntimeError("The reference file(s) should be in a list") + if type(self.result_names) != list: + raise RuntimeError("The result workspace(s) should be in a list") + if len(self.get_reference_files()) !=\ + len(self.result_names): + raise RuntimeError("The number of result workspaces does not match" + " the number of reference files.") + if len(self.get_reference_files()) < 1: + raise RuntimeError("There needs to be a least one result and " + "reference.") + + @abstractmethod + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + raise NotImplementedError("Implmenent _validate_properties.") + + def runTest(self): + self._validate_properties() + self._run() + self.validate_results_and_references() + + def validate(self): + '''Performs the validation for the generalised case of multiple results + and multiple reference files. + ''' + + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + self.disableChecking.append('Axes') + + for reference_file, result in zip(self.get_reference_files(), + self.result_names): + wsName = "RefFile" + if reference_file.endswith('.nxs'): + LoadNexus(Filename=reference_file, OutputWorkspace=wsName) + else: + raise RuntimeError("Should supply a NeXus file: %s" % + reference_file) + + if not self.validateWorkspaces([result, wsName]): + print str([reference_file, result]) + " do not match." 
+ return False + + return True + + def get_temp_dir_path(self, filename): + '''Given a filename, prepends the system test temporary directory + and returns the full path.''' + return os.path.join(config['defaultsave.directory'], filename) + + +#============================================================================== +class ISISIndirectInelasticReduction(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic reduction tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + - instr_name: A string giving the instrument name for the test + - detector_range: A list containing the lower and upper bounds of the + range of detectors to use + - data_file: A string giving the data file to use + - rebin_string: A comma separated string giving the rebin params + - save_formats: A list containing the file extensions of the formats + to save to. + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + sum_files = False + + def _run(self): + self.tolerance = 1e-7 + '''Defines the workflow for the test''' + reducer = IndirectReducer() + reducer.set_instrument_name(self.instr_name) + reducer.set_detector_range(self.detector_range[0], + self.detector_range[1]) + reducer.set_sum_files(self.sum_files) + self.parameter_file = self.instr_name + '_graphite_002_Parameters.xml' + reducer.set_parameter_file(self.parameter_file) + + for name in self.data_files: + reducer.append_data_file(name) + + if self.rebin_string is not None: + reducer.set_rebin_string(self.rebin_string) + + # Do the reduction and rename the result. 
+ reducer.reduce() + self.result_names = sorted(reducer.get_result_workspaces()) + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + if type(self.instr_name) != str: + raise RuntimeError("instr_name property should be a string") + if type(self.detector_range) != list and len(self.detector_range) != 2: + raise RuntimeError("detector_range should be a list of exactly 2 " + "values") + if type(self.data_files) != list: + raise RuntimeError("data_file property should be a string") + if self.rebin_string is not None and type(self.rebin_string) != str: + raise RuntimeError("rebin_string property should be a string") + if self.sum_files is not None and type(self.sum_files) != bool: + raise RuntimeError("sum_files property should be a bool") + +#------------------------- TOSCA tests ---------------------------------------- + + +class TOSCAReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'TOSCA' + self.detector_range = [0, 139] + self.data_files = ['TSC15352.raw'] + self.rebin_string = '-2.5,0.015,3,-0.005,1000' + + def get_reference_files(self): + return ["II.TOSCAReductionFromFile.nxs"] + +class TOSCAMultiFileReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'TOSCA' + self.detector_range = [0, 139] + self.data_files = ['TSC15352.raw', 'TSC15353.raw','TSC15354.raw'] + self.rebin_string = '-2.5,0.015,3,-0.005,1000' + + def get_reference_files(self): + #note that the same run for single reduction is used. 
+ #as they should be the same + return ['II.TOSCAReductionFromFile.nxs', 'II.TOSCAMultiFileReduction1.nxs', 'II.TOSCAMultiFileReduction2.nxs'] + +class TOSCAMultiFileSummedReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'TOSCA' + self.detector_range = [0, 139] + self.data_files = ['TSC15352.raw', 'TSC15353.raw','TSC15354.raw'] + self.rebin_string = '-2.5,0.015,3,-0.005,1000' + self.sum_files = True + + def get_reference_files(self): + return ['II.TOSCAMultiFileSummedReduction.nxs'] + + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'OSIRIS' + self.detector_range = [962, 1003] + self.data_files = ['OSIRIS00106550.raw'] + self.rebin_string = None + + def get_reference_files(self): + return ["II.OSIRISReductionFromFile.nxs"] + +class OSIRISMultiFileReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'OSIRIS' + self.detector_range = [962, 1003] + self.data_files = ['OSIRIS00106550.raw',' OSIRIS00106551.raw'] + self.rebin_string = None + + def get_reference_files(self): + #note that the same run for single reduction is used. 
+ #as they should be the same + return ['II.OSIRISReductionFromFile.nxs','II.OSIRISMultiFileReduction1.nxs'] + +class OSIRISMultiFileSummedReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'OSIRIS' + self.detector_range = [962, 1003] + self.data_files = ['OSIRIS00106550.raw', 'OSIRIS00106551.raw'] + self.rebin_string = None + self.sum_files = True + + def get_reference_files(self): + return ['II.OSIRISMultiFileSummedReduction.nxs'] + +#------------------------- IRIS tests ----------------------------------------- + +class IRISReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'IRIS' + self.detector_range = [2, 52] + self.data_files = ['IRS21360.raw'] + self.rebin_string = None + + def get_reference_files(self): + return ["II.IRISReductionFromFile.nxs"] + + +class IRISMultiFileReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'IRIS' + self.detector_range = [2, 52] + self.data_files = ['IRS21360.raw', 'IRS53664.raw'] + self.rebin_string = None + + def get_reference_files(self): + return ['II.IRISReductionFromFile.nxs', 'II.IRISMultiFileReduction1.nxs'] + + +class IRISMultiFileSummedReduction(ISISIndirectInelasticReduction): + + def __init__(self): + ISISIndirectInelasticReduction.__init__(self) + self.instr_name = 'IRIS' + self.detector_range = [2, 52] + self.data_files = ['IRS21360.raw', 'IRS53664.raw'] + self.sum_files = True + self.rebin_string = None + + def get_reference_files(self): + #note that the same run for single reduction is used. 
+ #as they should be the same + return ['II.IRISMultiFileSummedReduction.nxs'] + +#--------------------- Generic Reduction tests ----------------------------- + +class ISISIndirectInelasticReductionOutput(stresstesting.MantidStressTest): + + def runTest(self): + reducer = self._setup_reducer() + reducer.reduce() + self.result_names = sorted(reducer.get_result_workspaces()) + + def validate(self): + self.assertEqual(len(self.result_names), 1) + self.result_name = self.result_names[0] + + self.output_file_names = self._get_file_names() + self.assert_reduction_output_exists(self.output_file_names) + self.assert_ascii_file_matches() + self.assert_aclimax_file_matches() + self.assert_spe_file_matches() + + def cleanup(self): + mtd.clear() + + for file_path in self.output_file_names.itervalues(): + if os.path.isfile(file_path): + os.remove(file_path) + + def assert_ascii_file_matches(self): + expected_result = [ + 'X , Y0 , E0 , Y1 , E1 , Y2 , E2', + '-2.4925,0,0,0.617579,0.362534,0.270868,0.159006', + '-2.4775,0.375037,0.273017,0,0,0.210547,0.153272' + ] + self.assert_file_format_matches_expected(expected_result, self.output_file_names['ascii'], + "Output of ASCII format did not match expected result.") + + def assert_aclimax_file_matches(self): + expected_result = [ + '# X \t Y \t E', + '0', + '3.0075\t0.175435\t0.115017' + ] + self.assert_file_format_matches_expected(expected_result, self.output_file_names['aclimax'], + "Output of aclimax format did not match expected result.") + + def assert_spe_file_matches(self): + #Old SPE format: + # ' 3 1532', + # '### Phi Grid', + # ' 5.000E-01 1.500E+00 2.500E+00 3.500E+00', + # '### Energy Grid', + # '-2.500E+00-2.485E+00-2.470E+00-2.455E+00-2.440E+00-2.425E+00-2.410E+00-2.395E+00' + # + # New SPE format: + expected_result = [ + ' 3 1532', + '### Phi Grid', + '0.5 1.5 2.5 3.5', + '### Energy Grid', + '-2.5 -2.485 -2.47 -2.455 -2.44 -2.425 -2.41 -2.395' + ] + self.assert_file_format_matches_expected(expected_result, 
self.output_file_names['spe'], + "Output of SPE format did not match expected result.") + + def assert_reduction_output_exists(self, output_file_names): + for file_path in output_file_names.itervalues(): + self.assertTrue(os.path.exists(file_path), "File does not exist in the default save directory") + self.assertTrue(os.path.isfile(file_path), "Output file of reduction output is not a file.") + + def assert_file_format_matches_expected(self, expected_result, file_path, msg=""): + num_lines = len(expected_result) + actual_result = self._read_ascii_file(file_path, num_lines) + self.assertTrue(actual_result == expected_result, msg + " (%s != %s)" % (actual_result, expected_result)) + + def _setup_reducer(self): + self.file_formats = ['nxs', 'spe', 'nxspe', 'ascii', 'aclimax'] + self.file_extensions = ['.nxs', '.spe', '.nxspe', '.dat', '_aclimax.dat'] + self.instr_name = 'TOSCA' + self.detector_range = [0, 139] + self.data_files = ['TSC15352.raw'] + self.rebin_string = '-2.5,0.015,3,-0.005,1000' + self.parameter_file = self.instr_name + '_graphite_002_Parameters.xml' + + reducer = IndirectReducer() + reducer.set_instrument_name(self.instr_name) + reducer.set_detector_range(self.detector_range[0], + self.detector_range[1]) + reducer.set_sum_files(False) + reducer.set_parameter_file(self.parameter_file) + reducer.set_save_formats(self.file_formats) + + for name in self.data_files: + reducer.append_data_file(name) + + if self.rebin_string is not None: + reducer.set_rebin_string(self.rebin_string) + + return reducer + + def _read_ascii_file(self, path, num_lines): + with open(path,'rb') as file_handle: + lines = [file_handle.readline().rstrip() for _ in xrange(num_lines)] + return lines + + def _get_file_names(self): + working_directory = config['defaultsave.directory'] + + output_names = {} + for format, ext in zip(self.file_formats, self.file_extensions): + output_file_name = self.result_name + ext + output_file_name = os.path.join(working_directory, output_file_name) + 
output_names[format] = output_file_name + + return output_names + +#============================================================================== +class ISISIndirectInelasticCalibration(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic calibration tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + - self.data_file: a string giving the name of the data file + - self.detector_range: a list of two ints, giving the lower and + upper bounds of the detector range + - self.parameters: a list containing four doubles, each a parameter. + - self.analyser: a string giving the name of the analyser to use + - self.reflection: a string giving the reflection to use + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-7 + + self.result_names = ['IndirectCalibration_Output'] + + CreateCalibrationWorkspace(InputFiles=self.data_file, + OutputWorkspace='IndirectCalibration_Output', + DetectorRange=self.detector_range, + PeakRange=self.peak, + BackgroundRange=self.back) + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + + if type(self.data_file) != str: + raise RuntimeError("data_file property should be a string") + if type(self.detector_range) != list and len(self.detector_range) != 2: + raise RuntimeError("detector_range should be a list of exactly 2 values") + if type(self.peak) != list and len(self.peak) != 2: + raise RuntimeError("peak should be a list of exactly 2 values") + if type(self.back) != list and len(self.back) != 2: + raise RuntimeError("back should be a list of exactly 2 values") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISCalibration(ISISIndirectInelasticCalibration): + + def __init__(self): + ISISIndirectInelasticCalibration.__init__(self) + 
self.data_file = 'OSI97935.raw' + self.detector_range = [963, 1004] + self.back = [68000.00, 70000.00] + self.peak = [59000.00, 61000.00] + + def get_reference_files(self): + return ["II.OSIRISCalibration.nxs"] + +#------------------------- IRIS tests --------------------------------------- + + +class IRISCalibration(ISISIndirectInelasticCalibration): + + def __init__(self): + ISISIndirectInelasticCalibration.__init__(self) + self.data_file = 'IRS53664.raw' + self.detector_range = [3, 53] + self.back = [59000.00, 61500.00] + self.peak = [62500.00, 65000.00] + + def get_reference_files(self): + return ["II.IRISCalibration.nxs"] + + +#============================================================================== +class ISISIndirectInelasticResolution(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic resolution tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + - self.instrument: a string giving the intrument name + - self.analyser: a string giving the name of the analyser + - self.reflection: a string giving the name of the reflection + - self.detector_range: a list of two integers, giving the range of detectors + - self.background: a list of two doubles, giving the background params + - self.rebin_params: a comma separated string containing the rebin params + - self.files: a list of strings containing filenames + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + self.tolerance = 1e-7 + '''Defines the workflow for the test''' + + IndirectResolution(InputFiles=self.files, + OutputWorkspace='__IndirectResolution_Test', + Instrument=self.instrument, + Analyser=self.analyser, + Reflection=self.reflection, + DetectorRange=self.detector_range, + BackgroundRange=self.background, + RebinParam=self.rebin_params, + Plot=False) + + self.result_names = ['__IndirectResolution_Test'] + + def _validate_properties(self): + '''Check 
the object properties are in an expected state to continue''' + + if type(self.instrument) != str: + raise RuntimeError("instrument property should be a string") + if type(self.analyser) != str: + raise RuntimeError("analyser property should be a string") + if type(self.reflection) != str: + raise RuntimeError("reflection property should be a string") + if type(self.detector_range) != list and len(self.detector_range) != 2: + raise RuntimeError("detector_range should be a list of exactly 2 values") + if type(self.background) != list and len(self.background) != 2: + raise RuntimeError("background should be a list of exactly 2 values") + if type(self.rebin_params) != str: + raise RuntimeError("rebin_params property should be a string") + # Have this as just one file for now. + if type(self.files) != list and len(self.files) != 1: + raise RuntimeError("files should be a list of exactly 1 value") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISResolution(ISISIndirectInelasticResolution): + + def __init__(self): + ISISIndirectInelasticResolution.__init__(self) + self.instrument = 'OSIRIS' + self.analyser = 'graphite' + self.reflection = '002' + self.detector_range = [963, 1004] + self.background = [-0.563032, 0.605636] + self.rebin_params = '-0.2,0.002,0.2' + self.files = ['OSI97935.raw'] + + def get_reference_files(self): + return ["II.OSIRISResolution.nxs"] + +#------------------------- IRIS tests ----------------------------------------- + + +class IRISResolution(ISISIndirectInelasticResolution): + + def __init__(self): + ISISIndirectInelasticResolution.__init__(self) + self.instrument = 'IRIS' + self.analyser = 'graphite' + self.reflection = '002' + self.detector_range = [3, 53] + self.background = [-0.54, 0.65] + self.rebin_params = '-0.2,0.002,0.2' + self.files = ['IRS53664.raw'] + + def get_reference_files(self): + return ["II.IRISResolution.nxs"] + + 
+#============================================================================== +class ISISIndirectInelasticDiagnostics(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic diagnostic tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + '''Defines the workflow for the test''' + + self.tolerance = 1e-7 + + TimeSlice(InputFiles=self.rawfiles, + OutputNameSuffix=self.suffix, + OutputWorkspace='__IndirectInelasticDiagnostics_out_group', + PeakRange=self.peak, + SpectraRange=self.spectra, + Plot=False, + Save=False) + + # Construct the result ws name. + self.result_names = [os.path.splitext(self.rawfiles[0])[0] + self.suffix] + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + + if type(self.rawfiles) != list and len(self.rawfiles) != 1: + raise RuntimeError("rawfiles should be a list of exactly 1 value") + if type(self.peak) != list and len(self.peak) != 2: + raise RuntimeError("peak should be a list of exactly 2 values") + if type(self.spectra) != list and len(self.spectra) != 2: + raise RuntimeError("spectra should be a list of exactly 2 values") + if type(self.suffix) != str: + raise RuntimeError("suffix property should be a string") + + +#------------------------- IRIS tests ----------------------------------------- + + +class IRISDiagnostics(ISISIndirectInelasticDiagnostics): + + def __init__(self): + ISISIndirectInelasticDiagnostics.__init__(self) + + self.peak = [62500, 65000] + self.rawfiles = ['IRS53664.raw'] + self.spectra = [3, 53] + self.suffix = '_graphite002_slice' + + def get_reference_files(self): + return ["II.IRISDiagnostics.nxs"] + + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISDiagnostics(ISISIndirectInelasticDiagnostics): + + def __init__(self): 
+ ISISIndirectInelasticDiagnostics.__init__(self) + + self.peak = [59000, 61000] + self.rawfiles = ['OSI97935.raw'] + self.spectra = [963, 1004] + self.suffix = '_graphite002_slice' + + def get_reference_files(self): + return ["II.OSIRISDiagnostics.nxs"] + + +#============================================================================== +class ISISIndirectInelasticMoments(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic Fury/FuryFit tests + + The output of Elwin is usually used with MSDFit and so we plug one into + the other in this test. + ''' + # Mark as an abstract class + __metaclass__ = ABCMeta + + def _run(self): + '''Defines the workflow for the test''' + + LoadNexus(self.input_workspace, + OutputWorkspace=self.input_workspace) + + SofQWMoments(Sample=self.input_workspace, EnergyMin=self.e_min, + EnergyMax=self.e_max, Scale=self.scale, + Plot=False, Save=False, OutputWorkspace=self.input_workspace + '_Moments') + + self.result_names = [self.input_workspace + '_Moments'] + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + + if type(self.input_workspace) != str: + raise RuntimeError("Input workspace should be a string.") + if type(self.e_min) != float: + raise RuntimeError("Energy min should be a float") + if type(self.e_max) != float: + raise RuntimeError("Energy max should be a float") + if type(self.scale) != float: + raise RuntimeError("Scale should be a float") + + +#------------------------- OSIRIS tests --------------------------------------- +class OSIRISMoments(ISISIndirectInelasticMoments): + + def __init__(self): + ISISIndirectInelasticMoments.__init__(self) + self.input_workspace = 'osi97935_graphite002_sqw.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.scale = 1.0 + + def get_reference_files(self): + return ['II.OSIRISMoments.nxs'] + + +#------------------------- IRIS tests ----------------------------------------- +class 
IRISMoments(ISISIndirectInelasticMoments): + + def __init__(self): + ISISIndirectInelasticMoments.__init__(self) + self.input_workspace = 'irs53664_graphite002_sqw.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.scale = 1.0 + + def get_reference_files(self): + return ['II.IRISMoments.nxs'] + + +#============================================================================== +class ISISIndirectInelasticElwinAndMSDFit(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic Elwin/MSD Fit tests + + The output of Elwin is usually used with MSDFit and so we plug one into + the other in this test. + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-7 + + elwin_input = '__ElWinMult_in' + elwin_results = ['__ElWinMult_q', '__ElWinMult_q2', '__ElWinMult_elf'] + + # Load files and create workspace group + for filename in self.files: + Load(Filename=filename, OutputWorkspace=filename) + GroupWorkspaces(InputWorkspaces=self.files, OutputWorkspace=elwin_input) + + ElasticWindowMultiple(InputWorkspaces=elwin_input, Plot=False, + Range1Start=self.eRange[0], Range1End=self.eRange[1], + OutputInQ=elwin_results[0], OutputInQSquared=elwin_results[1], + OutputELF=elwin_results[2]) + + int_files = [self.get_temp_dir_path(filename) + ".nxs" + for filename in elwin_results] + + # Save the EQ1 & EQ2 results from Elwin to put into MSDFit. + for ws, filename in zip(elwin_results, int_files): + SaveNexusProcessed(Filename=filename, + InputWorkspace=ws) + + eq2_file = elwin_results[1] + msdfit_result = msdfit(eq2_file, + startX=self.startX, + endX=self.endX, + Save=False, + Plot=False) + + # @TODO: MSDFit has some other, as yet unfinalised, workspaces as its + # output. We need to test these too, eventually. + + # Annoyingly, MSDFit eats the EQ2 workspaces we feed it, so let's + # reload them for checking against the reference files later. 
+ for ws, filename in zip(elwin_results, int_files): + LoadNexusProcessed(Filename=filename, + OutputWorkspace=ws) + + # Clean up the intermediate files. + for filename in int_files: + os.remove(filename) + + # We're interested in the intermediate Elwin results as well as the + # final MSDFit result. + self.result_names = [elwin_results[0], # EQ1 + elwin_results[1], # EQ2 + msdfit_result] + + def _validate_properties(self): + """Check the object properties are in an expected state to continue""" + + if type(self.files) != list or len(self.files) != 2: + raise RuntimeError("files should be a list of exactly 2 " + "strings") + if type(self.eRange) != list or len(self.eRange) != 2: + raise RuntimeError("eRange should be a list of exactly 2 " + "values") + if type(self.startX) != float: + raise RuntimeError("startX should be a float") + if type(self.endX) != float: + raise RuntimeError("endX should be a float") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISElwinAndMSDFit(ISISIndirectInelasticElwinAndMSDFit): + + def __init__(self): + ISISIndirectInelasticElwinAndMSDFit.__init__(self) + self.files = ['osi97935_graphite002_red.nxs', + 'osi97936_graphite002_red.nxs'] + self.eRange = [-0.02, 0.02] + self.startX = 0.195082 + self.endX = 3.202128 + + def get_reference_files(self): + return ['II.OSIRISElwinEQ1.nxs', + 'II.OSIRISElwinEQ2.nxs', + 'II.OSIRISMSDFit.nxs'] + +#------------------------- IRIS tests ----------------------------------------- + + +class IRISElwinAndMSDFit(ISISIndirectInelasticElwinAndMSDFit): + + def __init__(self): + ISISIndirectInelasticElwinAndMSDFit.__init__(self) + self.files = ['irs53664_graphite002_red.nxs', + 'irs53665_graphite002_red.nxs'] + self.eRange = [-0.02, 0.02] + self.startX = 0.313679 + self.endX = 3.285377 + + def get_reference_files(self): + return ['II.IRISElwinEQ1.nxs', + 'II.IRISElwinEQ2.nxs', + 'II.IRISMSDFit.nxs'] + + 
+#============================================================================== +class ISISIndirectInelasticFuryAndFuryFit(ISISIndirectInelasticBase): + ''' + A base class for the ISIS indirect inelastic Fury/FuryFit tests + + The output of Fury is usually used with FuryFit and so we plug one into + the other in this test. + ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-7 + self.samples = [sample[:-4] for sample in self.samples] + + # Load files into Mantid + for sample in self.samples: + LoadNexus(sample, OutputWorkspace=sample) + LoadNexus(self.resolution, OutputWorkspace=self.resolution) + + fury_props, fury_ws = Fury(Sample=self.samples[0], + Resolution=self.resolution, + EnergyMin=self.e_min, + EnergyMax=self.e_max, + NumBins=self.num_bins, + DryRun=False, + Save=False, + Plot=False) + + # Test FuryFit Sequential + furyfitSeq_ws = furyfitSeq(fury_ws.getName(), + self.func, + self.ftype, + self.startx, + self.endx, + Save=False, + Plot='None') + + self.result_names = [fury_ws.getName(), + furyfitSeq_ws] + + # Remove workspaces from Mantid + for sample in self.samples: + DeleteWorkspace(sample) + + DeleteWorkspace(self.resolution) + + def _validate_properties(self): + """Check the object properties are in an expected state to continue""" + + if type(self.samples) != list: + raise RuntimeError("Samples should be a list of strings.") + if type(self.resolution) != str: + raise RuntimeError("Resolution should be a string.") + if type(self.e_min) != float: + raise RuntimeError("e_min should be a float") + if type(self.e_max) != float: + raise RuntimeError("e_max should be a float") + if type(self.num_bins) != int: + raise RuntimeError("num_bins should be an int") + if type(self.func) != str: + raise RuntimeError("Function should be a string.") + if type(self.ftype) != str: + raise RuntimeError("Function type should be a string.") + if type(self.startx) != float: + raise 
RuntimeError("startx should be a float") + if type(self.endx) != float: + raise RuntimeError("endx should be a float") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISFuryAndFuryFit(ISISIndirectInelasticFuryAndFuryFit): + + def __init__(self): + ISISIndirectInelasticFuryAndFuryFit.__init__(self) + + # Fury + self.samples = ['osi97935_graphite002_red.nxs'] + self.resolution = 'osi97935_graphite002_res.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.num_bins = 4 + + # Fury Seq Fit + self.func = r'name=LinearBackground,A0=0,A1=0,ties=(A1=0);name=UserFunction,Formula=Intensity*exp(-(x/Tau)),Intensity=0.304185,Tau=100;ties=(f1.Intensity=1-f0.A0)' + self.ftype = '1E_s' + self.startx = 0.022861 + self.endx = 0.118877 + + def get_reference_files(self): + return ['II.OSIRISFury.nxs', + 'II.OSIRISFuryFitSeq.nxs'] + +#------------------------- IRIS tests ----------------------------------------- + + +class IRISFuryAndFuryFit(ISISIndirectInelasticFuryAndFuryFit): + + def __init__(self): + ISISIndirectInelasticFuryAndFuryFit.__init__(self) + + # Fury + self.samples = ['irs53664_graphite002_red.nxs'] + self.resolution = 'irs53664_graphite002_res.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.num_bins = 4 + + # Fury Seq Fit + self.func = r'name=LinearBackground,A0=0,A1=0,ties=(A1=0);name=UserFunction,Formula=Intensity*exp(-(x/Tau)),Intensity=0.355286,Tau=100;ties=(f1.Intensity=1-f0.A0)' + self.ftype = '1E_s' + self.startx = 0.013717 + self.endx = 0.169171 + + def get_reference_files(self): + return ['II.IRISFury.nxs', + 'II.IRISFuryFitSeq.nxs'] + +#============================================================================== + + +class ISISIndirectInelasticFuryAndFuryFitMulti(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic Fury/FuryFit tests + + The output of Elwin is usually used with MSDFit and so we plug one into + the other in this test. 
+ ''' + + __metaclass__ = ABCMeta # Mark as an abstract class + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-6 + self.samples = [sample[:-4] for sample in self.samples] + + #load files into mantid + for sample in self.samples: + LoadNexus(sample, OutputWorkspace=sample) + LoadNexus(self.resolution, OutputWorkspace=self.resolution) + + fury_props, fury_ws = Fury(Sample=self.samples[0], + Resolution=self.resolution, + EnergyMin=self.e_min, + EnergyMax=self.e_max, + NumBins=self.num_bins, + DryRun=False, + Save=False, + Plot=False) + + # Test FuryFit Sequential + furyfitSeq_ws = furyfitMult(fury_ws.getName(), + self.func, + self.ftype, + self.startx, + self.endx, + Save=False, + Plot='None') + + self.result_names = [fury_ws.getName(), + furyfitSeq_ws] + + #remove workspaces from mantid + for sample in self.samples: + DeleteWorkspace(sample) + DeleteWorkspace(self.resolution) + + def _validate_properties(self): + """Check the object properties are in an expected state to continue""" + + if type(self.samples) != list: + raise RuntimeError("Samples should be a list of strings.") + if type(self.resolution) != str: + raise RuntimeError("Resolution should be a string.") + if type(self.e_min) != float: + raise RuntimeError("e_min should be a float") + if type(self.e_max) != float: + raise RuntimeError("e_max should be a float") + if type(self.num_bins) != int: + raise RuntimeError("num_bins should be an int") + if type(self.func) != str: + raise RuntimeError("Function should be a string.") + if type(self.ftype) != str: + raise RuntimeError("Function type should be a string.") + if type(self.startx) != float: + raise RuntimeError("startx should be a float") + if type(self.endx) != float: + raise RuntimeError("endx should be a float") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISFuryAndFuryFitMulti(ISISIndirectInelasticFuryAndFuryFitMulti): + + def skipTests(self): + return 
(platform.system() == "Darwin") + + def __init__(self): + ISISIndirectInelasticFuryAndFuryFitMulti.__init__(self) + + # Fury + self.samples = ['osi97935_graphite002_red.nxs'] + self.resolution = 'osi97935_graphite002_res.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.num_bins = 4 + + # Fury Seq Fit + self.func = r'name=LinearBackground,A0=0.510595,A1=0,ties=(A1=0);name=UserFunction,Formula=Intensity*exp( -(x/Tau)^Beta),Intensity=0.489405,Tau=0.105559,Beta=1.61112e-14;ties=(f1.Intensity=1-f0.A0)' + self.ftype = '1E_s' + self.startx = 0.0 + self.endx = 0.119681 + + def get_reference_files(self): + return ['II.OSIRISFury.nxs', + 'II.OSIRISFuryFitMulti.nxs'] + +#------------------------- IRIS tests ----------------------------------------- + + +class IRISFuryAndFuryFitMulti(ISISIndirectInelasticFuryAndFuryFitMulti): + + def __init__(self): + ISISIndirectInelasticFuryAndFuryFitMulti.__init__(self) + + # Fury + self.samples = ['irs53664_graphite002_red.nxs'] + self.resolution = 'irs53664_graphite002_res.nxs' + self.e_min = -0.4 + self.e_max = 0.4 + self.num_bins = 4 + + # Fury Seq Fit + self.func = r'name=LinearBackground,A0=0.584488,A1=0,ties=(A1=0);name=UserFunction,Formula=Intensity*exp( -(x/Tau)^Beta),Intensity=0.415512,Tau=4.848013e-14,Beta=0.022653;ties=(f1.Intensity=1-f0.A0)' + self.ftype = '1S_s' + self.startx = 0.0 + self.endx = 0.156250 + + def get_reference_files(self): + return ['II.IRISFury.nxs', + 'II.IRISFuryFitMulti.nxs'] + +#============================================================================== + + +class ISISIndirectInelasticConvFit(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic ConvFit tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + ''' + # Mark as an abstract class + __metaclass__ = ABCMeta + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-4 + LoadNexus(self.sample, 
OutputWorkspace=self.sample) + + confitSeq( + self.sample, + self.func, + self.startx, + self.endx, + self.ftype, + self.bg, + specMin=self.spectra_min, + specMax=self.spectra_max, + Plot='None', + Save=False) + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + + if type(self.sample) != str: + raise RuntimeError("Sample should be a string.") + if type(self.resolution) != str: + raise RuntimeError("Resolution should be a string.") + if not os.path.isfile(self.resolution): + raise RuntimeError("Resolution must be a file that exists.") + if type(self.func) != str: + raise RuntimeError("Function should be a string.") + if type(self.bg) != str: + raise RuntimeError("Background type should be a string.") + if type(self.ftype) != str: + raise RuntimeError("Function type should be a string.") + if type(self.startx) != float: + raise RuntimeError("startx should be a float") + if type(self.endx) != float: + raise RuntimeError("endx should be a float") + if type(self.spectra_min) != int: + raise RuntimeError("Min spectrum should be a int") + if type(self.spectra_max) != int: + raise RuntimeError("Max spectrum should be a int") + if type(self.ties) != bool: + raise RuntimeError("ties should be a boolean.") + +#------------------------- OSIRIS tests --------------------------------------- + + +class OSIRISConvFit(ISISIndirectInelasticConvFit): + + def __init__(self): + ISISIndirectInelasticConvFit.__init__(self) + self.sample = 'osi97935_graphite002_red.nxs' + self.resolution = FileFinder.getFullPath('osi97935_graphite002_res.nxs') + #ConvFit fit function + self.func = 'name=LinearBackground,A0=0,A1=0;(composite=Convolution,FixResolution=true,NumDeriv=true;name=Resolution,FileName=\"%s\";name=Lorentzian,Amplitude=2,PeakCentre=0,FWHM=0.05)' % self.resolution + self.ftype = '1L' + self.startx = -0.2 + self.endx = 0.2 + self.bg = 'FitL_s' + self.spectra_min = 0 + self.spectra_max = 41 + self.ties = False + + 
self.result_names = ['osi97935_graphite002_conv_1LFitL_s0_to_41_Result'] + + def get_reference_files(self): + return ['II.OSIRISConvFitSeq.nxs'] + + +#------------------------- IRIS tests ----------------------------------------- +class IRISConvFit(ISISIndirectInelasticConvFit): + + def __init__(self): + ISISIndirectInelasticConvFit.__init__(self) + self.sample = 'irs53664_graphite002_red.nxs' + self.resolution = FileFinder.getFullPath('irs53664_graphite002_res.nxs') + #ConvFit fit function + self.func = 'name=LinearBackground,A0=0.060623,A1=0.001343;(composite=Convolution,FixResolution=true,NumDeriv=true;name=Resolution,FileName=\"%s\";name=Lorentzian,Amplitude=1.033150,PeakCentre=-0.000841,FWHM=0.001576)' % self.resolution + self.ftype = '1L' + self.startx = -0.2 + self.endx = 0.2 + self.bg = 'FitL_s' + self.spectra_min = 0 + self.spectra_max = 50 + self.ties = False + + self.result_names = ['irs53664_graphite002_conv_1LFitL_s0_to_50_Result'] + + def get_reference_files(self): + return ['II.IRISConvFitSeq.nxs'] + +#============================================================================== + + +class ISISIndirectInelasticApplyCorrections(ISISIndirectInelasticBase): + '''A base class for the ISIS indirect inelastic Apply Corrections tests + + The workflow is defined in the _run() method, simply + define an __init__ method and set the following properties + on the object + ''' + # Mark as an abstract class + __metaclass__ = ABCMeta + + def _run(self): + '''Defines the workflow for the test''' + self.tolerance = 1e-4 + + LoadNexus(self._sample_workspace + '.nxs', OutputWorkspace=self._sample_workspace) + if self._corrections_workspace != '': + LoadNexus(self._corrections_workspace + '.nxs', OutputWorkspace=self._corrections_workspace) + if self._can_workspace != '': + LoadNexus(self._can_workspace + '.nxs', OutputWorkspace=self._can_workspace) + + output_workspaces = self._run_apply_corrections() + self.result_names = [output_workspaces['reduced_workspace']] + + 
def _run_apply_corrections(self): + abscorFeeder(self._sample_workspace, self._can_workspace, self._can_geometry, + self._using_corrections, self._corrections_workspace, **self._kwargs) + return self._get_output_workspace_names() + + def _get_output_workspace_names(self): + """ + abscorFeeder doesn't return anything, these names should exist in the ADS + apply corrections uses the following naming convention: + <instrument><sample number>_<analyser><reflection>_<mode>_<can number> + """ + + if self._can_workspace != '': + can_run = mtd[self._can_workspace].getRun() + can_run_number = can_run.getProperty('run_number').value + + mode = '' + if self._corrections_workspace != '' and self._can_workspace != '': + mode = 'Correct_%s' % can_run_number + elif self._corrections_workspace != '': + mode = 'Corrected' + else: + mode = 'Subtract_%s' % can_run_number + + workspace_name_stem = self._sample_workspace[:-3] + mode + + output_workspaces = { + 'reduced_workspace': workspace_name_stem + '_red', + 'rqw_workspace': workspace_name_stem + '_rqw', + } + + if self._can_workspace != '': + output_workspaces['result_workspace'] = workspace_name_stem + '_Result' + + return output_workspaces + + def _validate_properties(self): + '''Check the object properties are in an expected state to continue''' + +#------------------------- IRIS tests ----------------------------------------- + +class IRISApplyCorrectionsWithCan(ISISIndirectInelasticApplyCorrections): + """ Test applying corrections with just a can workspace """ + + def __init__(self): + ISISIndirectInelasticApplyCorrections.__init__(self) + + self._sample_workspace = 'irs26176_graphite002_red' + self._can_workspace = 'irs26173_graphite002_red' + self._corrections_workspace = '' + self._can_geometry = 'cyl' + self._using_corrections = False + + self._kwargs = {'RebinCan':False, 'ScaleOrNotToScale':False, + 'factor':1, 'Save':False, 'PlotResult':'None', 'PlotContrib':False} + + def get_reference_files(self): + return 
['II.IRISApplyCorrectionsWithCan.nxs'] + + +class IRISApplyCorrectionsWithCorrectionsWS(ISISIndirectInelasticApplyCorrections): + """ Test applying corrections with a corrections workspace """ + + def __init__(self): + ISISIndirectInelasticApplyCorrections.__init__(self) + + self._sample_workspace = 'irs26176_graphite002_red' + self._can_workspace = '' + self._corrections_workspace = 'irs26176_graphite002_cyl_Abs' + self._can_geometry = 'cyl' + self._using_corrections = True + + self._kwargs = {'RebinCan':False, 'ScaleOrNotToScale':False, + 'factor':1, 'Save':False, 'PlotResult':'None', 'PlotContrib':False} + + def get_reference_files(self): + return ['II.IRISApplyCorrectionsWithCorrectionsWS.nxs'] + +class IRISApplyCorrectionsWithBoth(ISISIndirectInelasticApplyCorrections): + """ Test applying corrections with both a can and a corrections workspace """ + + def __init__(self): + ISISIndirectInelasticApplyCorrections.__init__(self) + + self._sample_workspace = 'irs26176_graphite002_red' + self._can_workspace = 'irs26173_graphite002_red' + self._corrections_workspace = 'irs26176_graphite002_cyl_Abs' + self._can_geometry = 'cyl' + self._using_corrections = True + + self._kwargs = {'RebinCan':False, 'ScaleOrNotToScale':False, + 'factor':1, 'Save':False, 'PlotResult':'None', 'PlotContrib':False} + + def get_reference_files(self): + return ['II.IRISApplyCorrections.nxs'] + +#============================================================================== +# Transmission Monitor Test + +class ISISIndirectInelasticTransmissionMonitor(ISISIndirectInelasticBase): + ''' + ''' + + # Mark as an abstract class + __metaclass__ = ABCMeta + + def _run(self): + '''Defines the workflow for the test''' + + self.tolerance = 1e-4 + Load(self.sample, OutputWorkspace=self.sample) + Load(self.can, OutputWorkspace=self.can) + + IndirectTransmissionMonitor(SampleWorkspace=self.sample, CanWorkspace=self.can, + OutputWorkspace='IRISTransmissionMonitorTest') + + def _validate_properties(self): + 
'''Check the object properties are in an expected state to continue''' + + if type(self.sample) != str: + raise RuntimeError("Sample should be a string.") + if type(self.can) != str: + raise RuntimeError("Can should be a string.") + + +#------------------------- IRIS tests ----------------------------------------- +class IRISTransmissionMonitor(ISISIndirectInelasticTransmissionMonitor): + + def __init__(self): + ISISIndirectInelasticTransmissionMonitor.__init__(self) + self.sample = 'IRS26176.RAW' + self.can = 'IRS26173.RAW' + + self.result_names = ['IRISTransmissionMonitorTest'] + + def get_reference_files(self): + return ['II.IRISTransmissionMonitor.nxs'] diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectLoadAsciiTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectLoadAsciiTest.py new file mode 100644 index 0000000000000000000000000000000000000000..bbd945bfa153532a3ac41d6eb783360cddc85276 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectLoadAsciiTest.py @@ -0,0 +1,98 @@ +import stresstesting +import os +from mantid.simpleapi import * + +#==================================================================================================== +class IN10SiliconTest(stresstesting.MantidStressTest): + + def runTest(self): + import IndirectNeutron as Main + + instr = 'IN10' + ana = 'silicon' + refl = '111' + run = 'P3OT_350K' + rejectZ = False + useM = False + saveOp = False + plotOp = False + Main.InxStart(instr,run,ana,refl,rejectZ,useM,'',plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-2 + self.disableChecking.append("Instrument") + return 'IN10_P3OT_350K_silicon111_red', 'ISISIndirectLoadAscii_IN10SiliconTest.nxs' + +#==================================================================================================== +class IN13CaFTest(stresstesting.MantidStressTest): + + def runTest(self): + import IndirectNeutron as Main + + instr = 'IN13' + ana = 'CaF' + refl = '422' + run = '16347' + 
rejectZ = False + useM = False + saveOp = False + plotOp = False + Main.IN13Start(instr,run,ana,refl,rejectZ,useM,'',plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-2 + + from mantid.simpleapi import Load + + Load(Filename='ISISIndirectLoadAscii_IN13CaFTest.nxs',OutputWorkspace='ISISIndirectLoadAscii_IN13CaFTest') + Load(Filename='ISISIndirectLoadAscii_IN13CaFTest2.nxs',OutputWorkspace='ISISIndirectLoadAscii_IN13CaFTest2') + + # check each of the resulting workspaces match + ws1Match = self.checkWorkspacesMatch('IN13_16347_CaF422_q', 'ISISIndirectLoadAscii_IN13CaFTest2') + ws2Match = self.checkWorkspacesMatch('IN13_16347_CaF422_ang', 'ISISIndirectLoadAscii_IN13CaFTest') + + return ( ws1Match and ws2Match ) + + # function to check two workspaces match + # Used when the result of a test produces more than a single workspace + def checkWorkspacesMatch(self, ws1, ws2): + from mantid.simpleapi import SaveNexus, AlgorithmManager + checker = AlgorithmManager.create("CheckWorkspacesMatch") + checker.setLogging(True) + checker.setPropertyValue("Workspace1", ws1) + checker.setPropertyValue("Workspace2", ws2) + checker.setPropertyValue("Tolerance", str(self.tolerance)) + checker.setPropertyValue("CheckInstrument","0") + + checker.execute() + + if checker.getPropertyValue("Result") != 'Success!': + print self.__class__.__name__ + SaveNexus(InputWorkspace=ws2,Filename=self.__class__.__name__+'-mismatch.nxs') + return False + + return True + + +#==================================================================================================== +class IN16SiliconTest(stresstesting.MantidStressTest): + + def runTest(self): + import IndirectNeutron as Main + + instr = 'IN16' + ana = 'silicon' + refl = '111' + run = '65722' + rejectZ = True + useM = False + saveOp = False + plotOp = False + Main.IbackStart(instr,run,ana,refl,rejectZ,useM,'',plotOp,saveOp) + + def validate(self): + self.tolerance = 1e-2 + self.disableChecking.append("SpectraMap") + 
self.disableChecking.append("Instrument") + return 'IN16_65722_silicon111_red', 'ISISIndirectLoadAscii_IN16SiliconTest.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectSimulationTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectSimulationTest.py new file mode 100644 index 0000000000000000000000000000000000000000..a83e389b4397a2111ddd0652b597bf6d71395b0e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISIndirectSimulationTest.py @@ -0,0 +1,76 @@ +import stresstesting +import os +from mantid.simpleapi import * + +#==================================================================================================== +class MolDynCdlTest(stresstesting.MantidStressTest): + + def runTest(self): + from mantid.simpleapi import MolDyn + + MolDyn(Filename='DISF_NaF.cdl', + Functions=['Fqt-total', 'Sqw-total'], + Plot='None', + Save=False, + OutputWorkspace='ISISIndirectSimulationTest_MolDynCdl') + + + def validate(self): + self.tolerance = 1e-2 + self.disableChecking.append("Instrument") + + from mantid.simpleapi import Load + + Load(Filename='ISISIndirectSimulation_MolDynCDL.nxs',OutputWorkspace='ISISIndirectSimulation_MolDynCDL') + Load(Filename='ISISIndirectSimulation_MolDynCDL_SQW.nxs',OutputWorkspace='ISISIndirectSimulation_MolDynCDL_SQW') + + # check each of the resulting workspaces match + ws1Match = self.checkWorkspacesMatch('DISF_NaF_Fqt-total', 'ISISIndirectSimulation_MolDynCDL') + ws2Match = self.checkWorkspacesMatch('DISF_NaF_Sqw-total', 'ISISIndirectSimulation_MolDynCDL_SQW') + + return ( ws1Match and ws2Match ) + + + def checkWorkspacesMatch(self, ws1, ws2): + """ + Function to check two workspaces match + Used when the result of a test produces more than a single workspace + """ + + from mantid.simpleapi import SaveNexus, AlgorithmManager + + checker = AlgorithmManager.create("CheckWorkspacesMatch") + checker.setLogging(True) + checker.setPropertyValue("Workspace1", ws1) + 
checker.setPropertyValue("Workspace2", ws2) + checker.setPropertyValue("Tolerance", str(self.tolerance)) + checker.setPropertyValue("CheckInstrument","0") + + checker.execute() + + if checker.getPropertyValue("Result") != 'Success!': + print self.__class__.__name__ + SaveNexus(InputWorkspace=ws2,Filename=self.__class__.__name__+'-mismatch.nxs') + return False + + return True + + +#==================================================================================================== +class MolDynDatTest(stresstesting.MantidStressTest): + + def runTest(self): + from mantid.simpleapi import MolDyn + + MolDyn(Filename='WSH_test.dat', + Plot='None', + Save=False, + OutputWorkspace='WSH_test_iqt') + + + def validate(self): + self.tolerance = 1e-2 + self.disableChecking.append("Instrument") + + return 'WSH_test_iqt', 'ISISIndirectSimulation_MolDynDAT.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISLoadingEventData.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISLoadingEventData.py new file mode 100644 index 0000000000000000000000000000000000000000..1e15aebe9e2a8c197f00478d334eebb8218a685f --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISLoadingEventData.py @@ -0,0 +1,16 @@ +import stresstesting +from mantid.simpleapi import * + +class ISISLoadingEventData(stresstesting.MantidStressTest): + """ There is no event data inside mantid/Test directory. + Hence, all the units test that are specific to ISIS + when loading EventData should go to here. + """ + def runTest(self): + ev_ws = LoadEventNexus('LET00006278.nxs') + # isis_vms_compat/SPB[2] + self.assertEqual(ev_ws.sample().getGeometryFlag(), 1, "It does not read correctly the vms compat (check ") + # Isis correct the tof using loadTimeOfFlight method. 
+ self.assertDelta(ev_ws.getEventList(10).getTofs()[1], 1041.89,0.01, "The ISIS event correction is incorrect (check LoadEventNexus::loadTimeOfFlight") + def validate(self): + return True diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysis.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysis.py new file mode 100644 index 0000000000000000000000000000000000000000..2232f4953ce22f03a2423f57ca8fea3996559fdd --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysis.py @@ -0,0 +1,189 @@ +import math +import stresstesting +from mantid.simpleapi import * + +from abc import ABCMeta, abstractmethod + +#---------------------------------------------------------------------- +class ISISMuonAnalysis(stresstesting.MantidStressTest): + """A base class for the ISIS Muon Analysis tests + + The workflow is defined in the runTest() method, simply + define an __init__ method and set the following properties + on the object + - file_name: String pointing to nexus file to be used. + - map_name: String pointing to xml grouping file. + - instr_name: A string giving the instrument name. + - sample_run: An integer run number of the sample + - period_data: A boolean denoting whether the file has period data. + - asym: A boolean to tell whether the plot type is assymetry or not. + - x_min: Float value of the minimum x. + - x_max: Float value of the maximum x. + - rebin: Boolean to tell whether rebinning is to be done. + - rebin_fixed: Optional boolean to tell if the rebinning is in fixed steps. + - rebin_params: A string containing the rebin parameters. See wiki rebin for more info. 
+ """ + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_reference_file(self): + """Returns the name of the reference file to compare against""" + raise NotImplementedError("Implmenent get_reference_file to return " + "the name of the file to compare against.") + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return (self.instr_name + str(self.sample_run) ) + + def runTest(self): + """Defines the workflow for the test""" + + self._validate_properties() + + outputWS = (self.instr_name + str(self.sample_run) ) + + # Load + LoadMuonNexus(Filename=self.file_name, OutputWorkspace='MuonAnalysis' ) + + # Group, Crop, Clone + if(self.period_data): + GroupDetectors(InputWorkspace='MuonAnalysis_1', OutputWorkspace=outputWS, MapFile=self.map_name) + else: + GroupDetectors(InputWorkspace='MuonAnalysis', OutputWorkspace=outputWS, MapFile=self.map_name) + CropWorkspace(InputWorkspace=outputWS, OutputWorkspace=outputWS, XMin=self.x_min, XMax=self.x_max) + CloneWorkspace(InputWorkspace=outputWS, OutputWorkspace=(outputWS + '_Raw') ) + + # Rebin then... 
+ if (self.rebin ): + + ws = mtd[outputWS] + binSize = ws.dataX(0)[1]-ws.dataX(0)[0] + firstX = ws.dataX(0)[0] + lastX = ws.dataX(0)[ws.blocksize()] + + if (self.rebin_fixed): + Rebin(InputWorkspace=outputWS, OutputWorkspace=outputWS, Params=str(binSize*float(self.rebin_params) ) ) + else: + Rebin(InputWorkspace=outputWS, OutputWorkspace=outputWS, Params=self.rebin_params) + + numberOfFullBunchedBins = math.floor((lastX - firstX) / binSize ) + + # ...Crop + if(numberOfFullBunchedBins > 0): + lastX = firstX + numberOfFullBunchedBins*binSize + lastX_str = '%.15f' % lastX + CropWorkspace(InputWorkspace=outputWS, OutputWorkspace=outputWS, XMax=lastX_str ) + + GroupWorkspaces(InputWorkspaces=outputWS + ',' + outputWS + '_Raw', OutputWorkspace='MuonGroup') + + if(self.asym): + AsymmetryCalc(InputWorkspace=outputWS, OutputWorkspace=outputWS, ForwardSpectra='0', BackwardSpectra='1') + + def validate(self): + """Returns the name of the workspace & file to compare""" + self.tolerance = 1e-7 + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + result = self.get_result_workspace() + reference = self.get_reference_file() + return result, reference + + def _validate_properties(self): + """Check the object properties are + in an expected state to continue + """ + if type(self.instr_name) != str: + raise RuntimeError("instr_name property should be a string") + if type(self.file_name) != str: + raise RuntimeError("file_name property should be a string") + if type(self.period_data) != bool: + raise RuntimeError("period_data property should be a bool") + + + +#------------------------- ARGUS tests ------------------------------------------------- + +class ARGUSAnalysisFromFile(ISISMuonAnalysis): + + def __init__(self): + ISISMuonAnalysis.__init__(self) + self.file_name = 'argus0044309.nxs' + self.map_name = 'ARGUSGrouping.xml' + self.instr_name = 'ARGUS' + self.sample_run = 44309 + self.asym = True + self.period_data = False + self.x_min = 2 + 
self.x_max = 12 + self.rebin = True + self.rebin_fixed = True + self.rebin_params = '1' + + def get_reference_file(self): + return "ARGUSAnalysis.nxs" + + +#------------------------- EMU tests ------------------------------------------------- + +class EMUAnalysisFromFile(ISISMuonAnalysis): + + def __init__(self): + ISISMuonAnalysis.__init__(self) + self.file_name = 'emu00031895.nxs' + self.map_name = 'EMUGrouping.xml' + self.instr_name = 'EMU' + self.sample_run = 31895 + self.asym = True + self.period_data = True + self.x_min = 0.11 + self.x_max = 10 + self.rebin = False + + def get_reference_file(self): + return "EMUAnalysis.nxs" + + +#------------------------- HiFi tests ------------------------------------------------- + +class HiFiAnalysisFromFile(ISISMuonAnalysis): + + def __init__(self): + ISISMuonAnalysis.__init__(self) + self.file_name = 'hifi00038401.nxs' + self.map_name = 'HiFiGrouping.xml' + self.instr_name = 'Hifi' + self.sample_run = 38401 + self.asym = True + self.period_data = False + self.x_min = 1 + self.x_max = 5 + self.rebin = True + self.rebin_fixed = True + self.rebin_params = '1' + + def get_reference_file(self): + return "HiFiAnalysis.nxs" + + +#------------------------- MuSR tests ------------------------------------------------- + +class MuSRAnalysisFromFile(ISISMuonAnalysis): + + def __init__(self): + ISISMuonAnalysis.__init__(self) + self.file_name = 'MUSR00015192.nxs' + self.map_name = 'MuSRGrouping.xml' + self.instr_name = 'MuSR' + self.sample_run = 15192 + self.asym = True + self.period_data = True + self.x_min = 0.11 + self.x_max = 10 + self.rebin = True + self.rebin_fixed = False + self.rebin_params = '0.11,0.0159999,10' + + def get_reference_file(self): + return "MuSRAnalysis.nxs" + + \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysisGrouping.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysisGrouping.py new file mode 100644 index 
0000000000000000000000000000000000000000..2e404a5ebcf88daa666bad528bc547a679fa031d --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISMuonAnalysisGrouping.py @@ -0,0 +1,170 @@ +import math +import stresstesting +from mantid.simpleapi import * + +from abc import ABCMeta, abstractmethod + +#---------------------------------------------------------------------- +class ISISMuonAnalysisGrouping(stresstesting.MantidStressTest): + """A base class for the ISIS Muon Analysis tests + + The workflow is defined in the runTest() method, simply + define an __init__ method and set the following properties + on the object + - file_name: String pointing to nexus file to be used. + - map_name: String pointing to xml grouping file. + - instr_name: A string giving the instrument name. + - sample_run: An integer run number of the sample + - period_data: A boolean denoting whether the file has period data. + - asym: A boolean to tell whether the plot type is assymetry or not. + - logs: A boolean to tell whether the plot type is logorithmic or not. + - x_min: Float value of the minimum x. + - x_max: Float value of the maximum x. 
+ """ + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_reference_file(self): + """Returns the name of the reference file to compare against""" + raise NotImplementedError("Implmenent get_reference_file to return " + "the name of the file to compare against.") + + def get_result_workspace(self): + """Returns the result workspace to be checked""" + return (self.instr_name + str(self.sample_run) ) + + def runTest(self): + """Defines the workflow for the test""" + + self._validate_properties() + + outputWS = (self.instr_name + str(self.sample_run) ) + + # Load + LoadMuonNexus(Filename=self.file_name, OutputWorkspace='MuonAnalysis' ) + + # Group, Crop, Clone + if(self.period_data): + GroupDetectors(InputWorkspace='MuonAnalysis_1', OutputWorkspace=outputWS, MapFile=self.map_name) + else: + GroupDetectors(InputWorkspace='MuonAnalysis', OutputWorkspace=outputWS, MapFile=self.map_name) + CropWorkspace(InputWorkspace=outputWS, OutputWorkspace=outputWS, XMin=self.x_min, XMax=self.x_max) + CloneWorkspace(InputWorkspace=outputWS, OutputWorkspace=(outputWS + '_Raw') ) + GroupWorkspaces(InputWorkspaces=outputWS + ',' + outputWS + '_Raw', OutputWorkspace='MuonGroup') + + if(self.logs): + Logarithm(InputWorkspace=outputWS, OutputWorkspace=outputWS) + if(self.asym): + RemoveExpDecay(InputWorkspace=outputWS, OutputWorkspace=outputWS) + + + def validate(self): + """Returns the name of the workspace & file to compare""" + self.tolerance = 1e-7 + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + result = self.get_result_workspace() + reference = self.get_reference_file() + return result, reference + + def _validate_properties(self): + """Check the object properties are + in an expected state to continue + """ + if type(self.file_name) != str: + raise RuntimeError("file_name property should be a string") + if type(self.map_name) != str: + raise RuntimeError("map_name property should be a string") + if 
type(self.instr_name) != str: + raise RuntimeError("instr_name property should be a string") + if type(self.period_data) != bool: + raise RuntimeError("period_data property should be a bool") + if type(self.asym) != bool: + raise RuntimeError("asym property should be a bool") + if type(self.logs) != bool: + raise RuntimeError("log property should be a bool") + + + +#------------------------- ARGUS group fwd test ------------------------------------------------- + +class ARGUSAnalysisFromFile(ISISMuonAnalysisGrouping): + + def __init__(self): + ISISMuonAnalysisGrouping.__init__(self) + self.file_name = 'argus0044309.nxs' + self.map_name = 'ARGUSFwdGrouping.xml' + self.instr_name = 'ARGUS' + self.sample_run = 44309 + self.period_data = False + self.asym = False + self.logs = True + self.x_min = 3 + self.x_max = 10 + + def get_reference_file(self): + return "ARGUSAnalysisLogFwd.nxs" + + +#------------------------- EMU group fwd test ------------------------------------------------- + +class EMUAnalysisFromFile(ISISMuonAnalysisGrouping): + + def __init__(self): + ISISMuonAnalysisGrouping.__init__(self) + self.file_name = 'emu00031895.nxs' + self.map_name = 'EMUFwdGrouping.xml' + self.instr_name = 'EMU' + self.sample_run = 31895 + self.period_data = True + self.asym = True + self.logs = False + self.x_min = 0.11 + self.x_max = 10 + + + def get_reference_file(self): + return "EMUAnalysisAsymFwd.nxs" + + +#------------------------- HiFi group 0 test ------------------------------------------------- + +class HiFiAnalysisFromFile(ISISMuonAnalysisGrouping): + + def __init__(self): + ISISMuonAnalysisGrouping.__init__(self) + self.file_name = 'hifi00038401.nxs' + self.map_name = 'HiFi0Grouping.xml' + self.instr_name = 'Hifi' + self.sample_run = 38401 + self.period_data = False + self.asym = True + self.logs = False + self.x_min = 0.1199 + self.x_max = 7.4999 + + def get_reference_file(self): + return "HiFiAnalysisAsym0.nxs" + + +#------------------------- MuSR Group 1 test 
------------------------------------------------- + +class MuSRAnalysisFromFile(ISISMuonAnalysisGrouping): + + def __init__(self): + ISISMuonAnalysisGrouping.__init__(self) + self.file_name = 'MUSR00015192.nxs' + self.map_name = 'MuSR1Grouping.xml' + self.instr_name = 'MuSR' + self.sample_run = 15192 + self.period_data = True + self.asym = False + self.logs = True + self.x_min = 1.4 + self.x_max = 3.9 + + + def get_reference_file(self): + return "MuSRAnalysisLog1.nxs" + + \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISISReflInstrumentIDFTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISReflInstrumentIDFTest.py new file mode 100644 index 0000000000000000000000000000000000000000..d9d7db86066a59250c4d630e328ae75e9a4e9b24 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISISReflInstrumentIDFTest.py @@ -0,0 +1,57 @@ +""" +These system tests are to verify that the IDF and parameter files for POLREF, CRISP, INTER and SURF are read properly +""" + +import stresstesting +from mantid.simpleapi import * +import os +from abc import ABCMeta, abstractmethod + +class ISISReflInstrumentIDFTest(stresstesting.MantidStressTest): + + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_IDF_name(self): + """Returns the IDF""" + raise NotImplementedError("Implement get_IDF_name to return ") + + def runTest(self): + IDF_path = os.path.join(config['instrumentDefinition.directory'], self.get_IDF_name()) + ws = LoadEmptyInstrument(IDF_path) + inst = ws.getInstrument() + self.assertTrue(isinstance(inst.getNumberParameter('MonitorIntegralMin')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('MonitorIntegralMax')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('MonitorBackgroundMin')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('MonitorBackgroundMax')[0] , float)) + 
self.assertTrue(isinstance(inst.getNumberParameter('PointDetectorStart')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('PointDetectorStop')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('MultiDetectorStart')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('I0MonitorIndex')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('LambdaMin')[0] , float)) + self.assertTrue(isinstance(inst.getNumberParameter('LambdaMax')[0] , float)) + + return True; + + def doValidate(self): + return True; + +# Specialisation for testing POLREF +class POLREF_ISISReflInstrumentIDFTest(ISISReflInstrumentIDFTest): + def get_IDF_name(self): + return "POLREF_Definition.xml" + +# Specialisation for testing INTER +class INTER_ISISReflInstrumentIDFTest(ISISReflInstrumentIDFTest): + def get_IDF_name(self): + return "INTER_Definition.xml" + +# Specialisation for testing SURF +class SURF_ISISReflInstrumentIDFTest(ISISReflInstrumentIDFTest): + def get_IDF_name(self): + return "SURF_Definition.xml" + +# Specialisation for testing CRISP +class CRISP_ISISReflInstrumentIDFTest(ISISReflInstrumentIDFTest): + def get_IDF_name(self): + return "CRISP_Definition.xml" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py new file mode 100644 index 0000000000000000000000000000000000000000..c6226dee4137fdd8f215a02b101afe9b550399d4 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py @@ -0,0 +1,418 @@ +""" Sample LET reduction scrip """ +import os +os.environ["PATH"] = r"c:/Mantid/Code/builds/br_master/bin/Release;"+os.environ["PATH"] + + +from Direct.ReductionWrapper import * +try: + import reduce_vars as rv +except: + rv = None + +# +def find_binning_range(energy,ebin): + """ function finds the binning range used in multirep mode + for merlin ls=11.8,lm2=10. 
mult=2.8868 dt_DAE=1 + for LET ls=25,lm2=23.5 mult=4.1 dt_DAE=1.6 + all these values have to be already present in IDF and should be taken from there + + # THIS FUNCTION SHOULD BE MADE GENERIG AND MOVED OUT OF HERE + """ + + InstrName = config['default.instrument'][0:3] + if InstrName.find('LET')>-1: + ls =25 + lm2 =23.5 + mult=4.1 + dt_DAE = 1.6 + elif InstrName.find('MER')>-1: + ls =11.8 + lm2=10 + mult=2.8868 + dt_DAE = 1 + else: + raise RuntimeError("Find_binning_range: unsupported/unknown instrument found") + + energy=float(energy) + + emin=(1.0-ebin[2])*energy #minimum energy is with 80% energy loss + lam=(81.81/energy)**0.5 + lam_max=(81.81/emin)**0.5 + tsam=252.82*lam*ls #time at sample + tmon2=252.82*lam*lm2 #time to monitor 6 on LET + tmax=tsam+(252.82*lam_max*mult) #maximum time to measure inelastic signal to + t_elastic=tsam+(252.82*lam*mult) #maximum time of elastic signal + tbin=[int(tmon2),dt_DAE,int(tmax)] + energybin=[float("{0: 6.4f}".format(elem*energy)) for elem in ebin] + + return (energybin,tbin,t_elastic) +#-------------------------------------------------------------------------------------------------------- +def find_background(ws_name,bg_range): + """ Function to find background from multirep event workspace + dt_DAE = 1 for MERLIN and 1.6 for LET + should be precalculated or taken from IDF + + # THIS FUNCTION SHOULD BE MADE GENERIC AND MOVED OUT OF HERE + """ + InstrName = config['default.instrument'][0:3] + if InstrName.find('LET')>-1: + dt_DAE = 1.6 + elif InstrName.find('MER')>-1: + dt_DAE = 1 + else: + raise RuntimeError("Find_binning_range: unsupported/unknown instrument found") + + bg_ws_name = 'bg' + delta=bg_range[1]-bg_range[0] + Rebin(InputWorkspace='w1',OutputWorkspace=bg_ws_name,Params=[bg_range[0],delta,bg_range[1]],PreserveEvents=False) + v=(delta)/dt_DAE + CreateSingleValuedWorkspace(OutputWorkspace='d',DataValue=v) + Divide(LHSWorkspace=bg_ws_name,RHSWorkspace='d',OutputWorkspace=bg_ws_name) + return bg_ws_name + + +class 
ReduceLET_OneRep(ReductionWrapper): + @MainProperties + def def_main_properties(self): + """ Define main properties used in reduction """ + + + prop = {} + ei = 7.0 + ebin = [-1,0.002,0.95] + + prop['sample_run'] = 'LET00006278.nxs' + prop['wb_run'] = 'LET00005545.raw' + prop['incident_energy'] = ei + prop['energy_bins'] = ebin + + + # Absolute units reduction properties. + #prop['monovan_run'] = 17589 + #prop['sample_mass'] = 10/(94.4/13) # -- this number allows to get approximately the same system test intensities for MAPS as the old test + #prop['sample_rmm'] = 435.96 # + return prop + + @AdvancedProperties + def def_advanced_properties(self): + """ separation between simple and advanced properties depends + on scientist, experiment and user. + main properties override advanced properties. + """ + + prop = {} + prop['map_file'] = 'rings_103' + prop['hard_mask_file'] ='LET_hard.msk' + prop['det_cal_file'] = 'det_corrected7.dat' + prop['save_format']='' + prop['bleed'] = False + prop['norm_method']='current' + prop['detector_van_range']=[0.5,200] + prop['load_monitors_with_workspace']=True + #TODO: this has to be loaded from the workspace and work without this + #prop['ei-mon1-spec']=40966 + + + return prop + # + @iliad + def reduce(self,input_file=None,output_directory=None): + # run reduction, write auxiliary script to add something here. 
+ + prop = self.reducer.prop_man + # Ignore input properties for the time being + white_ws = 'wb_wksp' + LoadRaw(Filename='LET00005545.raw',OutputWorkspace=white_ws) + #prop.wb_run = white_ws + + sample_ws = 'w1' + monitors_ws = sample_ws + '_monitors' + LoadEventNexus(Filename='LET00006278.nxs',OutputWorkspace=sample_ws, + SingleBankPixelsOnly='0',LoadMonitors='1', + MonitorsAsEvents='1') + ConjoinWorkspaces(InputWorkspace1=sample_ws, InputWorkspace2=monitors_ws) + #prop.sample_run = sample_ws + + + ebin = prop.energy_bins + ei = prop.incident_energy + + (energybin,tbin,t_elastic) = find_binning_range(ei,ebin) + Rebin(InputWorkspace=sample_ws,OutputWorkspace=sample_ws, Params=tbin, PreserveEvents='1') + + prop.bkgd_range=[int(t_elastic),int(tbin[2])] + + ebinstring = str(energybin[0])+','+str(energybin[1])+','+str(energybin[2]) + self.reducer.prop_man.energy_bins = ebinstring + + red = DirectEnergyConversion() + + red.initialise(prop) + outWS = red.convert_to_energy(white_ws,sample_ws) + #SaveNexus(ws,Filename = 'MARNewReduction.nxs') + + #when run from web service, return additional path for web server to copy data to" + return outWS + + def __init__(self,rv=None): + """ sets properties defaults for the instrument with Name""" + ReductionWrapper.__init__(self,'LET',rv) +#---------------------------------------------------------------------------------------------------------------------- + +class ReduceLET_MultiRep2014(ReductionWrapper): + @MainProperties + def def_main_properties(self): + """ Define main properties used in reduction """ + + + prop = {} + ei=[3.4,8.] # multiple energies provided in the data file + ebin=[-4,0.002,0.8] #binning of the energy for the spe file. The numbers are as a fraction of ei [from ,step, to ] + + prop['sample_run'] = [14305] + prop['wb_run'] = 5545 + prop['incident_energy'] = ei + prop['energy_bins'] = ebin + + + # Absolute units reduction properties. 
+ # Vanadium labelled Dec 2011 - flat plate of dimensions: 40.5x41x2.0# volume = 3404.025 mm**3 mass= 20.79 + prop['monovan_run'] = 14319 # vanadium run in the same configuration as your sample + prop['sample_mass'] = 20.79 # 17.25 # mass of your sample (PrAl3) + prop['sample_rmm'] = 50.9415 # 221.854 # molecular weight of your sample + + return prop + + @AdvancedProperties + def def_advanced_properties(self): + """ separation between simple and advanced properties depends + on scientist, experiment and user. + main properties override advanced properties. + """ + + prop = {} + prop['map_file'] = 'rings_103.map' + prop['det_cal_file'] = 'det_corrected7.nxs' + prop['save_format']='' + prop['bleed'] = False + prop['norm_method']='current' + prop['detector_van_range']=[2,7] + prop['background_range'] = [92000,98000] # TOF range for the calculating flat background + prop['hardmaskOnly']='LET_hard.msk' # diag does not work well on LET. At present only use a hard mask RIB has created + + # Disable internal background check TODO: fix internal background check + prop['check_background']=False + + prop['monovan_mapfile'] = 'rings_103.map' + + #TODO: Correct monitor, depending on workspace. This has to be loaded from the workspace and work without this settings + #prop['ei-mon1-spec']=40966 + + + + return prop + # + @iliad + def reduce(self,input_file=None,output_directory=None): + # run reduction, write auxiliary script to add something here. 
+ + red_properties = self.reducer.prop_man + ####### + wb= red_properties.wb_run + run_no = red_properties.sample_run + bg_range = red_properties.background_range + ei = red_properties.incident_energy + ebin = red_properties.energy_bins + + remove_background = True #if true then will subtract a flat background in time from the time range given below otherwise put False + + red = DirectEnergyConversion() + + red.initialise(red_properties) + + energybin,tbin,t_elastic = find_binning_range(ei[0],ebin) + energybin,tbin,t_elastic = find_binning_range(ei[1],ebin) + + # loads the white-beam (or rather the long monovan ). Does it as a raw file to save time as the event mode is very large + if 'wb_wksp' in mtd: + wb_wksp=mtd['wb_wksp'] + else: #only load white-beam if not already there + wb_wksp = LoadRaw(Filename='LET0000'+str(wb)+'.raw',OutputWorkspace='wb_wksp') + #dgreduce.getReducer().det_cal_file = 'det_corrected7.nxs' + #wb_wksp = dgreduce.getReducer().load_data('LET0000'+str(wb)+'.raw','wb_wksp') + #dgreduce.getReducer().det_cal_file = wb_wksp + + for run in [run_no]: #loop around runs + fname='LET0000'+str(run)+'.nxs' + print ' processing file ', fname + #w1 = dgreduce.getReducer().load_data(run,'w1') + Load(Filename=fname,OutputWorkspace='w1',LoadMonitors='1') + + + if remove_background: + bg_ws_name=find_background('w1',bg_range) + + ############################################################################################# + # this section finds all the transmitted incident energies if you have not provided them + #if len(ei) == 0: -- not tested here -- should be unit test for that. 
+ #ei = find_chopper_peaks('w1_monitors') + print 'Energies transmitted are:' + print (ei) + + RenameWorkspace(InputWorkspace = 'w1',OutputWorkspace='w1_storage') + RenameWorkspace(InputWorkspace = 'w1_monitors',OutputWorkspace='w1_mon_storage') + + #now loop around all energies for the run + result =[] + for ind,energy in enumerate(ei): + print float(energy) + (energybin,tbin,t_elastic) = find_binning_range(energy,ebin) + print " Rebinning will be performed in the range: ",energybin + # if we calculate more then one energy, initial workspace will be used more then once + if ind <len(ei)-1: + CloneWorkspace(InputWorkspace = 'w1_storage',OutputWorkspace='w1') + CloneWorkspace(InputWorkspace = 'w1_mon_storage',OutputWorkspace='w1_monitors') + else: + RenameWorkspace(InputWorkspace = 'w1_storage',OutputWorkspace='w1') + RenameWorkspace(InputWorkspace = 'w1_mon_storage',OutputWorkspace='w1_monitors') + + if remove_background: + w1=Rebin(InputWorkspace='w1',OutputWorkspace='w1',Params=tbin,PreserveEvents=False) + Minus(LHSWorkspace='w1',RHSWorkspace='bg',OutputWorkspace='w1') + + + ###################################################################### + # ensure correct round-off procedure + argi={} + argi['monovan_integr_range']=[round(ebin[0]*energy,4),round(ebin[2]*energy,4)] # integration range of the vanadium + #MonoVanWSName = None + + # absolute unit reduction -- if you provided MonoVan run or relative units if monoVan is not present + out=red.convert_to_energy(wb_wksp,"w1",energy,energybin,**argi) + + ws_name = 'LETreducedEi{0:2.1f}'.format(energy) + RenameWorkspace(InputWorkspace=out,OutputWorkspace=ws_name) + result.append(mtd[ws_name]) + + #TODO: this will go when multirep mode is implemented properly + # Store processed workspaces back to properties + wb_wksp = PropertyManager.wb_run.get_workspace() + + + #SaveNXSPE(InputWorkspace=ws_name,Filename=ws_name+'.nxspe') + + ####### + #when run from web service, return additional path for web server to copy data 
class ReduceLET_MultiRep2015(ReductionWrapper):
    """Multi-rep LET reduction, 2015 style, delegating to ReductionWrapper.reduce."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        prop = {}
        ei = [3.4, 8.]          # multiple energies provided in the data file
        ebin = [-4, 0.002, 0.8]  # energy binning for the spe file, as fractions of ei [from, step, to]

        prop['sample_run'] = [14305]
        prop['wb_run'] = 5545
        prop['incident_energy'] = ei
        prop['energy_bins'] = ebin

        # Absolute units reduction properties.
        # Vanadium labelled Dec 2011 - flat plate 40.5x41x2.0; volume = 3404.025 mm**3, mass = 20.79
        prop['monovan_run'] = 14319   # vanadium run in the same configuration as the sample
        prop['sample_mass'] = 20.79   # 17.25 # mass of your sample (PrAl3)
        prop['sample_rmm'] = 50.9415  # 221.854 # molecular weight of your sample
        return prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        prop = {}
        prop['map_file'] = 'rings_103.map'
        prop['det_cal_file'] = 'det_corrected7.nxs'
        prop['bleed'] = False
        prop['norm_method'] = 'current'
        prop['detector_van_range'] = [2, 7]
        prop['background_range'] = [92000, 98000]  # TOF range for calculating the flat background
        prop['hardmaskOnly'] = 'LET_hard.msk'  # diag does not work well on LET; only a hard mask is used

        prop['check_background'] = True

        # BUG FIX: 'monovan_mapfile' was assigned twice with the same value in
        # the original; the duplicate assignment has been removed.
        prop['monovan_mapfile'] = 'rings_103.map'
        prop['save_format'] = ''
        # If two input files with the same name but different extensions are
        # found, prefer .nxs: for LET this chooses event mode over a raw file
        # written in histogram mode.
        prop['data_file_ext'] = '.nxs'

        #TODO: correct monitor, depending on workspace; this has to be loaded
        # from the workspace and work without this setting
        #prop['ei-mon1-spec'] = 40966
        return prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed or special features
        are requested.
        """
        res = ReductionWrapper.reduce(self, input_file, output_directory)
        # Rename each per-energy result to a predictable workspace name.
        en = self.reducer.prop_man.incident_energy
        for ind, energy in enumerate(en):
            ws_name = 'LETreducedEi{0:2.1f}'.format(energy)
            RenameWorkspace(InputWorkspace=res[ind], OutputWorkspace=ws_name)
            res[ind] = mtd[ws_name]
        #SaveNexus(ws, Filename='LETNewReduction.nxs')
        return res

    def __init__(self, rv=None):
        """Set property defaults for the LET instrument."""
        ReductionWrapper.__init__(self, 'LET', rv)
class ReduceMAPS(ReductionWrapper):
    """Sample MAPS reduction delegating to the standard ReductionWrapper flow."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        main_prop = {
            'sample_run': 17269,
            'wb_run': 17186,
            'incident_energy': 150,
            'energy_bins': [-15, 3, 135],
            # Absolute units reduction properties.
            # This sample mass gives approximately the same system-test
            # intensities for MAPS as the old test.
            'monovan_run': 17589,
            'sample_mass': 10 / (94.4 / 13),
            'sample_rmm': 435.96,
        }
        return main_prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        adv_prop = {
            'map_file': 'default',
            #'monovan_mapfile': 'default',  # '4to1_mid_lowang.map' # default
            'hard_mask_file': None,
            #'det_cal_file': ?  # default?
            'save_format': '',
            'diag_remove_zero': False,
        }
        # These are the parameters that were used in the old
        # MAPS_Parameters.xml test.
        adv_prop['wb-integr-max'] = 300
        #adv_prop['wb_integr_range'] = [20, 300]
        adv_prop['bkgd-range-min'] = 12000
        adv_prop['bkgd-range-max'] = 18000
        #adv_prop['bkgd_range'] = [12000, 18000]
        adv_prop['diag_samp_hi'] = 1.5
        adv_prop['diag_samp_sig'] = 3.3
        adv_prop['diag_van_hi'] = 2.0
        adv_prop['abs_units_van_range'] = [-40, 40]
        return adv_prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        out_ws = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(ws, Filename='MARNewReduction.nxs')
        return out_ws

    def __init__(self, web_var=None):
        """Set property defaults for the MAPS instrument."""
        ReductionWrapper.__init__(self, 'MAP', web_var)
#----------------------------------------------------------------------------------------------------------------------


if __name__ == "__main__":
    maps_dir = 'd:/Data/MantidSystemTests/Data'
    data_dir = 'd:/Data/Mantid_Testing/14_12_15'
    ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults'
    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir, maps_dir, ref_data_dir))
    #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
    # Folder to save resulting spe/nxspe files to.
    config['defaultsave.directory'] = data_dir

    # Execute the reduction from Mantid.
    rd = ReduceMAPS()
    rd.def_advanced_properties()
    rd.def_main_properties()

    #using_web_data = False
    #if not using_web_data:
    #    run_dir = os.path.dirname(os.path.realpath(__file__))
    #    file = os.path.join(run_dir, 'reduce_vars.py')
    #    rd.save_web_vars(file)

    rd.reduce()
class ReduceMERLIN(ReductionWrapper):
    """Sample MERLIN reduction delegating to the standard ReductionWrapper flow."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        # Stray semicolons from the original have been removed throughout.
        prop = {}
        prop['sample_run'] = 6398
        prop['wb_run'] = 6399
        prop['incident_energy'] = 18
        prop['energy_bins'] = [-10, 0.2, 15]

        # Absolute units reduction properties (disabled for this test):
        #prop['monovan_run'] = 17589
        #prop['sample_mass'] = 10/(94.4/13)
        #prop['sample_rmm'] = 435.96
        return prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        prop = {}
        prop['map_file'] = 'rings_113.map'
        #prop['monovan_mapfile'] = 'default'  # '4to1_mid_lowang.map' # default
        prop['hard_mask_file'] = None
        prop['det_cal_file'] = 6399  # ? default?
        prop['save_format'] = ''
        return prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        outWS = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(ws, Filename='MARNewReduction.nxs')
        return outWS

    def __init__(self, rv=None):
        """Set property defaults for the MERLIN instrument.

        CONSISTENCY FIX: every sibling reducer class accepts the web-variables
        object as an optional constructor argument; the original hard-wired the
        module-global web_var here.  The global is kept as the fallback, so
        existing no-argument callers behave identically.
        """
        if rv is None:
            rv = web_var
        ReductionWrapper.__init__(self, 'MER', rv)
#----------------------------------------------------------------------------------------------------------------------


if __name__ == "__main__":
    maps_dir = 'd:/Data/MantidSystemTests/Data'
    data_dir = 'd:/Data/Mantid_Testing/14_11_27'
    ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults'
    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir, maps_dir, ref_data_dir))
    #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
    # Folder to save resulting spe/nxspe files to.
    config['defaultsave.directory'] = data_dir

    # Execute the reduction from Mantid.
    rd = ReduceMERLIN()
    rd.def_advanced_properties()
    rd.def_main_properties()

    #using_web_data = False
    #if not using_web_data:
    #    run_dir = os.path.dirname(os.path.realpath(__file__))
    #    file = os.path.join(run_dir, 'reduce_vars.py')
    #    rd.export_changed_values(file)

    rd.reduce()
class ReduceMARIFromFile(ReductionWrapper):
    """Sample MARI reduction taking its input runs from data files."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        prop = {}
        prop['sample_run'] = 11001
        prop['wb_run'] = 11060
        prop['incident_energy'] = 12
        prop['energy_bins'] = [-11, 0.05, 11]

        # Absolute units reduction properties.
        prop['monovan_run'] = 11015
        prop['sample_mass'] = 10
        prop['sample_rmm'] = 435.96
        return prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; the split between simple and advanced
        properties depends on scientist, experiment and user.  Main properties
        override advanced properties.
        """
        prop = {}
        prop['map_file'] = "mari_res.map"
        prop['monovan_mapfile'] = "mari_res.map"
        prop['hard_mask_file'] = "mar11015.msk"
        prop['det_cal_file'] = 11060
        prop['save_format'] = ''
        return prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        outWS = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(outWS, Filename='MARNewReduction.nxs')
        return outWS

    # BUG FIX: the original defined validate_result twice; the first (empty)
    # definition was dead code, silently shadowed by this one, and is removed.
    def validate_result(self, build_validation=False):
        """Validate the reduction result against the stored reference.

        If build_validation is True, build and save a new reference workspace
        rather than validating against the old one.
        """
        rez, message = ReductionWrapper.build_or_validate_result(
            self, 11001, "MARIReduction.nxs", build_validation, 1.e-2)
        return rez, message

    def __init__(self, web_var=None):
        """Set property defaults for the MARI instrument."""
        ReductionWrapper.__init__(self, 'MAR', web_var)
def main(input_file=None, output_directory=None):
    """Entry point used when running from the web service.

    Do not modify except to change the ReductionWrapper subclass used
    (e.g. ReduceMARI here) or the folder the web service copies data to.
    This method will go when the web service implements a proper factory.
    """
    # Note: web variables are passed through to the reducer here.
    rd = ReduceMARIFromFile(web_var)
    rd.reduce(input_file, output_directory)
    # Change to the name of the folder to save data to.
    return ''

#----------------------------------------------------------------------------------------------------------------------
class ReduceMARIFromWorkspace(ReductionWrapper):
    """Sample MARI reduction taking its input runs from pre-loaded workspaces."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        main_prop = {
            'sample_run': Load(Filename='MAR11001.RAW', OutputWorkspace='MAR11001.RAW'),
            # White-beam workspace.
            'wb_run': Load(Filename='MAR11060.RAW', OutputWorkspace='MAR11060.RAW'),
            'incident_energy': 12,
            'energy_bins': [-11, 0.05, 11],
            # Absolute units reduction properties.
            'monovan_run': Load(Filename='MAR11015.RAW', OutputWorkspace='MAR11015.RAW'),
            'sample_mass': 10,
            'sample_rmm': 435.96,
        }
        return main_prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        adv_prop = {
            'map_file': "mari_res.map",
            'monovan_mapfile': "mari_res.map",
            'hard_mask_file': "mar11015.msk",
            # MARI calibration uses one of the data files defined on the
            # instrument; here the vanadium run is used for calibration.
            # TODO: Why not a workspace?
            'det_cal_file': "11060",
            'save_format': '',
        }
        return adv_prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        reduced = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(reduced, Filename='MARNewReduction.nxs')
        return reduced

    def __init__(self, web_var=None):
        """Set property defaults for the MARI instrument."""
        ReductionWrapper.__init__(self, 'MAR', web_var)
class ReduceMARIMon2Norm(ReductionWrapper):
    """MARI reduction from workspaces, normalised by monitor 2."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        main_prop = {
            'sample_run': Load(Filename='MAR11001.RAW', OutputWorkspace='MAR11001.RAW'),
            # White-beam workspace.
            'wb_run': Load(Filename='MAR11060.RAW', OutputWorkspace='MAR11060.RAW'),
            'incident_energy': 12,
            'energy_bins': [-11, 0.05, 11],
            # Absolute units reduction properties.
            'monovan_run': 11015,  # Load(Filename='MAR11015.RAW',OutputWorkspace='MAR11015.RAW')
            'sample_mass': 10,
            'sample_rmm': 435.96,
        }
        return main_prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        adv_prop = {
            'map_file': "mari_res.map",
            'monovan_mapfile': "mari_res.map",
            'hard_mask_file': "mar11015.msk",
            #'hardmaskOnly': "mar11015.msk",
            'normalise_method': 'monitor-2',
            # Reduction from a workspace currently needs a detector calibration
            # file.  MARI calibration uses one of the data files defined on the
            # instrument; here the vanadium run is used for calibration.
            # TODO: Why not a workspace?  Check it.
            'det_cal_file': "11060",
            # NOTE(review): siblings use '' here; the empty list looks
            # deliberate (alternate empty form) and is preserved.
            'save_format': [],
        }
        return adv_prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        reduced = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(reduced, Filename='MARNewReduction.nxs')
        return reduced

    def __init__(self, web_var=None):
        """Set property defaults for the MARI instrument."""
        ReductionWrapper.__init__(self, 'MAR', web_var)
#----------------------------------------------------------------------------------------------------------------------
class MARIReductionSum(ReductionWrapper):
    """MARI reduction that sums several sample runs together."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        main_prop = {
            'sample_run': [11001, 11015],
            'wb_run': 11060,
            'incident_energy': 11,
            'energy_bins': [-11, 0.05, 11],
            'sum_runs': True,
        }
        # Absolute units reduction properties (disabled for this test):
        #main_prop['monovan_run'] = 11015
        #main_prop['sample_mass'] = 32.58
        #main_prop['sample_rmm'] = 50.9415  # 435.96
        return main_prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        adv_prop = {
            'map_file': "mari_res.map",
            'monovan_mapfile': "mari_res.map",
            'hard_mask_file': "mar11015.msk",
            #'det_cal_file': 11060,
            'save_format': '',
        }
        return adv_prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        reduced = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(reduced, Filename='MARNewReduction.nxs')
        return reduced

    def __init__(self, web_var=None):
        """Set property defaults for the MARI instrument."""
        ReductionWrapper.__init__(self, 'MAR', web_var)
class ReduceMARIMonitorsSeparate(ReductionWrapper):
    """MARI reduction with monitors loaded separately from the detector data."""

    @MainProperties
    def def_main_properties(self):
        """Define the main properties used in the reduction."""
        main_prop = {
            'sample_run': 11001,
            # White-beam workspace; simulates a workspace without monitors.
            'wb_run': Load(Filename='MAR11060.RAW', OutputWorkspace='MAR11060.RAW',
                           LoadMonitors='Exclude'),
            'incident_energy': 12,
            'energy_bins': [-11, 0.05, 11],
            # Absolute units reduction properties.
            'monovan_run': 11015,
            'sample_mass': 10,
            'sample_rmm': 435.96,
        }
        return main_prop

    @AdvancedProperties
    def def_advanced_properties(self):
        """Advanced properties; main properties override advanced properties."""
        adv_prop = {
            'map_file': "mari_res.map",
            'monovan_mapfile': "mari_res.map",
            'hard_mask_file': "mar11015.msk",
            # MARI calibration uses one of the data files defined on the
            # instrument; here the vanadium run is used for calibration.
            # TODO: Why not a workspace?
            'det_cal_file': "11060",
            'save_format': '',
            'load_monitors_with_workspace': False,
        }
        return adv_prop

    @iliad
    def reduce(self, input_file=None, output_directory=None):
        """Execute the reduction over a single file.

        Overload only if custom reduction is needed.
        """
        out_ws = ReductionWrapper.reduce(self, input_file, output_directory)
        #SaveNexus(out_ws, Filename='MARNewReduction.nxs')
        return out_ws

    def __init__(self, web_var=None):
        """Set property defaults for the MARI instrument."""
        ReductionWrapper.__init__(self, 'MAR', web_var)


if __name__ == "__main__":
    maps_dir = 'd:/Data/MantidSystemTests/Data'
    data_dir = 'd:/Data/Mantid_Testing/14_12_15'
    ref_data_dir = 'd:/Data/MantidSystemTests/SystemTests/AnalysisTests/ReferenceResults'
    config.setDataSearchDirs('{0};{1};{2}'.format(data_dir, maps_dir, ref_data_dir))
    #config.appendDataSearchDir('d:/Data/Mantid_GIT/Test/AutoTestData')
    # Folder to save resulting spe/nxspe files to.
    config['defaultsave.directory'] = data_dir

    # Execute the reduction from Mantid.
    rd = ReduceMARIFromFile()
    #rd = ReduceMARIMon2Norm()
    #rd = ReduceMARIMonitorsSeparate()
    rd.def_advanced_properties()
    rd.def_main_properties()

    #run_dir = os.path.dirname(os.path.realpath(__file__))
    #file = os.path.join(run_dir, 'reduce_vars.py')
    #rd.save_web_variables(file)

    if rd.reducer.sum_runs:
        red_ws = rd.reduce()
    else:
        runs = PropertyManager.sample_run.get_run_list()
        for run in runs:
            red_ws = rd.reduce(run)
    #end
class MSGDiffractionReductionTest(stresstesting.MantidStressTest):
    """
    Base class for tests that use the MSGDiffractionReduction algorithm.

    Subclasses set instrument/mode/raw_file/detector_range/rebinning/
    output_workspace_group in __init__ and implement get_reference_file.
    """

    __metaclass__ = ABCMeta

    @abstractmethod
    def get_reference_file(self):
        """
        Gets the reference result file for workspace comparison.
        """
        raise NotImplementedError()

    def runTest(self):
        """
        Runs an MSGDiffractionReduction with the configured parameters.
        """
        from mantid.simpleapi import MSGDiffractionReduction
        from mantid import mtd

        MSGDiffractionReduction(InputFiles=self.raw_file,
                                OutputWorkspace=self.output_workspace_group,
                                Instrument=self.instrument,
                                Mode=self.mode,
                                DetectorRange=self.detector_range,
                                RebinParam=self.rebinning)

        # Validate against the first (only) member of the output group.
        self._output_workspace = mtd[self.output_workspace_group].getNames()[0]

    def validate(self):
        """
        Validates the result workspace with the reference file.
        """
        self.disableChecking.append('Instrument')
        return self._output_workspace, self.get_reference_file()


#-------------------------------------------------------------------------------
class IRISDiffspecDiffractionTest(MSGDiffractionReductionTest):
    """IRIS diffspec diffraction reduction test."""

    def __init__(self):
        MSGDiffractionReductionTest.__init__(self)

        self.instrument = 'IRIS'
        self.mode = 'diffspec'
        self.raw_file = 'IRS21360.raw'
        self.detector_range = [105, 112]
        self.rebinning = '3.0,0.001,4.0'
        self.output_workspace_group = 'IRIS_Diffraction_DiffSpec_Test'

    def get_reference_file(self):
        return 'IRISDiffspecDiffractionTest.nxs'


#-------------------------------------------------------------------------------
class TOSCADiffractionTest(MSGDiffractionReductionTest):
    """TOSCA diffspec diffraction reduction test."""

    def __init__(self):
        MSGDiffractionReductionTest.__init__(self)

        self.instrument = 'TOSCA'
        self.mode = 'diffspec'
        self.raw_file = 'TSC11453.raw'
        self.detector_range = [146, 149]
        self.rebinning = '0.5,0.001,2.1'
        self.output_workspace_group = 'TOSCA_Diffraction_DiffSpec_Test'

    def get_reference_file(self):
        return 'TOSCADiffractionTest.nxs'


#-------------------------------------------------------------------------------
class OSIRISDiffspecDiffractionTest(MSGDiffractionReductionTest):
    """OSIRIS diffspec diffraction reduction test."""

    def __init__(self):
        MSGDiffractionReductionTest.__init__(self)

        self.instrument = 'OSIRIS'
        self.mode = 'diffspec'
        self.raw_file = 'osiris00101300.raw'
        self.detector_range = [3, 962]
        self.rebinning = '2.0,0.001,3.0'
        self.output_workspace_group = 'OSIRIS_Diffraction_DiffSpec_Test'

    def get_reference_file(self):
        return 'OsirisDiffspecDiffractionTest.nxs'


#-------------------------------------------------------------------------------
class OsirisDiffOnlyTest(stresstesting.MantidStressTest):
    """OSIRIS diffraction-only reduction test."""

    def runTest(self):
        from mantid.simpleapi import OSIRISDiffractionReduction
        OSIRISDiffractionReduction(
            OutputWorkspace="OsirisDiffractionTest",
            Sample="OSI89813.raw, OSI89814.raw, OSI89815.raw, OSI89816.raw, OSI89817.raw",
            CalFile="osiris_041_RES10.cal",
            Vanadium="OSI89757, OSI89758, OSI89759, OSI89760, OSI89761")

    def validate(self):
        self.disableChecking.append('Instrument')
        return 'OsirisDiffractionTest', 'OsirisDiffractionTest.nxs'


class IndirectEnergyConversionTest(stresstesting.MantidStressTest):
    """IRIS indirect energy conversion reduction test."""

    def runTest(self):
        instrument = 'IRIS'
        analyser = 'graphite'
        reflection = '002'
        detector_range = [3, 53]
        files = 'irs21360.raw'
        rebin_string = '-0.5,0.005,0.5'

        # BUG FIX: the keyword was misspelled 'RebiNString' in the original;
        # the algorithm property is named 'RebinString'.
        InelasticIndirectReduction(InputFiles=files,
                                   RebinString=rebin_string,
                                   DetectorRange=detector_range,
                                   Instrument=instrument,
                                   Analyser=analyser,
                                   Reflection=reflection,
                                   OutputWorkspace='__IndirectEnergyCOnversionTest_out_group')

    def validate(self):
        self.disableChecking.append('Instrument')
        self.disableChecking.append('SpectraMap')
        return 'irs21360_graphite002_red', 'IndirectEnergyConversionTest.nxs'
class L2QScriptTest(stresstesting.MantidStressTest):
    """Exercises the isis_reflectometry l2q script on an INTER run."""

    def runTest(self):
        ws = Load(Filename="INTER00013469.nxs")
        ws = ConvertUnits(InputWorkspace=ws, Target="Wavelength", AlignBins=1)
        # Monitor spectrum (index 2) and detector spectra (index 3 onwards),
        # cropped to the same wavelength window.
        direct_beam = CropWorkspace(InputWorkspace=ws, XMin=0.8, XMax=14.5,
                                    StartWorkspaceIndex=2, EndWorkspaceIndex=2)
        detectors = CropWorkspace(InputWorkspace=ws, XMin=0.8, XMax=14.5,
                                  StartWorkspaceIndex=3)
        # NOTE(review): the normalised ratio is computed but l2q below is run
        # on the full workspace, not on this ratio -- preserved as-is; verify.
        normalised = Divide(LHSWorkspace=detectors, RHSWorkspace=direct_beam,
                            AllowDifferentNumberSpectra=True)
        detector_component_name = 'linear-detector'
        sample_component_name = 'some-surface-holder'
        theta = 0.7
        # This generates an output workspace called IvsQ.
        l2q(ws, detector_component_name, theta, sample_component_name)

    def validate(self):
        self.disableChecking.append('Instrument')
        return 'IvsQ', 'L2QReferenceResult.nxs'
ici.MaskFile('MASK.094AA') + batch.BatchReduce(self.csv_file, 'nxs', plotresults=False, saveAlgs={'SaveNexus':'nxs'}) + + print ' reduction without' + + ici._refresh_singleton() + + ici.LOQ() + ici.Detector("main-detector-bank") + ici.Set1D() + ici.MaskFile('MASK.094AA') + LOQ99630 = Load(runnum) + LOQ99630 += LOQ99630 + ici.AssignSample(LOQ99630, reload=False) + self.result = ici.WavRangeReduction() + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + self.tolerance = 1.0e-10 #almost ZERO! + print 'validating', self.result, self.output_file + return self.result,self.output_file+'.nxs' + + + + def __del__(self): + # remove all created files. + defaultsave = config['defaultsave.directory'] + for file_name in ['LOQ99630-add.nxs', self.output_file+'.nxs', self.csv_file ]: + try: + os.remove(os.path.join(defaultsave,file_name)) + except: + pass + + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LOQCentreNoGrav.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQCentreNoGrav.py new file mode 100644 index 0000000000000000000000000000000000000000..51cca5aaae31118604fdefa1bc3774d3d9d034a4 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQCentreNoGrav.py @@ -0,0 +1,56 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class LOQCentreNoGrav(stresstesting.MantidStressTest): + + def runTest(self): + + LOQ() + + Set1D() + Detector("rear-detector") + MaskFile('MASK.094AA') + Gravity(False) + + AssignSample('54431.raw') + TransmissionSample('54435.raw', '54433.raw') + AssignCan('54432.raw') + TransmissionCan('54434.raw', '54433.raw') + + FindBeamCentre(60,200, 9) + + WavRangeReduction(3, 9, DefaultTrans) + + 
def validate(self): + + return '54431main_1D_3.0_9.0','LOQCentreNoGravSearchCentreFixed.nxs' + +class LOQCentreNoGravDefineCentre(stresstesting.MantidStressTest): + def runTest(self): + + LOQ() + + Set1D() + Detector("rear-detector") + MaskFile('MASK.094AA') + Gravity(False) + SetCentre(324.765, 327.670) + + AssignSample('54431.raw') + TransmissionSample('54435.raw', '54433.raw') + AssignCan('54432.raw') + TransmissionCan('54434.raw', '54433.raw') + + WavRangeReduction(3, 9, DefaultTrans) + + def validate(self): + # Need to disable checking of the Spectra-Detector map becauseit isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return '54431main_1D_3.0_9.0','LOQCentreNoGrav.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LOQReductionGUI.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQReductionGUI.py new file mode 100644 index 0000000000000000000000000000000000000000..a8ec0d38e701020b60e453dd083c26cfb8e748fb --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQReductionGUI.py @@ -0,0 +1,27 @@ +import stresstesting +from mantid.simpleapi import * +import isis_reducer +import ISISCommandInterface as i +import isis_instrument +import isis_reduction_steps + +MASKFILE = FileFinder.getFullPath('MaskLOQData.txt') +BATCHFILE = FileFinder.getFullPath('loq_batch_mode_reduction.csv') + +class LOQMinimalBatchReduction(stresstesting.MantidStressTest): + def __init__(self): + super(LOQMinimalBatchReduction, self).__init__() + config['default.instrument'] = 'LOQ' + + def runTest(self): + import SANSBatchMode as batch + i.LOQ() + i.MaskFile(MASKFILE) + fit_settings = batch.BatchReduce(BATCHFILE, '.nxs', combineDet='merged', saveAlgs={}) + + def validate(self): + # note increased tolerance to something which quite high + # this is partly 
a temperary measure, but also justified by + # when overlaying the two options they overlap very well + self.tolerance = 1.0e+1 + return 'first_time_merged', 'LOQReductionMergedData.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LOQSANSUtilityTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQSANSUtilityTest.py new file mode 100644 index 0000000000000000000000000000000000000000..e98d307ebe6634d3d37b34498c2b46ca086ef21f --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQSANSUtilityTest.py @@ -0,0 +1,28 @@ +import stresstesting +from mantid.simpleapi import * +from mantid import config +import SANSUtility as su +import SANSadd2 as add + +import os + +def unixLikePathFromWorkspace(ws): + return su.getFilePathFromWorkspace(ws).replace('\\','/') + + +class SANSUtilityTest(stresstesting.MantidStressTest): + + def runTest(self): + # created after issue reported in #8156 + ws = Load('LOQ54432') + self.assertTrue('Data/LOQ/LOQ54432.raw' in unixLikePathFromWorkspace(ws)) + ws = Load('LOQ99618.RAW') + self.assertTrue('Data/LOQ/LOQ99618.RAW' in unixLikePathFromWorkspace(ws)) + add.add_runs(('LOQ54432','LOQ54432'),'LOQ','.raw') + ws = Load('LOQ54432-add') + file_path = unixLikePathFromWorkspace(ws) + logger.information("File Path from -add: "+str(file_path)) + file_path = file_path.replace('-ADD','-add') # MAC seems to report that the file is LOQ54432-ADD.nxs + self.assertTrue('logs/LOQ54432-add' in file_path) + os.remove(file_path) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LOQTransFitWorkspace2D.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQTransFitWorkspace2D.py new file mode 100644 index 0000000000000000000000000000000000000000..2bb5558ba175381d28a0612d362d1dc602ad50b1 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LOQTransFitWorkspace2D.py @@ -0,0 +1,53 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class 
LOQTransFitWorkspace2D(stresstesting.MantidStressTest): + """ + Tests the SANS interface commands TransFit() and TransWorkspace(). Also tests + a LOQ reduction in 2D with can and transmission files + """ + + def runTest(self): + self.setup() + + #test TransFit() + TransFit('LOG',3.0,8.0) + TransmissionSample('54435.raw', '54433.raw') + TransmissionCan('54434.raw', '54433.raw') + + #run the reduction + WavRangeReduction(3, 4, False, '_suff') + + #save the results, we'll use them later, remove the other tempory workspaces + RenameWorkspace(InputWorkspace='54435_trans_sample_3.0_8.0',OutputWorkspace= 'samp') + RenameWorkspace(InputWorkspace='54434_trans_can_3.0_8.0',OutputWorkspace= 'can') + DeleteWorkspace(Workspace='54435_trans_sample_3.0_8.0_unfitted') + DeleteWorkspace(Workspace='54434_trans_can_3.0_8.0_unfitted') + DeleteWorkspace(Workspace='54431main_2D_3.0_4.0_suff') + + #now test TransWorkspace() + self.setup() + #use the results we calculated above + TransWorkspace('samp', 'can') + + WavRangeReduction(3, 4, False, '_suff') + + def setup(self): + #DataPath("../Data/LOQ/") + #UserPath("../Data/LOQ/") + LOQ() + MaskFile('MASK.094AA') + Gravity(False) + Set2D() + Detector("main-detector-bank") + AssignSample('54431.raw') + AssignCan('54432.raw') + LimitsWav(3,4, 0.2, 'LIN') + + def validate(self): + self.disableChecking.append('SpectraMap') + #when comparing LOQ files you seem to need the following + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + return '54431main_2D_3.0_4.0_suff','LOQTransFitWorkspace2D.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadAndCheckBase.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadAndCheckBase.py new file mode 100644 index 0000000000000000000000000000000000000000..28256b4f5867a737e7a31af966b65ed090e4fa7e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadAndCheckBase.py @@ -0,0 +1,94 @@ +""" +These system tests are to verify the behaviour of the ISIS 
reflectometry reduction scripts +""" + +import stresstesting +from mantid.simpleapi import * +import mantid.api._api + +from abc import ABCMeta, abstractmethod + +class LoadAndCheckBase(stresstesting.MantidStressTest): + + __metaclass__ = ABCMeta # Mark as an abstract class + + __comparison_out_workspace_name = 'a_integrated' + + @abstractmethod + def get_raw_workspace_filename(self): + """Returns the name of the raw workspace file""" + raise NotImplementedError("Implement get_raw_workspace_filename") + + @abstractmethod + def get_nexus_workspace_filename(self): + """Returns the name of the nexus workspace file""" + raise NotImplementedError("Implement get_nexus_workspace_filename") + + @abstractmethod + def get_expected_instrument_name(self): + """Returns the name of the instrument""" + raise NotImplementedError("Implement get_expected_instrument_name") + + def get_expected_number_of_periods(self): + return 1 + + def get_integrated_reference_workspace_filename(self): + """Returns the name of the benchmark file used for end-of-test comparison.""" + if self.enable_reference_result_checking(): + # Must have a reference result file if reference result checking is required + raise NotImplementedError("Implement get_nexus_workspace_filename") + + def enable_reference_result_checking(self): + return True + + def enable_instrument_checking(self): + return True + + + def do_check_workspace_shape(self, ws1, ws2): + self.assertTrue(ws1.getNumberHistograms(), ws2.getNumberHistograms()) + self.assertTrue(len(ws1.readX(0)) == len(ws2.readX(0))) + self.assertTrue(len(ws1.readY(0)) == len(ws2.readY(0))) + + def do_check_instrument_applied(self, ws1, ws2): + instrument_name = self.get_expected_instrument_name() + self.assertTrue(ws1.getInstrument().getName() == instrument_name) + self.assertTrue(ws2.getInstrument().getName() == instrument_name) + + def runTest(self): + Load(Filename=self.get_nexus_workspace_filename(), OutputWorkspace='nexus') + 
Load(Filename=self.get_raw_workspace_filename(), OutputWorkspace='raw') + + a = mtd['nexus'] + b = mtd['raw'] + n_periods = self.get_expected_number_of_periods() + + self.assertTrue(type(a) == type(b)) + + #raise NotImplementedError() + if(isinstance(a,mantid.api._api.WorkspaceGroup)): + self.assertEqual(a.size(), b.size()) + self.assertEqual(a.size(), n_periods) + # Loop through each workspace in the group and apply some simple comaprison checks. + for i in range(0, a.size()): + self.do_check_workspace_shape(a[i], b[i]) + if self.enable_instrument_checking(): + self.do_check_instrument_applied(a[i], b[i]) + if self.enable_reference_result_checking(): + Integration(InputWorkspace=a[0], OutputWorkspace=self.__comparison_out_workspace_name) + else: + self.do_check_workspace_shape(a, b) + if self.enable_instrument_checking(): + self.do_check_instrument_applied(a, b) + if self.enable_reference_result_checking(): + Integration(InputWorkspace=a, OutputWorkspace=self.__comparison_out_workspace_name) + + def validate(self): + self.disableChecking.append('Instrument') + if self.enable_reference_result_checking(): + return self.__comparison_out_workspace_name, self.get_integrated_reference_workspace_filename() + else: + return True + + + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadEmbeddedInstrumentInfo.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadEmbeddedInstrumentInfo.py new file mode 100644 index 0000000000000000000000000000000000000000..ab56356edd2d61a79ed0ceff6a8129b2296e035e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadEmbeddedInstrumentInfo.py @@ -0,0 +1,31 @@ +import stresstesting +from mantid.simpleapi import * + +""" +Here testing against embedded instrument info in different raw file formats + +This include to test that embedded information in raw ISIS Nexus file formats +get loaded correctly. 
+ +""" + +# here test against a custom made ISIS raw hist nexus file created by Freddie +# where the A1_window has be, for the purpose of testing, been put at a +# completely wrong location of (0,3,0) +class ISISRawHistNexus(stresstesting.MantidStressTest): + + def runTest(self): + + # ISIS raw hist nexus file with A1_window at location (0,3,0) + MAPS00018314_raw_ISIS_hist = Load('MAPS00018314.nxs') + + def validate(self): + + MAPS00018314_raw_ISIS_hist = mtd['MAPS00018314_raw_ISIS_hist'] + inst = MAPS00018314_raw_ISIS_hist.getInstrument() + A1window = inst.getComponentByName('MAPS/A1_window') + + if str(A1window.getPos()) != '[0,3,0]' : + return False + + return True \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfFiles.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..0a12b119767ca59bca69b5dc65092095a67a501b --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfFiles.py @@ -0,0 +1,279 @@ +from mantid.simpleapi import * +from mantid.api import FrameworkManager +import copy +import os +import re +import stresstesting + +BANNED_FILES = ['992 Descriptions.txt', + 'BASIS_AutoReduction_Mask.xml', + 'BioSANS_dark_current.xml', + 'BioSANS_empty_cell.xml', + 'BioSANS_empty_trans.xml', + 'BioSANS_exp61_scan0004_0001.xml', + 'BioSANS_flood_data.xml', + 'BioSANS_sample_trans.xml', + 'CNCS_TS_2008_08_18.dat', + 'DISF_NaF.cdl', + 'det_corrected7.dat', + 'det_LET_cycle12-3.dat', + 'eqsans_configuration.1463', + 'FLAT_CELL.061', + 'HYSA_mask.xml', + 'IN10_P3OT_350K.inx', + 'IN13_16347.asc', + 'IN16_65722.asc', + 'IP0005.dat', + 'batch_input.csv', + 'mar11015.msk', + 'LET_hard.msk', #It seems loade does not understand it? 
+ 'MASK.094AA', + 'MASKSANS2D_094i_RKH.txt', + 'MASKSANS2D.091A', + 'MASKSANS2Doptions.091A', + 'MaskSANS2DReductionGUI.txt', + 'MaskSANS2DReductionGUI_MaskFiles.txt', + 'MaskSANS2DReductionGUI_LimitEventsTime.txt', + 'MAP17269.raw', # Don't need to check multiple MAPS files + 'MAP17589.raw', + 'MER06399.raw', # Don't need to check multiple MERLIN files + 'PG3_11485-1.dat', # Generic load doesn't do very well with ASCII files + 'PG3_2538_event.nxs', # Don't need to check all of the PG3 files + 'PG3_9829_event.nxs', + 'REF_M_9684_event.nxs', + 'REF_M_9709_event.nxs', + 'SANS2D_periodTests.csv', + 'SANS2D_992_91A.csv', + 'SANS2D_mask_batch.csv', + 'sans2d_reduction_gui_batch.csv', + 'squaricn.phonon', + 'squaricn.castep', + 'target_circles_mask.xml', + 'linked_circles_mask.xml', + 'testCansas1DMultiEntry.xml', + 'Wish_Diffuse_Scattering_ISAW_UB.mat', + 'WSH_test.dat', + 'SANS2D_multiPeriodTests.csv', + 'SANS2D_periodTests.csv', + 'DIRECTM1_15785_12m_31Oct12_v12.dat', + 'MaskSANS2DReductionGUI.txt', + 'sans2d_reduction_gui_batch.csv' + 'MANTID_FLAT_CELL.115', + 'MaskLOQData.txt', + 'DIRECTHAB.983', + 'loq_batch_mode_reduction.csv', + 'det_corrected7.nxs', # this file can be loaded by LoadDetectorInfo but I am not sure if generic loader should ever deal with it + 'poldi2013n006903.hdf', + 'poldi2013n006904.hdf', + 'poldi2014n019874.hdf', + 'poldi2014n019881.hdf' + ] + +EXPECTED_EXT = '.expected' + +BANNED_REGEXP = [r'SANS2D\d+.log$', + r'SANS2D00000808_.+.txt$', + r'.*_reduction.log$', + r'.+_characterization_\d+_\d+_\d+.*\.txt', + r'.*\.cal', + r'.*\.detcal', + r'.*Grouping\.xml', + r'.*\.map', + r'.*\.irf', + r'.*\.hkl', + r'EVS.*\.raw', + r'.*_pulseid\.dat'] + +# This list stores files that will be loaded first. +# Implemented as simple solution to avoid failures on +# WinXP where small files have trouble allocating larger +# amounts of contiguous memory. 
+# Usage of XP is getting lower so we don't want to compromise the +# performance of the code elsewhere just to pass here +PRIORITY_FILES = ['HYS_13658_event.nxs', + 'ILLIN5_Sample_096003.nxs', + 'ILLIN5_Vana_095893.nxs'] + +def useDir(direc): + """Only allow directories that aren't test output or + reference results.""" + if "ReferenceResults" in direc: + return False + if "logs" in direc: + return False + return ("Data" in direc) + +def useFile(direc, filename): + """Returns (useFile, abspath)""" + # list of explicitly banned files at the top of this script + if filename in BANNED_FILES: + return (False, filename) + + # is an 'expected' file + if filename.endswith(EXPECTED_EXT): + return (False, filename) + + # list of banned files by regexp + for regexp in BANNED_REGEXP: + if re.match(regexp, filename, re.I) is not None: + return (False, filename) + + filename = os.path.join(direc, filename) + if os.path.isdir(filename): + return (False, filename) + return (True, filename) + +class LoadLotsOfFiles(stresstesting.MantidStressTest): + def __getDataFileList__(self): + # get a list of directories to look in + dirs = config['datasearch.directories'].split(';') + dirs = [item for item in dirs if useDir(item)] + print "Looking for data files in:", ', '.join(dirs) + + # Files and their corresponding sizes. 
the low-memory win machines + # fair better loading the big files first + files = {} + priority_abspaths = copy.deepcopy(PRIORITY_FILES) + for direc in dirs: + myFiles = os.listdir(direc) + for filename in myFiles: + (good, fullpath) = useFile(direc, filename) + #print "***", good, filename + if good: + files[fullpath] = os.path.getsize(fullpath) + try: + cur_index = PRIORITY_FILES.index(filename) + priority_abspaths[cur_index] = fullpath + except ValueError: + pass + + datafiles = sorted(files, key=lambda key: files[key], reverse=True) + + # Put the priority ones first + for insertion_index, fname in enumerate(priority_abspaths): + try: + cur_index = datafiles.index(fname) + except ValueError: + continue + value = datafiles.pop(cur_index) + datafiles.insert(insertion_index, fname) + + return datafiles + + def __runExtraTests__(self, wksp, filename): + """Runs extra tests that are specified in '.expected' files + next to the data files""" + expected = filename + EXPECTED_EXT + if not os.path.exists(expected): #file exists + return True + if os.path.getsize(expected) <= 0: #non-zero length + return True + + # Eval statement will use current scope. 
Allow access to + # mantid module + import mantid + + print "Found an expected file '%s' file" % expected + expectedfile = open(expected) + tests = expectedfile.readlines() + failed = [] # still run all of the tests + for test in tests: + test = test.strip() + result = eval(test) + if not (result == True): + failed.append((test, result)) + if len(failed) > 0: + for item in failed: + print " Failed test '%s' returned '%s' instead of 'True'" % (item[0], item[1]) + return False + return True + + + def __loadAndTest__(self, filename): + """Do all of the real work of loading and testing the file""" + print "----------------------------------------" + print "Loading '%s'" % filename + from mantid.api import Workspace + from mantid.api import IMDEventWorkspace + # Output can be a tuple if the Load algorithm has extra output properties + # but the output workspace should always be the first argument + outputs = Load(filename) + if type(outputs) == tuple: + wksp = outputs[0] + else: + wksp = outputs + + if not isinstance(wksp, Workspace): + print "Unexpected output type from Load algorithm: Type found=%s" % str(type(outputs)) + return False + + if wksp is None: + print 'Load returned None' + return False + + # generic checks + if wksp.getName() is None or len(wksp.getName()) <= 0: + print "Workspace does not have a name" + del wksp + return False + + id = wksp.id() + if id is None or len(id) <= 0: + print "Workspace does not have an id" + del wksp + return False + + # checks based on workspace type + if hasattr(wksp, "getNumberHistograms"): + if wksp.getNumberHistograms() <= 0: + print "Workspace has zero histograms" + del wksp + return False + if "managed" not in id.lower() and wksp.getMemorySize() <= 0: + print "Workspace takes no memory: Memory used=" + str(wksp.getMemorySize()) + del wksp + return False + + # checks for EventWorkspace + if hasattr(wksp, "getNumberEvents"): + if wksp.getNumberEvents() <= 0: + print "EventWorkspace does not have events" + del wksp + 
return False + + # do the extra checks + result = self.__runExtraTests__(wksp, filename) + + # cleanup + del wksp + return result + + def runTest(self): + """Main entry point for the test suite""" + files = self.__getDataFileList__() + + # run the tests + failed = [] + for filename in files: + try: + if not self.__loadAndTest__(filename): + print "FAILED TO LOAD '%s'" % filename + failed.append(filename) + except Exception, e: + print "FAILED TO LOAD '%s' WITH ERROR:" % filename + print e + failed.append(filename) + finally: + # Clear everything for the next test + FrameworkManager.Instance().clear() + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + for filename in failed: + print filename + raise RuntimeError("Failed to load %d of %d files" \ + % (len(failed), len(files))) + else: + print "Successfully loaded %d files" % len(files) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfInstruments.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfInstruments.py new file mode 100644 index 0000000000000000000000000000000000000000..3736d6c01576d596426df0e7cc9307d418b61ddc --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadLotsOfInstruments.py @@ -0,0 +1,78 @@ +from mantid.simpleapi import * +from mantid.api import FrameworkManager +import os +import re +import glob +import stresstesting + +EXPECTED_EXT = '.expected' + +class LoadLotsOfInstruments(stresstesting.MantidStressTest): + def __getDataFileList__(self): + # get a list of directories to look in + direc = config['instrumentDefinition.directory'] + print "Looking for instrument definition files in: %s" % direc + cwd = os.getcwd() + os.chdir(direc) + myFiles = glob.glob("*Definition*.xml") + os.chdir(cwd) + # Files and their corresponding sizes. 
the low-memory win machines + # fair better loading the big files first + files = [] + for filename in myFiles: + files.append(os.path.join(direc, filename)) + files.sort() + return files + + + def __loadAndTest__(self, filename): + """Do all of the real work of loading and testing the file""" + print "----------------------------------------" + print "Loading '%s'" % filename + wksp = LoadEmptyInstrument(filename) + if wksp is None: + return False + + # TODO standard tests + if wksp.getNumberHistograms() <= 0: + del wksp + return False + if wksp.getMemorySize() <= 0: + print "Workspace takes no memory: Memory used=" + str(wksp.getMemorySize()) + del wksp + return False + + # cleanup + del wksp + return True + + def runTest(self): + """Main entry point for the test suite""" + files = self.__getDataFileList__() + + # run the tests + failed = [] + for filename in files: + try: + if not self.__loadAndTest__(filename): + print "FAILED TO LOAD '%s'" % filename + failed.append(filename) + except Exception, e: + print "FAILED TO LOAD '%s' WITH ERROR:" % filename + print e + failed.append(filename) + finally: + # Clear everything for the next test + FrameworkManager.Instance().clear() + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + for filename in failed: + print filename + raise RuntimeError("Failed to load %d of %d files" \ + % (len(failed), len(files))) + else: + print "Successfully loaded %d files" % len(files) + print files diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadMuonNexusTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadMuonNexusTest.py new file mode 100644 index 0000000000000000000000000000000000000000..a682f38210c1328f623b310d4a658e58aebf8545 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadMuonNexusTest.py @@ -0,0 +1,17 @@ +import stresstesting +from mantid.simpleapi import * + +class 
LoadMuonNexusTest(stresstesting.MantidStressTest): + + def runTest(self): + # EMU03087 is an old data file produced by CONVERT_NEXUS from MCS binary files. + # Checked specifically because stores resulution (used to calculate FirstGoodData) + # as NX_FLOAT32 opposed to NX_INT32 in other Muon files. + loadResult = LoadMuonNexus(Filename = "EMU03087.nxs", + OutputWorkspace = "EMU03087") + + firstGoodData = loadResult[3] + self.assertDelta(firstGoodData, 0.416, 0.0001) + + def cleanup(self): + mtd.remove("EMU03087") diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadTest.py new file mode 100644 index 0000000000000000000000000000000000000000..8b7aa21413572a5388b5b533b38051a5a2745502 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadTest.py @@ -0,0 +1,222 @@ +""" + Extends the basic test of the Load algorithm done by the LoadLotsOfFiles + test to encompass the complex multi-file loading that the Load + algorithm is capable of. 
+""" +import stresstesting + +from mantid.api import AnalysisDataService, IEventWorkspace, MatrixWorkspace, WorkspaceGroup +from mantid.simpleapi import Load + +import unittest + +DIFF_PLACES = 8 + +class LoadTest(stresstesting.MantidStressTest): + + def runTest(self): + self._success = False + + # Custom code to create and run this single test suite + # and then mark as success or failure + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(LoadTests, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success + +#------------------------------------------------------------------------------ +# work horse +class LoadTests(unittest.TestCase): + + wsname = "__LoadTest" + cleanup_names = [] + + def tearDown(self): + self.cleanup_names.append(self.wsname) + for name in self.cleanup_names: + try: + AnalysisDataService.remove(name) + except KeyError: + pass + self.cleanup_names = [] + + def test_csv_list_with_same_instrument_produces_single_group(self): + data = Load("OFFSPEC10791,10792,10793.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, WorkspaceGroup)) + self.assertEquals(6, data.getNumberOfEntries()) + ads_names = ["OFFSPEC00010791_1", "OFFSPEC00010791_2", + "OFFSPEC00010792_1", "OFFSPEC00010792_2", + "OFFSPEC00010793_1", "OFFSPEC00010793_2"] + for name in ads_names: + self.assertTrue(name in AnalysisDataService) + + deleted_names = ["OFFSPEC10791", "OFFSPEC10792", "OFFSPEC10793"] + for name in deleted_names: + self.assertTrue(name not in AnalysisDataService) + + self.cleanup_names = ads_names + + def test_csv_list_with_different_instrument_produces_single_group(self): + # Combine test of different instruments with giving the output name + # the same name as one of the members of the group + self.wsname = "LOQ99631" + data = Load("LOQ99631.RAW, CSP85423.raw", 
OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, WorkspaceGroup)) + self.assertEquals(3, data.getNumberOfEntries()) + ads_names = ["LOQ99631", "CSP85423_1", "CSP85423_2"] + for name in ads_names: + self.assertTrue(name in AnalysisDataService) + + deleted_names = ["CSP85423"] + for name in deleted_names: + self.assertTrue(name not in AnalysisDataService) + + self.cleanup_names = ads_names + self.wsname = "__LoadTest" + + def test_extra_properties_passed_to_loader(self): + data = Load("CNCS_7860_event.nxs", OutputWorkspace = self.wsname, + BankName = "bank1", SingleBankPixelsOnly = False) + + self.assertTrue(isinstance(data, IEventWorkspace)) + self.assertEquals(1740, data.getNumberEvents()) + + def test_extra_properties_passed_to_loader_for_multiple_files(self): + data = Load("EQSANS_1466_event.nxs,EQSANS_3293_event.nxs", OutputWorkspace = self.wsname, + BankName = "bank1", SingleBankPixelsOnly = False) + + self.assertTrue(isinstance(data, WorkspaceGroup)) + self.assertEquals(2, data.getNumberOfEntries()) + # Test number of events in each + self.assertEquals(740, data[0].getNumberEvents()) + self.assertEquals(105666, data[1].getNumberEvents()) + + def test_range_operator_loads_correct_number_of_files(self): + data = Load("TSC15352:15354.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, WorkspaceGroup)) + self.assertEquals(3, data.getNumberOfEntries()) + + self.assertTrue(isinstance(data[0], MatrixWorkspace)) + self.assertTrue(isinstance(data[1], MatrixWorkspace)) + self.assertTrue(isinstance(data[2], MatrixWorkspace)) + + # Cursory check that the correct ones were loaded + self.assertTrue("TO96_2" in data[0].getTitle()) + self.assertTrue("TO96_3" in data[1].getTitle()) + self.assertTrue("TO96_4" in data[2].getTitle()) + + def test_stepped_range_operator_loads_correct_number_of_files(self): + data = Load("TSC15352:15354:2.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, WorkspaceGroup)) + 
self.assertEquals(2, data.getNumberOfEntries()) + + self.assertTrue(isinstance(data[0], MatrixWorkspace)) + self.assertTrue(isinstance(data[1], MatrixWorkspace)) + + # Cursory check that the correct ones were loaded + self.assertTrue("TO96_2" in data[0].getTitle()) + self.assertTrue("TO96_4" in data[1].getTitle()) + + def test_plus_operator_sums_single_set_files(self): + data = Load("TSC15352+15353.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, MatrixWorkspace)) + self.assertEquals(149, data.getNumberHistograms()) + self.assertEquals(24974, data.blocksize()) + + self.assertAlmostEqual(9.0, data.readX(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(46352.0, data.readY(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(215.29514625276622, data.readE(2)[1], places = DIFF_PLACES) + + deleted_names = ["TSC15352", "TSC15353"] + for name in deleted_names: + self.assertTrue(name not in AnalysisDataService) + + def test_plus_operator_sums_multiple_set_files_to_give_group(self): + summed_data = Load("TSC15352+15353.raw,TSC15352+15354.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(summed_data, WorkspaceGroup)) + self.assertEquals(2, summed_data.getNumberOfEntries()) + + # First group + data = summed_data[0] + self.assertEquals(149, data.getNumberHistograms()) + self.assertEquals(24974, data.blocksize()) + + self.assertAlmostEqual(9.0, data.readX(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(46352.0, data.readY(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(215.29514625276622, data.readE(2)[1], places = DIFF_PLACES) + + # Second group + data = summed_data[1] + self.assertEquals(149, data.getNumberHistograms()) + self.assertEquals(24974, data.blocksize()) + + self.assertAlmostEqual(9.0, data.readX(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(35640.0, data.readY(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(188.78559267062727, data.readE(2)[1], places = DIFF_PLACES) + + deleted_names = 
["TSC15352", "TSC15353", "TSC15354"] + for name in deleted_names: + self.assertTrue(name not in AnalysisDataService,) + + def test_sum_range_operator_sums_to_single_workspace(self): + data = Load("TSC15352-15353.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, MatrixWorkspace)) + self.assertEquals(149, data.getNumberHistograms()) + self.assertEquals(24974, data.blocksize()) + + self.assertAlmostEqual(9.0, data.readX(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(46352.0, data.readY(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(215.29514625276622, data.readE(2)[1], places = DIFF_PLACES) + + def test_sum_range_operator_with_step_sums_to_single_workspace(self): + data = Load("TSC15352-15354:2.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(data, MatrixWorkspace)) + self.assertEquals(149, data.getNumberHistograms()) + self.assertEquals(24974, data.blocksize()) + + self.assertAlmostEqual(9.0, data.readX(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(35640.0, data.readY(2)[1], places = DIFF_PLACES) + self.assertAlmostEqual(188.78559267062727, data.readE(2)[1], places = DIFF_PLACES) + + + def test_plus_operator_for_input_groups(self): + summed_data = Load("OFFSPEC10791+10792.raw", OutputWorkspace = self.wsname) + + self.assertTrue(isinstance(summed_data, WorkspaceGroup)) + self.assertEquals(2, summed_data.getNumberOfEntries()) + + # First group + data = summed_data[0] + self.assertEquals(245, data.getNumberHistograms()) + self.assertEquals(5000, data.blocksize()) + + self.assertAlmostEqual(25.0, data.readX(1)[1], places = DIFF_PLACES) + self.assertAlmostEqual(4.0, data.readY(1)[1], places = DIFF_PLACES) + self.assertAlmostEqual(2.0, data.readE(1)[1], places = DIFF_PLACES) + + # Second group + data = summed_data[1] + self.assertEquals(245, data.getNumberHistograms()) + self.assertEquals(5000, data.blocksize()) + + self.assertAlmostEqual(25.0, data.readX(1)[1], places = DIFF_PLACES) + 
self.assertAlmostEqual(1.0, data.readY(1)[1], places = DIFF_PLACES) + self.assertAlmostEqual(1.0, data.readE(1)[1], places = DIFF_PLACES) + +#==================================================================================== diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/LoadVesuvioTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadVesuvioTest.py new file mode 100644 index 0000000000000000000000000000000000000000..ed37fc99070294b35579b275509d9bed10fb5e7b --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/LoadVesuvioTest.py @@ -0,0 +1,228 @@ +import stresstesting + +from mantid.api import MatrixWorkspace, mtd +from mantid.simpleapi import LoadVesuvio + +import unittest + +DIFF_PLACES = 12 + +class VesuvioTests(unittest.TestCase): + + ws_name = "evs_raw" + + + def tearDown(self): + if self.ws_name in mtd: + mtd.remove(self.ws_name) + + #================== Success cases ================================ + def test_load_with_back_scattering_spectra_produces_correct_workspace(self): + self._run_load("14188", "3-134", "DoubleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(0.078968412230231877, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.12162310222873171, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.018091076761311387, evs_raw.readY(131)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.063175962622448692, evs_raw.readE(131)[1188], places=DIFF_PLACES) + + def test_consecutive_runs_with_back_scattering_spectra_gives_expected_numbers(self): + self._run_load("14188-14190", "3-134", "DoubleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(0.12812011879757312, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.07005709042418834, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.038491709460370394, evs_raw.readY(131)[1188], places=DIFF_PLACES) + 
self.assertAlmostEqual(0.036783617369284975, evs_raw.readE(131)[1188], places=DIFF_PLACES) + + def test_non_consecutive_runs_with_back_scattering_spectra_gives_expected_numbers(self): + self._run_load("14188,14190", "3-134", "DoubleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(0.17509520926405386, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.085651536076367191, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(-0.027855932189430499, evs_raw.readY(131)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.044991428219920804, evs_raw.readE(131)[1188], places=DIFF_PLACES) + + def test_load_with_forward_scattering_spectra_produces_correct_workspace(self): + self._run_load("14188", "135-198", "SingleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(-0.4421157823659172, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.23849110331150025, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(-0.030129475930755989, evs_raw.readY(63)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.23849110331150025, evs_raw.readE(0)[1], places=DIFF_PLACES) + + def test_consecutive_runs_with_forward_scattering_spectra_gives_expected_numbers(self): + self._run_load("14188-14190", "135-198", "SingleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(-0.33023675686822429, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.13839181298987582, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(-0.0005762703884557574, evs_raw.readY(63)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.022314627606989094, evs_raw.readE(63)[1188], places=DIFF_PLACES) + + def test_non_consecutive_runs_with_forward_scattering_spectra_gives_expected_numbers(self): + self._run_load("14188,14190", "135-198", "SingleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + 
self.assertAlmostEqual(-0.31382658620745474, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.16935354944452052, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.0013599866184859088, evs_raw.readY(63)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.16935354944452052, evs_raw.readE(0)[1], places=DIFF_PLACES) + + def test_load_with_spectra_mixed_from_forward_backward_gives_expected_numbers(self): + self._run_load("14188", "134,135", "DoubleDifference") + + # Check some data + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(0.43816507168120111, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.23224859590051541, evs_raw.readE(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(0.013611354662030284, evs_raw.readY(1)[1188], places=DIFF_PLACES) + self.assertAlmostEqual(0.031506182465619419, evs_raw.readE(1)[1188], places=DIFF_PLACES) + + def test_foilout_mode_gives_expected_numbers(self): + self._run_load("14188", "3", "FoilOut") + + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(18753.00, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(136.94159338929865, evs_raw.readE(0)[1], places=DIFF_PLACES) + + def test_foilin_mode_gives_expected_numbers(self): + self._run_load("14188", "3", "FoilIn") + + evs_raw = mtd[self.ws_name] + self.assertAlmostEqual(37594.0, evs_raw.readY(0)[1], places=DIFF_PLACES) + self.assertAlmostEqual(193.89172236070317, evs_raw.readE(0)[1], places=DIFF_PLACES) + + def test_using_ip_file_adjusts_instrument_and_attaches_parameters(self): + self._run_load("14188", "3", "SingleDifference","IP0005.dat") + + # Check some data + evs_raw = mtd[self.ws_name] + det0 = evs_raw.getDetector(0) + param = det0.getNumberParameter("t0") + self.assertEqual(1, len(param)) + self.assertAlmostEqual(-0.4157, param[0],places=4) + + def test_sumspectra_set_to_true_gives_single_spectra_summed_over_all_inputs(self): + self._run_load("14188", "135-142", 
"SingleDifference","IP0005.dat",sum=True) + evs_raw = mtd[self.ws_name] + + # Verify + self.assertEquals(1, evs_raw.getNumberHistograms()) + self.assertAlmostEqual(-1.5288171762918328, evs_raw.readY(0)[0], places=DIFF_PLACES) + self.assertAlmostEqual(-0.079412793053402098, evs_raw.readY(0)[-1], places=DIFF_PLACES) + self.assertAlmostEqual(0.52109203357613976, evs_raw.readE(0)[0], places=DIFF_PLACES) + self.assertAlmostEqual(0.10617318614513051, evs_raw.readE(0)[-1], places=DIFF_PLACES) + + def test_sumspectra_with_multiple_groups_gives_number_output_spectra_as_input_groups(self): + self._run_load("14188", "135-148;152-165", "SingleDifference","IP0005.dat",sum=True) + + evs_raw = mtd[self.ws_name] + + # Verify + self.assertEquals(2, evs_raw.getNumberHistograms()) + self.assertAlmostEqual(-0.713877795283, evs_raw.readY(0)[0], places=DIFF_PLACES) + self.assertAlmostEqual(-3.00125465604, evs_raw.readY(1)[0], places=DIFF_PLACES) + self.assertAlmostEqual(0.6219299465, evs_raw.readE(0)[0], places=DIFF_PLACES) + self.assertAlmostEqual(0.676913729914, evs_raw.readE(1)[0], places=DIFF_PLACES) + + def _run_load(self, runs, spectra, diff_opt, ip_file="", sum=False): + LoadVesuvio(Filename=runs,OutputWorkspace=self.ws_name, + SpectrumList=spectra,Mode=diff_opt,InstrumentParFile=ip_file, + SumSpectra=sum) + + self._do_ads_check(self.ws_name) + + def expected_size(): + if sum: + if ";" in spectra: + return 2 + else: + return 1 + elif "-" in spectra: + elements = spectra.split("-") + min,max=(int(elements[0]), int(elements[1])) + return max - min + 1 + elif "," in spectra: + elements = spectra.strip().split(",") + return len(elements) + else: + return 1 + + self._do_size_check(self.ws_name, expected_size()) + loaded_data = mtd[self.ws_name] + if "Difference" in diff_opt: + self.assertTrue(not loaded_data.isHistogramData()) + else: + self.assertTrue(loaded_data.isHistogramData()) + + def _do_ads_check(self, name): + self.assertTrue(name in mtd) + self.assertTrue(type(mtd[name]) == 
MatrixWorkspace) + + def _do_size_check(self,name, expected_nhist): + loaded_data = mtd[name] + self.assertEquals(expected_nhist, loaded_data.getNumberHistograms()) + + #================== Failure cases ================================ + + def test_missing_spectra_property_raises_error(self): + self.assertRaises(RuntimeError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name) + + def test_load_with_invalid_spectra_raises_error(self): + self.assertRaises(RuntimeError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name, SpectrumList="200") + + def test_load_with_spectra_that_are_just_monitors_raises_error(self): + self.assertRaises(RuntimeError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name, SpectrumList="1") + self.assertRaises(RuntimeError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name, SpectrumList="1-2") + + def test_load_with_invalid_difference_option_raises_error(self): + self.assertRaises(ValueError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name, Mode="Unknown",SpectrumList="3-134") + + def test_load_with_difference_option_not_applicable_to_current_spectra_raises_error(self): + self.assertRaises(ValueError, LoadVesuvio, Filename="14188", + OutputWorkspace=self.ws_name, Mode="",SpectrumList="3-134") + + def test_raising_error_removes_temporary_raw_workspaces(self): + self.assertRaises(RuntimeError, LoadVesuvio, Filename="14188,14199", # Second run is invalid + OutputWorkspace=self.ws_name, Mode="SingleDifference",SpectrumList="3-134") + + self._do_test_temp_raw_workspaces_not_left_around() + + def _do_test_temp_raw_workspaces_not_left_around(self): + self.assertTrue("__loadraw_evs" not in mtd) + self.assertTrue("__loadraw_evs_monitors" not in mtd) + + +#==================================================================================== + +class LoadVesuvioTest(stresstesting.MantidStressTest): + + def runTest(self): + self._success = False + # Custom code to create and run this single test 
suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(VesuvioTests, "test") ) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + else: + self._success = False + + def validate(self): + return self._success diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/MDWorkspaceTests.py b/Code/Mantid/Testing/SystemTests/tests/analysis/MDWorkspaceTests.py new file mode 100644 index 0000000000000000000000000000000000000000..29df48e4871b0cb6a3e3244bd9fd9ea1e8947196 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/MDWorkspaceTests.py @@ -0,0 +1,184 @@ +""" +Test some features of MDWorkspaces, such as +file-backed MDWorkspaces. +""" + +import stresstesting +import os +from mantid.simpleapi import * +from mantid.api import * +from mantid.kernel import * + +############################################################################### +class PlusMDTest(stresstesting.MantidStressTest): + + _saved_filename = None + + def compare_binned(self, wsname): + """ Compare the given workspace to the previously-binned original """ + BinMD(InputWorkspace=wsname,AlignedDim0='Q_lab_x, -3, 3, 100',AlignedDim1='Q_lab_y, -3, 3, 100',AlignedDim2='Q_lab_z, -3, 3, 100',ForceOrthogonal='1',OutputWorkspace="test_binned") + ws = mtd["test_binned"] + EqualToMD(LHSWorkspace=ws, RHSWorkspace=self.original_binned, OutputWorkspace='comparison') + comparison = mtd['comparison'] + for i in xrange(comparison.getNPoints()): + if not comparison.signalAt(i): + raise Exception("Difference in workspace %s vs original_binned at index %d" % (wsname, i)) + + def runTest(self): + # Some platforms can't clean up the open file handle on cncs.nxs from the last test, so run cleanup here as well + barefilename = "cncs.nxs" + config = ConfigService.Instance() + self._saved_filename = os.path.join(config["defaultsave.directory"], barefilename) + self.cleanup() + + # Load then convert to Q in 
the lab frame + LoadEventNexus(Filename=r'CNCS_7860_event.nxs',OutputWorkspace='cncs_nxs') + + ConvertToDiffractionMDWorkspace(InputWorkspace='cncs_nxs', OutputWorkspace='cncs_original', SplitInto=2) + alg = SaveMD(InputWorkspace='cncs_original', Filename=barefilename) + + self.assertDelta( mtd['cncs_original'].getNPoints(), 112266, 1) + BinMD(InputWorkspace='cncs_original',AlignedDim0='Q_lab_x, -3, 3, 100',AlignedDim1='Q_lab_y, -3, 3, 100',AlignedDim2='Q_lab_z, -3, 3, 100',ForceOrthogonal='1',OutputWorkspace='cncs_original_binned') + # Scale by 2 to account for summing + self.original_binned = mtd['cncs_original_binned'] + self.original_binned *= 2 + + # Load into memory + LoadMD(Filename='cncs.nxs',FileBackEnd='0',Memory='100',OutputWorkspace='cncs_mem') + + # ======== Mem + Mem =========== + LoadMD(Filename='cncs.nxs',FileBackEnd='0',OutputWorkspace='cncs_mem2') + PlusMD(LHSWorkspace="cncs_mem2", RHSWorkspace="cncs_mem", OutputWorkspace="cncs_mem2") + self.assertDelta( mtd['cncs_mem2'].getNPoints(), 112266*2, 1) + self.compare_binned('cncs_mem2') + DeleteWorkspace('cncs_mem2') + + # ======== File + mem, with write buffer =========== + LoadMD(Filename='cncs.nxs',FileBackEnd='1',Memory='100',OutputWorkspace='cncs_file') + PlusMD(LHSWorkspace="cncs_file", RHSWorkspace="cncs_mem", OutputWorkspace="cncs_file") + self.compare_binned('cncs_file') + SaveMD("cncs_file", UpdateFileBackEnd="1") + self.assertDelta( mtd['cncs_file'].getNPoints(), 112266*2, 1) + self.compare_binned('cncs_file') + DeleteWorkspace('cncs_file') + + # Refresh the original file + SaveMD(InputWorkspace='cncs_original', Filename='cncs.nxs') + + # ======== File + mem, with a small write buffer (only 1MB) ======== + LoadMD(Filename='cncs.nxs',FileBackEnd='1',Memory='1',OutputWorkspace='cncs_file_small_buffer') + PlusMD(LHSWorkspace="cncs_file_small_buffer", RHSWorkspace="cncs_mem", OutputWorkspace="cncs_file_small_buffer") + SaveMD("cncs_file_small_buffer", UpdateFileBackEnd="1") + self.assertDelta( 
mtd['cncs_file_small_buffer'].getNPoints(), 112266*2, 1) + self.compare_binned('cncs_file_small_buffer') + DeleteWorkspace('cncs_file_small_buffer') + + # Refresh the original file + SaveMD(InputWorkspace='cncs_original', Filename='cncs.nxs') + + # ======== File + mem, without a write buffer ======== + LoadMD(Filename='cncs.nxs',FileBackEnd='1',Memory='0',OutputWorkspace='cncs_file_nobuffer') + PlusMD(LHSWorkspace="cncs_file_nobuffer", RHSWorkspace="cncs_mem", OutputWorkspace="cncs_file_nobuffer") + SaveMD("cncs_file_nobuffer", UpdateFileBackEnd="1") + self.assertDelta( mtd['cncs_file_nobuffer'].getNPoints(), 112266*2, 1) + self.compare_binned('cncs_file_nobuffer') + DeleteWorkspace('cncs_file_nobuffer') + + # Refresh the original file + SaveMD(InputWorkspace='cncs_original', Filename='cncs.nxs') + + # ======== File + mem to a new (cloned) file ======== + LoadMD(Filename='cncs.nxs',FileBackEnd='1',Memory='100',OutputWorkspace='cncs_file') + PlusMD(LHSWorkspace="cncs_file", RHSWorkspace="cncs_mem", OutputWorkspace="cncs_added") + SaveMD("cncs_added", UpdateFileBackEnd="1") + self.compare_binned('cncs_added') + self.assertDelta( mtd['cncs_added'].getNPoints(), 112266*2, 1) + + # Make sure we delete the workspaces so the file handles are freed + workspaces_to_delete = ["cncs_file", "cncs_mem", "cncs_added"] + for name in workspaces_to_delete: + DeleteWorkspace(name) + + def doValidation(self): + # If we reach here, no validation failed + return True + + def cleanup(self): + """ + Remove files create during test + """ + if self._saved_filename is not None: + try: + os.remove(self._saved_filename) + Logger.get("MDWorkspaceTests").notice("Removed %s" % self._saved_filename) + except OSError: + Logger.get("MDWorkspaceTests").notice("Failed to remove %s" % self._saved_filename) + + # Plus the _clone version + filename = os.path.splitext(self._saved_filename)[0] + filename += '_clone.nxs' + try: + os.remove(filename) + Logger.get("MDWorkspaceTests").notice("Removed %s " % 
filename) + except OSError: + Logger.get("MDWorkspaceTests").notice("Failed to remove %s" % self._saved_filename) + +############################################################################### +class MergeMDTest(stresstesting.MantidStressTest): + + _saved_filenames = [] + + def make_files_to_merge_string(self): + filenames_string = '' + + for filename in self._saved_filenames: + filenames_string += filename + ',' + + filenames_string = filenames_string[:-1] # Remove trailing comma + + return filenames_string + + def runTest(self): + config = ConfigService.Instance() + + LoadEventNexus(Filename='CNCS_7860_event.nxs', + OutputWorkspace='CNCS_7860_event_NXS',CompressTolerance=0.1) + + for omega in xrange(0, 5): + print "Starting omega %03d degrees" % omega + CreateMDWorkspace(Dimensions='3',Extents='-5,5,-5,5,-5,5',Names='Q_sample_x,Q_sample_y,Q__sample_z',Units='A,A,A',SplitInto='3',SplitThreshold='200',MaxRecursionDepth='3', + MinRecursionDepth='3', OutputWorkspace='CNCS_7860_event_MD') + + # Convert events to MD events + AddSampleLog("CNCS_7860_event_NXS", "omega", "%s.0" % omega, "Number Series") + AddSampleLog("CNCS_7860_event_NXS", "chi", "%s" % 0.0, "Number Series") + AddSampleLog("CNCS_7860_event_NXS", "phi", "%s" % 0.0, "Number Series") + # V2 of ConvertToDiffractionMD needs Goniometer to be set on workspace. 
+ SetGoniometer(Workspace='CNCS_7860_event_NXS',Axis0='omega,0,0,1,1',Axis1='chi,1,0,0,1',Axis2='phi,0,1,0,1') + + ConvertToDiffractionMDWorkspace(InputWorkspace='CNCS_7860_event_NXS',OutputWorkspace='CNCS_7860_event_MD',OutputDimensions='Q (sample frame)',LorentzCorrection='1', Append=True) + + barefilename = "CNCS_7860_event_rotated_%03d.nxs" % omega + filename = os.path.join(config["defaultsave.directory"], barefilename) + alg = SaveMD("CNCS_7860_event_MD", Filename=filename) + self._saved_filenames.append(filename) + # End for loop + filename = os.path.join(config["defaultsave.directory"], r'merged.nxs') + alg = MergeMDFiles(Filenames=self.make_files_to_merge_string(), OutputFilename=filename, OutputWorkspace='merged') + self._saved_filenames.append(filename) + + # 5 times the number of events in the output workspace. + self.assertDelta( mtd['merged'].getNPoints(), 553035, 1) + + def doValidation(self): + # If we reach here, no validation failed + return True + + def cleanup(self): + for filename in self._saved_filenames: + try: + os.remove(filename) + Logger.get("MDWorkspaceTests").notice("Removed %s" % filename) + except OSError: + Logger.get("MDWorkspaceTests").notice("Failed to remove %s" % filename) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/MuonLoadTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/MuonLoadTest.py new file mode 100644 index 0000000000000000000000000000000000000000..fd2042b52802acb14029cf4d4ec6f258d0639692 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/MuonLoadTest.py @@ -0,0 +1,46 @@ +import stresstesting +from mantid.simpleapi import * + +class MuonLoadTest(stresstesting.MantidStressTest): + + def runTest(self): + # Create custom grouping + grouping = WorkspaceFactory.createTable() + grouping.addColumn("vector_int", "Detectors") + grouping.addRow([range(33,65)]) + grouping.addRow([range(1,33)]) + mtd.addOrReplace("MuonLoad_Grouping", grouping) + + # Create custom dead times + deadTimes = 
WorkspaceFactory.createTable() + deadTimes.addColumn("int", "Index") + deadTimes.addColumn("double", "Value") + for i in range(1, 65): + deadTimes.addRow([i, i * 0.01]) + mtd.addOrReplace("MuonLoad_DeadTimes", deadTimes) + + MuonLoad(Filename = "MUSR00015192", + DetectorGroupingTable = "MuonLoad_Grouping", + ApplyDeadTimeCorrection = True, + CustomDeadTimeTable = "MuonLoad_DeadTimes", + FirstPeriod = 1, + SecondPeriod = 0, + PeriodOperation = "-", + TimeZero = 0.6, + Xmin = 0.11, + Xmax = 10.0, + RebinParams = "0.032", + OutputType = "PairAsymmetry", + PairFirstIndex = 0, + PairSecondIndex = 1, + Alpha = 0.8, + OutputWorkspace = "MuonLoad_MUSR00015192" + ) + + def validate(self): + return "MuonLoad_MUSR00015192", "MuonLoad_MUSR00015192.nxs" + + def cleanup(self): + mtd.remove("MuonLoad_MUSR00015192") + mtd.remove("MuonLoad_Grouping") + mtd.remove("MuonLoad_DeadTimes") diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/OFFSPECLoadingTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/OFFSPECLoadingTest.py new file mode 100644 index 0000000000000000000000000000000000000000..fc44f4bb5dadabcd813066f64ff3e3dcdd98d968 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/OFFSPECLoadingTest.py @@ -0,0 +1,17 @@ +from LoadAndCheckBase import * + +''' +Test File loading and basic data integrity checks of OFFSPEC data in Mantid. 
+''' +class OFFSPECLoadingTest(LoadAndCheckBase): + def get_raw_workspace_filename(self): + return "OFFSPEC00010791.raw" + + def get_nexus_workspace_filename(self): + return "OFFSPEC00010791.nxs" + + def get_expected_number_of_periods(self): + return 2 + + def get_integrated_reference_workspace_filename(self): + return "OFFSPEC00010791_1Integrated.nxs" \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANS.py b/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANS.py new file mode 100644 index 0000000000000000000000000000000000000000..3c726ab0433d44a553a0dd2f6767ef2ef069448c --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANS.py @@ -0,0 +1,26 @@ +from stresstesting import MantidStressTest +from mantid.simpleapi import mtd, config + +class OffspecSESANS(MantidStressTest): + + def skipTests(self): + skip = False + try: + import offspec + except ImportError: + skip = True + return skip + + def requiredFiles(self): + return ["OFFSPEC00010791.raw","OFFSPEC00010792.raw","OFFSPEC00010793.raw"] + + def runTest(self): + import offspec + binning=["2.0","0.2","12.0","2"] + config["default.instrument"] = "OFFSPEC" + offspec.nrSESANSP0Fn("10792","P055","109","119","2","1",binning) + offspec.nrSESANSFn("10791+10793","dPMMA","","P055pol", + "100","130","2","1","2","3009.9",binning,"2","0") + + def validate(self): + return "dPMMASESANS","OffspecSESANS.nxs" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANSP0.py b/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANSP0.py new file mode 100644 index 0000000000000000000000000000000000000000..e5325cbe5760e1db2a168d0c36a726625d712c31 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/OffspecSESANSP0.py @@ -0,0 +1,27 @@ +from stresstesting import MantidStressTest +from mantid.simpleapi import config,mtd + +class OffspecSESANSP0(MantidStressTest): + + def skipTests(self): + skip = False + try: + import offspec 
+ except ImportError: + skip = True + return skip + + def requiredFiles(self): + return ["OFFSPEC00010792.raw"] + + def runTest(self): + import offspec + binning=["2.0","0.2","12.0","2"] + config["default.instrument"] = "OFFSPEC" + offspec.nrSESANSP0Fn("10792","P055","109","119","2","1",binning) + + def cleanup(self): + pass + + def validate(self): + return "P055pol","OffspecSESANSP0.nxs" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLPowderDiffraction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLPowderDiffraction.py new file mode 100644 index 0000000000000000000000000000000000000000..9e076c475a25faf85dde757afcc9e43aeeb70ebc --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLPowderDiffraction.py @@ -0,0 +1,44 @@ +import stresstesting +from mantid.simpleapi import * + +class PEARLPowderDiffraction(stresstesting.MantidStressTest): + + sample = "PEARL00073987.raw" + calfile = "pearl_offset_11_4.cal" + groupfile = "pearl_group_11_2_TT88.cal" + reffile = "PEARLPowderDiffraction.nxs" + + def requiredFiles(self): + return [self.sample, self.calfile, self.groupfile, self.reffile] + + def runTest(self): + LoadRaw(Filename=self.sample, OutputWorkspace='work',LoadLogFiles='0') + ConvertUnits(InputWorkspace='work',OutputWorkspace='work',Target='Wavelength') + + LoadRaw(Filename=self.sample, OutputWorkspace='monitor73987',LoadLogFiles='0',SpectrumMax='1') + ConvertUnits(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',Target='Wavelength') + CropWorkspace(InputWorkspace='monitor73987',OutputWorkspace='monitor73987', + XMin=0.03,XMax=6.0) + + MaskBins(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',XMin=3.45,XMax=3.7) + MaskBins(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',XMin=2.96,XMax=3.2) + MaskBins(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',XMin=2.1,XMax=2.26) + MaskBins(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',XMin=1.73,XMax=1.98) + + 
SplineBackground(InputWorkspace='monitor73987',OutputWorkspace='monitor73987',NCoeff=20) + NormaliseToMonitor(InputWorkspace='work',OutputWorkspace='work',MonitorWorkspace='monitor73987', + IntegrationRangeMin=0.6,IntegrationRangeMax=5.0) + ConvertUnits(InputWorkspace='work',OutputWorkspace='work',Target='TOF') + + rb_params = [1500,-0.0006,19900] + Rebin(InputWorkspace='work',OutputWorkspace='work',Params=rb_params) + AlignDetectors(InputWorkspace='work',OutputWorkspace='work', CalibrationFile=self.calfile) + DiffractionFocussing(InputWorkspace='work',OutputWorkspace='focus', + GroupingFileName=self.groupfile) + + ConvertUnits(InputWorkspace='focus',OutputWorkspace='focus',Target='TOF') + Rebin(InputWorkspace='focus',OutputWorkspace='focus',Params=rb_params) + CropWorkspace(InputWorkspace='focus',OutputWorkspace='focus',XMin=0.1) + + def validate(self): + return 'focus','PEARLPowderDiffraction.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLSystemTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLSystemTest.py new file mode 100644 index 0000000000000000000000000000000000000000..35230dba3f33ad688ddb50df85ecf35d13119fbb --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/PEARLSystemTest.py @@ -0,0 +1,384 @@ +import stresstesting +from mantid.simpleapi import * +from mantid import * +import os +import numpy as n +from abc import ABCMeta, abstractmethod + +'''Test adapted from actual script used by the scientists''' +class PEARL_Reduction(stresstesting.MantidStressTest): + __metaclass__ = ABCMeta # Mark as an abstract class + + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.attenfile = "PRL112_DC25_10MM_FF.OUT" + self.tofbinning="1500,-0.0006,19900" + self.calfile="pearl_offset_12_1.cal" + self.groupfile="pearl_group_12_1_TT70.cal" + self.vanfile="van_spline_TT70_cycle_12_1.nxs" + self.cycle="12_1" + self.instver="new2" + self.mode="all" + self.tt_mode="TT70" + self.saved_outfile = '' + 
self.saved_gssfile = '' + self.reference_nexus = '' + self.reference_gss = '' + self.reference_workspace = '' + + def runTest(self): + self.do_focus() + + def doValidation(self): + '''Override doValidation to vaildate two things at the same time''' + # reset validate() method to call validateNexus() instead + self.validate = self.validateNexus + res = self.validateWorkspaceToNeXus() + if not res: + return False + # reset validate() method to call validateGSS() instead + self.validate = self.validateGSS + res = self.validateASCII() + return res + + def cleanup(self): + '''Remove temporary files''' + if os.path.exists(self.saved_outfile): + os.remove(self.saved_outfile) + if os.path.exists(self.saved_gssfile): + os.remove(self.saved_gssfile) + + @abstractmethod + def do_focus(self): + raise NotImplementedError("Implmenent do_focus to do actual test.") + + def validateNexus(self): + '''Compare the result of reduction with the reference nexus file''' + return self.reference_workspace,self.reference_nexus + + def validateGSS(self): + '''Validate the created gss file''' + from mantid.api import FileFinder + return self.saved_gssfile,FileFinder.getFullPath(self.reference_gss) + + def PEARL_getlambdarange(self): + return 0.03,6.00 + + def PEARL_getmonitorspectrum(self, runno): + return 1 + + def PEARL_getfilename(self, run_number,ext): + digit=len(str(run_number)) + + numdigits=8 + filename="PEARL" + + for i in range(0,numdigits-digit): + filename=filename+"0" + + filename+=str(run_number)+"."+ext + return filename + + def PearlLoad(self, files,ext,outname): + + if type(files) is int: + infile=self.PEARL_getfilename(files,ext) + LoadRaw(Filename=infile,OutputWorkspace=outname,LoadLogFiles="0") + else: + loop=0 + num=files.split("_") + frange=range(int(num[0]),int(num[1])+1) + for i in frange: + infile=self.PEARL_getfilename(i,ext) + outwork="run"+str(i) + LoadRaw(Filename=infile,OutputWorkspace=outwork,LoadLogFiles="0") + loop=loop+1 + if loop == 2: + 
firstwk="run"+str(i-1) + secondwk="run"+str(i) + Plus(LHSWorkspace=firstwk,RHSWorkspace=secondwk,OutputWorkspace=outname) + mtd.remove(firstwk) + mtd.remove(secondwk) + elif loop > 2: + secondwk="run"+str(i) + Plus(LHSWorkspace=outname,RHSWorkspace=secondwk,OutputWorkspace=outname) + mtd.remove(secondwk) + return + + def PearlLoadMon(self, files,ext,outname): + + if type(files) is int: + infile=self.PEARL_getfilename(files,ext) + mspectra=self.PEARL_getmonitorspectrum(files) + LoadRaw(Filename=infile,OutputWorkspace=outname,SpectrumMin=mspectra,SpectrumMax=mspectra,LoadLogFiles="0") + else: + loop=0 + num=files.split("_") + frange=range(int(num[0]),int(num[1])+1) + mspectra=self.PEARL_getmonitorspectrum(int(num[0])) + for i in frange: + infile=self.PEARL_getfilename(i,ext) + outwork="mon"+str(i) + LoadRaw(Filename=infile,OutputWorkspace=outwork,SpectrumMin=mspectra,SpectrumMax=mspectra,LoadLogFiles="0") + loop=loop+1 + if loop == 2: + firstwk="mon"+str(i-1) + secondwk="mon"+str(i) + Plus(LHSWorkspace=firstwk,RHSWorkspace=secondwk,OutputWorkspace=outname) + mtd.remove(firstwk) + mtd.remove(secondwk) + elif loop > 2: + secondwk="mon"+str(i) + Plus(LHSWorkspace=outname,RHSWorkspace=secondwk,OutputWorkspace=outname) + mtd.remove(secondwk) + return + + + + def PEARL_getmonitor(self, number,ext,spline_terms=20): + + works="monitor"+str(number) + self.PearlLoadMon(number,ext,works) + ConvertUnits(InputWorkspace=works,OutputWorkspace=works,Target="Wavelength") + lmin,lmax=self.PEARL_getlambdarange() + CropWorkspace(InputWorkspace=works,OutputWorkspace=works,XMin=lmin,XMax=lmax) + ex_regions=n.zeros((2,4)) + ex_regions[:,0]=[3.45,3.7] + ex_regions[:,1]=[2.96,3.2] + ex_regions[:,2]=[2.1,2.26] + ex_regions[:,3]=[1.73,1.98] + + for reg in range(0,4): + MaskBins(InputWorkspace=works,OutputWorkspace=works,XMin=ex_regions[0,reg],XMax=ex_regions[1,reg]) + + SplineBackground(InputWorkspace=works,OutputWorkspace=works,WorkspaceIndex=0,NCoeff=spline_terms) + return works + + + def 
PEARL_read(self, number,ext,outname): + self.PearlLoad(number,ext,outname) + ConvertUnits(InputWorkspace=outname,OutputWorkspace=outname,Target="Wavelength") + monitor=self.PEARL_getmonitor(number,ext,spline_terms=20) + NormaliseToMonitor(InputWorkspace=outname,OutputWorkspace=outname,MonitorWorkspace=monitor,IntegrationRangeMin=0.6,IntegrationRangeMax=5.0) + ConvertUnits(InputWorkspace=outname,OutputWorkspace=outname,Target="TOF") + mtd.remove(monitor) + return + + def PEARL_focus(self, number,ext="raw",fmode="trans",ttmode="TT70",atten=True,van_norm=True): + + self.tt_mode=ttmode + self.mode=fmode + + work="work" + focus="focus" + + if type(number) is int: + outfile="PRL"+str(number)+".nxs" + gssfile="PRL"+str(number)+".gss" + outwork="PRL"+str(number) + else: + outfile="PRL"+number+".nxs" + gssfile="PRL"+number+".gss" + outwork="PRL"+number + + self.PEARL_read(number,ext,work) + Rebin(InputWorkspace=work,OutputWorkspace=work,Params=self.tofbinning) + AlignDetectors(InputWorkspace=work,OutputWorkspace=work,CalibrationFile=self.calfile) + DiffractionFocussing(InputWorkspace=work,OutputWorkspace=focus,GroupingFileName=self.groupfile) + + mtd.remove(work) + + for i in range(0,14): + output="mod"+str(i+1) + van="van"+str(i+1) + rdata="rdata"+str(i+1) + if (van_norm): + LoadNexus(Filename=self.vanfile,OutputWorkspace=van,EntryNumber=i+1) + ExtractSingleSpectrum(InputWorkspace=focus,OutputWorkspace=rdata,WorkspaceIndex=i) + Rebin(InputWorkspace=van,OutputWorkspace=van,Params=self.tofbinning) + ConvertUnits(InputWorkspace=rdata,OutputWorkspace=rdata,Target="TOF") + Rebin(InputWorkspace=rdata,OutputWorkspace=rdata,Params=self.tofbinning) + Divide(LHSWorkspace=rdata,RHSWorkspace=van,OutputWorkspace=output) + CropWorkspace(InputWorkspace=output,OutputWorkspace=output,XMin=0.1) + Scale(InputWorkspace=output,OutputWorkspace=output,Factor=10) + else: + ExtractSingleSpectrum(InputWorkspace=focus,OutputWorkspace=rdata,WorkspaceIndex=i) + 
ConvertUnits(InputWorkspace=rdata,OutputWorkspace=rdata,Target="TOF") + Rebin(InputWorkspace=rdata,OutputWorkspace=output,Params=self.tofbinning) + CropWorkspace(InputWorkspace=output,OutputWorkspace=output,XMin=0.1) + + mtd.remove(focus) + + if (self.mode=="all"): + CloneWorkspace(InputWorkspace="mod1",OutputWorkspace="bank1") + for i in range(1,9): + toadd="mod"+str(i+1) + Plus(LHSWorkspace="bank1",RHSWorkspace=toadd,OutputWorkspace="bank1") + Scale(InputWorkspace="bank1",OutputWorkspace="bank1",Factor=0.111111111111111) + SaveGSS(InputWorkspace="bank1",Filename=gssfile,Append=False,Bank=1) + ConvertUnits(InputWorkspace="bank1",OutputWorkspace="bank1",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="bank1",Append=False) + for i in range(0,5): + tosave="mod"+str(i+10) + SaveGSS(InputWorkspace=tosave,Filename=gssfile,Append=True,Bank=i+2) + ConvertUnits(InputWorkspace=tosave,OutputWorkspace=tosave,Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace=tosave,Append=True) + + for i in range(0,14): + output="mod"+str(i+1) + van="van"+str(i+1) + rdata="rdata"+str(i+1) + mtd.remove(rdata) + mtd.remove(van) + mtd.remove(output) + mtd.remove("bank1") + + elif (self.mode=="groups"): + CloneWorkspace(InputWorkspace="mod1",OutputWorkspace="group1") + CloneWorkspace(InputWorkspace="mod4",OutputWorkspace="group2") + CloneWorkspace(InputWorkspace="mod7",OutputWorkspace="group3") + for i in range(1,3): + toadd="mod"+str(i+1) + Plus(LHSWorkspace="group1",RHSWorkspace=toadd,OutputWorkspace="group1") + Scale(InputWorkspace="group1",OutputWorkspace="group1",Factor=0.333333333333) + for i in range(1,3): + toadd="mod"+str(i+4) + Plus(LHSWorkspace="group2",RHSWorkspace=toadd,OutputWorkspace="group2") + Scale(InputWorkspace="group2",OutputWorkspace="group2",Factor=0.333333333333) + for i in range(1,3): + toadd="mod"+str(i+7) + Plus(LHSWorkspace="group3",RHSWorkspace=toadd,OutputWorkspace="group3") + 
Scale(InputWorkspace="group3",OutputWorkspace="group3",Factor=0.333333333333) + Plus(LHSWorkspace="group2",RHSWorkspace="group3",OutputWorkspace="group23") + Scale(InputWorkspace="group23",OutputWorkspace="group23",Factor=0.5) + SaveGSS("group1",Filename=gssfile,Append=False,Bank=1) + ConvertUnits(InputWorkspace="group1",OutputWorkspace="group1",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="group1",Append=False) + SaveGSS(InputWorkspace="group2",Filename=gssfile,Append=True,Bank=2) + ConvertUnits(InputWorkspace="group2",OutputWorkspace="group2",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="group2",Append=True) + SaveGSS(InputWorkspace="group3",Filename=gssfile,Append=True,Bank=3) + ConvertUnits(InputWorkspace="group3",OutputWorkspace="group3",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="group3",Append=True) + SaveGSS(InputWorkspace="group23",Filename=gssfile,Append=True,Bank=4) + ConvertUnits(InputWorkspace="group23",OutputWorkspace="group23",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="group23",Append=True) + for i in range(0,3): + tosave="mod"+str(i+10) + SaveGSS(InputWorkspace=tosave,Filename=gssfile,Append=True,Bank=i+5) + ConvertUnits(InputWorkspace=tosave,OutputWorkspace=tosave,Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace=tosave,Append=True) + for i in range(0,14): + output="mod"+str(i+1) + van="van"+str(i+1) + rdata="rdata"+str(i+1) + mtd.remove(rdata) + mtd.remove(van) + mtd.remove(output) + mtd.remove("group1") + mtd.remove("group2") + mtd.remove("group3") + mtd.remove("group23") + + elif (self.mode=="trans"): + CloneWorkspace(InputWorkspace="mod1",OutputWorkspace="bank1") + for i in range(1,9): + toadd="mod"+str(i+1) + Plus(LHSWorkspace="bank1",RHSWorkspace=toadd,OutputWorkspace="bank1") + Scale(InputWorkspace="bank1",OutputWorkspace="bank1",Factor=0.111111111111111) + if (atten): + ConvertUnits(InputWorkspace="bank1",OutputWorkspace="bank1",Target="dSpacing") + 
CloneWorkspace(InputWorkspace="bank1",OutputWorkspace=outwork+"_noatten") + self.PEARL_atten("bank1","bank1") + ConvertUnits(InputWorkspace="bank1",OutputWorkspace="bank1",Target="TOF") + + SaveGSS(InputWorkspace="bank1",Filename=gssfile,Append=False,Bank=1) + ConvertUnits(InputWorkspace="bank1",OutputWorkspace="bank1",Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace="bank1",Append=False) + for i in range(0,9): + tosave="mod"+str(i+1) + ConvertUnits(InputWorkspace=tosave,OutputWorkspace=tosave,Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace=tosave,Append=True) + + for i in range(0,14): + output="mod"+str(i+1) + van="van"+str(i+1) + rdata="rdata"+str(i+1) + mtd.remove(rdata) + mtd.remove(van) + mtd.remove(output) + mtd.remove("bank1") + + elif (self.mode=="mods"): + for i in range(0,12): + output="mod"+str(i+1) + van="van"+str(i+1) + rdata="rdata"+str(i+1) + if (i==0): + SaveGSS(InputWorkspace=output,Filename=gssfile,Append=False,Bank=i+1) + ConvertUnits(InputWorkspace=output,OutputWorkspace=output,Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace=output,Append=False) + else: + SaveGSS(InputWorkspace=output,Filename=gssfile,Append=True,Bank=i+1) + ConvertUnits(InputWorkspace=output,OutputWorkspace=output,Target="dSpacing") + SaveNexus(Filename=outfile,InputWorkspace=output,Append=True) + + mtd.remove(rdata) + mtd.remove(van) + mtd.remove(output) + + else: + print "Sorry I don't know that mode", mode + return + + LoadNexus(Filename=outfile,OutputWorkspace=outwork) + + # temporary nxs file to be deleted on cleanup + self.saved_outfile = os.path.join(config['defaultsave.directory'],outfile) + # temporary gss file to be deleted on cleanup + self.saved_gssfile = os.path.join(config['defaultsave.directory'],gssfile).replace('.gss','-0.gss') + # name of the reference nxs file which is the same as outfile + self.reference_nexus = outfile.replace('PRL','PEARL') + # name of the reference gss file + self.reference_gss = 
gssfile.replace('.gss','-0.gss').replace('PRL','PEARL') + # workspace to be compared with reference_nexus + self.reference_workspace = outwork + + def PEARL_atten(self, work,outwork): + PearlMCAbsorption(Filename=self.attenfile,OutputWorkspace="wc_atten") + ConvertToHistogram(InputWorkspace="wc_atten",OutputWorkspace="wc_atten") + RebinToWorkspace(WorkspaceToRebin="wc_atten",WorkspaceToMatch=work,OutputWorkspace="wc_atten") + Divide(LHSWorkspace=work,RHSWorkspace="wc_atten",OutputWorkspace=outwork) + mtd.remove("wc_atten") + return + +#================================================================================ +class PEARL_Mode_trans(PEARL_Reduction): + def do_focus(self): + #self.reference_nexus = "PRL75318_75323.nxs" + return self.PEARL_focus("75318_75323","raw",fmode="trans",ttmode="TT70",atten=True) + + def doValidation(self): + '''Validate an additional workspace''' + res = PEARL_Reduction.doValidation(self) + if not res: + return False + self.validate = self.validateNoAtten + return self.validateWorkspaceToNeXus() + + def validateNoAtten(self): + return 'PRL75318_75323_noatten','PEARL75318_75323_noatten.nxs' + +#================================================================================ +class PEARL_Mode_all_Si(PEARL_Reduction): + def do_focus(self): + #self.reference_nexus = "PRL74798_74800.nxs" + return self.PEARL_focus("74798_74800","raw",fmode="all",ttmode="TT70",atten=False) + +#================================================================================ +class PEARL_Mode_all_CeO2(PEARL_Reduction): + def do_focus(self): + #self.reference_nexus = "PRL74795_74797.nxs" + return self.PEARL_focus("74795_74797","raw",fmode="all",ttmode="TT70",atten=False) + +#================================================================================ diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py new file mode 100644 index 
import stresstesting
from mantid.simpleapi import *
import numpy as np


class POLDIAnalyseResidualsTest(stresstesting.MantidStressTest):
    '''This test checks that the residual analysis algorithm for POLDI works correctly.'''

    def runTest(self):
        dataFiles = ["poldi2014n019874"]

        self.loadReferenceData(dataFiles)
        self.runResidualAnalysis(dataFiles)
        self.analyseResults(dataFiles)

    def loadReferenceData(self, filenames):
        # Reference fit and residuals produced by the original Fortran program.
        for dataFile in filenames:
            Load(Filename="%s_fortran_fit.nxs" % (dataFile), OutputWorkspace="%s_fortran_fit" % (dataFile))
            Load(Filename="%s_fortran_residuals.nxs" % (dataFile), OutputWorkspace="%s_fortran_residuals" % (dataFile))

    def runResidualAnalysis(self, filenames):
        for dataFile in filenames:
            LoadSINQFile(Instrument='POLDI', Filename=dataFile + ".hdf", OutputWorkspace=dataFile)
            LoadInstrument(Workspace=dataFile, InstrumentName="POLDI", RewriteSpectraMap=True)
            PoldiTruncateData(InputWorkspace=dataFile, OutputWorkspace=dataFile)
            PoldiAnalyseResiduals(MeasuredCountData=dataFile,
                                  FittedCountData="%s_fortran_fit" % (dataFile),
                                  MaxIterations=1,
                                  OutputWorkspace=dataFile + "Residuals")

    def analyseResults(self, filenames):
        # Fit calculated-vs-reference residuals with a straight line; perfect
        # agreement corresponds to slope 1 and intercept 0.
        for dataFile in filenames:
            workspaceNameTemplate = "Comparison_%s" % (dataFile)

            referenceData = mtd["%s_fortran_residuals" % (dataFile)].dataY(0)
            calculatedData = mtd["%sResiduals" % (dataFile)].dataY(0)

            self.assertEqual(calculatedData.shape[0], referenceData.shape[0],
                             "Number of d-values does not match for %s (is: %i, should: %i)" % (
                                 dataFile, calculatedData.shape[0], referenceData.shape[0]))

            CreateWorkspace(referenceData, calculatedData, OutputWorkspace=workspaceNameTemplate)

            fitNameTemplate = "Fit_%s" % (dataFile)
            Fit("name=LinearBackground", mtd[workspaceNameTemplate],
                StartX=np.min(referenceData), EndX=np.max(referenceData), Output=fitNameTemplate)

            fitResult = mtd[fitNameTemplate + "_Parameters"]

            # BUGFIX: these failure messages formatted floats with %d, which
            # truncates (a slope of 0.998 printed as "0"); use %f instead.
            slope = fitResult.cell(1, 1)
            self.assertDelta(slope, 1.0, 1e-2,
                             "Slope is larger than 1.0 for %s (is: %f)" % (dataFile, slope))

            relativeSlopeError = fitResult.cell(1, 2) / slope
            self.assertLessThan(relativeSlopeError, 5e-3,
                                "Relative error of slope is too large for %s (is: %f)" % (dataFile, relativeSlopeError))

            intercept = fitResult.cell(0, 1)
            self.assertDelta(intercept, 0.0, 1e-3,
                             "Intercept deviates too far from 0 %s (is: %f)" % (dataFile, intercept))

            residuals = mtd[fitNameTemplate + "_Workspace"].dataY(2)
            maxAbsoluteResidual = np.max(np.abs(residuals))
            self.assertLessThan(maxAbsoluteResidual, 1.0,
                                "Maximum absolute residual is too large for %s (is: %f)" % (dataFile, maxAbsoluteResidual))


import stresstesting
from mantid.simpleapi import *
import numpy as np


class POLDIAutoCorrelationTest(stresstesting.MantidStressTest):
    '''This test checks that the results of PoldiAutoCorrelation match the expected outcome.'''

    def runTest(self):
        dataFiles = ["poldi2013n006903", "poldi2013n006904", "poldi2014n019874", "poldi2014n019881"]

        self.loadReferenceData(dataFiles)
        self.runAutoCorrelation(dataFiles)
        self.analyseResults(dataFiles)

    def loadReferenceData(self, filenames):
        for dataFile in filenames:
            Load(Filename="%s_reference.nxs" % (dataFile), OutputWorkspace="%s_reference" % (dataFile))

    def runAutoCorrelation(self, filenames):
        for dataFile in filenames:
            LoadSINQFile(Instrument='POLDI', Filename=dataFile + ".hdf", OutputWorkspace=dataFile)
            LoadInstrument(Workspace=dataFile, InstrumentName="POLDI", RewriteSpectraMap=True)
            PoldiTruncateData(InputWorkspace=dataFile, OutputWorkspace=dataFile)
            PoldiAutoCorrelation(InputWorkspace=dataFile, wlenmin=1.1, wlenmax=5.0,
                                 OutputWorkspace=dataFile + "Corr")

    def analyseResults(self, filenames):
        # Same linear-fit comparison as the residuals test, with tighter tolerances.
        for dataFile in filenames:
            workspaceNameTemplate = "Comparison_%s" % (dataFile)

            referenceData = mtd["%s_reference" % (dataFile)].dataY(0)
            calculatedData = mtd["%sCorr" % (dataFile)].dataY(0)

            self.assertEqual(calculatedData.shape[0], referenceData.shape[0],
                             "Number of d-values does not match for %s (is: %i, should: %i)" % (
                                 dataFile, calculatedData.shape[0], referenceData.shape[0]))

            CreateWorkspace(referenceData, calculatedData, OutputWorkspace=workspaceNameTemplate)

            fitNameTemplate = "Fit_%s" % (dataFile)
            Fit("name=LinearBackground", mtd[workspaceNameTemplate],
                StartX=np.min(referenceData), EndX=np.max(referenceData), Output=fitNameTemplate)

            fitResult = mtd[fitNameTemplate + "_Parameters"]

            # BUGFIX: %d -> %f in the messages below (floats were truncated).
            slope = fitResult.cell(1, 1)
            self.assertDelta(slope, 1.0, 1e-4,
                             "Slope is larger than 1.0 for %s (is: %f)" % (dataFile, slope))

            relativeSlopeError = fitResult.cell(1, 2) / slope
            self.assertLessThan(relativeSlopeError, 5e-4,
                                "Relative error of slope is too large for %s (is: %f)" % (dataFile, relativeSlopeError))

            intercept = fitResult.cell(0, 1)
            self.assertDelta(intercept, 0.0, 1.0,
                             "Intercept deviates too far from 0 %s (is: %f)" % (dataFile, intercept))

            relativeInterceptError = fitResult.cell(0, 2) / intercept
            self.assertLessThan(relativeInterceptError, 1,
                                "Relative error of intercept is too large for %s (is: %f)" % (dataFile, relativeInterceptError))

            residuals = mtd[fitNameTemplate + "_Workspace"].dataY(2)
            maxAbsoluteResidual = np.max(np.abs(residuals))
            self.assertLessThan(maxAbsoluteResidual, 1.0,
                                "Maximum absolute residual is too large for %s (is: %f)" % (dataFile, maxAbsoluteResidual))
+ diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks1DTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks1DTest.py new file mode 100644 index 0000000000000000000000000000000000000000..db35acce4e8bc78e37be8da4a2cfedfec0d58efd --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks1DTest.py @@ -0,0 +1,99 @@ +import stresstesting +from mantid.simpleapi import * +import numpy as np + +'''Checking results of PoldiFitPeaks1D.''' +class POLDIFitPeaks1DTest(stresstesting.MantidStressTest): + # The errors of fitted parameters in version 2 are a bit small + # because of the "fabricated data", so a larger margin has to be allowed. + versionDeltas = {1: 2.0e-4, 2: 1.5e-3} + errorMultiplier = {1: 1.0, 2: 4.0} + + def runTest(self): + dataFiles = ["poldi2013n006904", "poldi_2_phases_theoretical"] + versions = [1, 2] + deleteList = [[], ['12-16', '10', '9']] + + self.loadReferenceCorrelationData(dataFiles) + self.loadReferenceFitResults(dataFiles) + self.runPeakSearch(dataFiles, deleteList) + self.runPoldiFitPeaks1D(dataFiles, versions) + self.analyseResults(dataFiles, versions) + + def loadReferenceCorrelationData(self, filenames): + for dataFile in filenames: + Load(Filename="%s_reference.nxs" % (dataFile), OutputWorkspace=dataFile) + + def runPeakSearch(self, filenames, deleteList): + for dataFile,deleteRowList in zip(filenames, deleteList): + PoldiPeakSearch(InputWorkspace=dataFile, + MinimumPeakSeparation=8, + OutputWorkspace="%s_Peaks" % (dataFile)) + + for deleteRows in deleteRowList: + DeleteTableRows(TableWorkspace="%s_Peaks" % (dataFile), Rows=deleteRows) + + def loadReferenceFitResults(self, filenames): + for dataFile in filenames: + Load(Filename="%s_reference_1DFit.nxs" % (dataFile), OutputWorkspace="%s_reference_1DFit" % (dataFile)) + + def runPoldiFitPeaks1D(self, filenames, versions): + for dataFile, version in zip(filenames, versions): + args = {"InputWorkspace": dataFile, + "FwhmMultiples": 4, + 
"PoldiPeakTable": "%s_Peaks" % (dataFile), + "OutputWorkspace": "%s_Peaks_Refined" % (dataFile), + "FitPlotsWorkspace": "%s_FitPlots" % (dataFile), + "Version": version} + + if version == 2: + args["AllowedOverlap"] = 0.1 + + PoldiFitPeaks1D(**args) + + # This test makes sure that: + # - standard deviations of position and relative fwhm are acceptably small (indicates reasonable fit) + # - refined peak positions are within one standard deviation of reference results obtained from existing program + # - fwhms do not deviate too much from reference results + # - currently, only the first 10 peaks are compared (as in the peak search test) + def analyseResults(self, filenames, versions): + for dataFile, version in zip(filenames, versions): + calculatedPeaks = mtd["%s_Peaks_Refined" % (dataFile)] + referencePeaks = mtd["%s_reference_1DFit" % (dataFile)] + self.assertEqual(calculatedPeaks.rowCount(), referencePeaks.rowCount()) + + positions = calculatedPeaks.column(2) + referencePositions = [float(x) for x in referencePeaks.column(0)] + + fwhms = calculatedPeaks.column(4) + referenceFwhms = [float(x) for x in referencePeaks.column(1)] + + for i in range(10): + # extract position and fwhm with uncertainties + positionparts = positions[i].split() + position = [float(positionparts[0]), float(positionparts[2])] + + fwhmparts = fwhms[i].split() + fwhm = [float(fwhmparts[0]), float(fwhmparts[2])] + + self.assertTrue(self.positionAcceptable(position)) + self.assertTrue(self.fwhmAcceptable(fwhm)) + + # find closest reference peak + deltas = np.array([np.abs(position[0] - x) for x in referencePositions]) + + + self.assertDelta(deltas.min(), 0.0, self.versionDeltas[version]) + minIndex = deltas.argmin() + + self.assertTrue(self.uncertainValueEqualsReference(position, referencePositions[minIndex], self.errorMultiplier[version])) + self.assertDelta(fwhm[0], referenceFwhms[minIndex], self.versionDeltas[version]) + + def positionAcceptable(self, position): + return position[1] < 1e-3 
+ + def fwhmAcceptable(self, fwhm): + return fwhm[1] < 3e-3 + + def uncertainValueEqualsReference(self, value, reference, sigmas): + return np.abs(value[0] - reference) < (sigmas * value[1]) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks2DTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks2DTest.py new file mode 100644 index 0000000000000000000000000000000000000000..f741019bd6bbb74ab32e9901ec72a7035f0425bc --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIFitPeaks2DTest.py @@ -0,0 +1,76 @@ +import stresstesting +from mantid.simpleapi import * +import numpy as np + +'''The system test currently checks that the calculation of 2D spectra +works correctly.''' +class POLDIFitPeaks2DTest(stresstesting.MantidStressTest): + def runTest(self): + dataFiles = ["poldi2013n006904"] + + self.loadAndPrepareData(dataFiles) + self.loadReferencePeakData(dataFiles) + self.loadReferenceSpectrum(dataFiles) + self.runCalculateSpectrum2D(dataFiles) + self.analyseResults(dataFiles) + + def loadAndPrepareData(self, filenames): + for dataFile in filenames: + LoadSINQFile(Instrument='POLDI',Filename=dataFile + ".hdf",OutputWorkspace=dataFile) + LoadInstrument(Workspace=dataFile, InstrumentName="POLDI", RewriteSpectraMap=True) + PoldiTruncateData(InputWorkspace=dataFile, OutputWorkspace=dataFile) + + def loadReferencePeakData(self, filenames): + for dataFile in filenames: + Load(Filename="%s_2d_reference_Peaks.nxs" % (dataFile), OutputWorkspace="%s_reference_Peaks" % (dataFile)) + + def loadReferenceSpectrum(self, filenames): + for dataFile in filenames: + Load(Filename="%s_2d_reference_Spectrum.nxs" % (dataFile), OutputWorkspace="%s_2d_reference_Spectrum" % (dataFile)) + Load(Filename="%s_1d_reference_Spectrum.nxs" % (dataFile), OutputWorkspace="%s_1d_reference_Spectrum" % (dataFile)) + + def runCalculateSpectrum2D(self, filenames): + for dataFile in filenames: + PoldiFitPeaks2D(InputWorkspace=dataFile, + 
PoldiPeakWorkspace="%s_reference_Peaks" % (dataFile), + PeakProfileFunction="Gaussian", + RefinedPoldiPeakWorkspace="%s_refined_Peaks" % (dataFile), + OutputWorkspace="%s_2d_calculated_Spectrum" % (dataFile), + Calculated1DSpectrum="%s_1d_calculated_Spectrum" % (dataFile), + MaximumIterations=0) + + def analyseResults(self, filenames): + for dataFile in filenames: + calculatedSpectrum = mtd["%s_2d_calculated_Spectrum" % (dataFile)] + referenceSpectrum = mtd["%s_2d_reference_Spectrum" % (dataFile)] + + self.assertEqual(calculatedSpectrum.getNumberHistograms(), referenceSpectrum.getNumberHistograms()) + + for i in range(calculatedSpectrum.getNumberHistograms()): + calHisto = calculatedSpectrum.readY(i) + + if not referenceSpectrum.getDetector(i).isMasked(): + refHisto = referenceSpectrum.readY(i) + + absDiff = np.fabs(refHisto - calHisto) + self.assertTrue(np.all(absDiff < 7e-4)) + else: + self.assertTrue(np.all(calHisto == 0.0)) + + spectra1D = ["%s_1d_%s_Spectrum"] + + for wsName in spectra1D: + calculatedSpectrum1D = mtd[wsName % (dataFile, "calculated")] + referenceSpectrum1D = mtd[wsName % (dataFile, "reference")] + + xDataCalc = calculatedSpectrum1D.readX(0) + yDataCalc = calculatedSpectrum1D.readY(0) + + xDataRef = referenceSpectrum1D.readX(0) + yDataRef = referenceSpectrum1D.readY(0) + + indices = np.nonzero(yDataRef) + maxDifference = np.abs(np.max((yDataCalc[indices] - yDataRef[indices]) / yDataCalc[indices])) + + self.assertTrue(np.all(xDataCalc == xDataRef)) + self.assertLessThan(maxDifference, 0.0031) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIMergeTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIMergeTest.py new file mode 100644 index 0000000000000000000000000000000000000000..abea1836eb2ae78638edb9c4e86ecf5c06182f62 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIMergeTest.py @@ -0,0 +1,64 @@ +import stresstesting +from mantid.simpleapi import * +import numpy as np + +'''This test checks that 
the results of PoldiMerge match the expected outcome.''' +class POLDIMergeTest(stresstesting.MantidStressTest): + def runTest(self): + self.testHappyCase() + self.testDifferentTimings() + + + def testDifferentTimings(self): + dataFiles = ["poldi2014n019874", "poldi2014n019881"] + self.loadData(dataFiles) + + try: + self.runPoldiMerge(dataFiles, "Dummy") + self.assertTrue(False) + except RuntimeError: + self.assertTrue(True) + + + def testHappyCase(self): + dataFiles = ["poldi2013n006903", "poldi2013n006904"] + sumWorkspace = "poldi_sum_6903_6904" + + self.loadData(dataFiles) + self.runPoldiMerge(dataFiles, sumWorkspace) + + self.loadReferenceData(sumWorkspace) + self.analyseResults(sumWorkspace) + + sumWorkspaceGroup = GroupWorkspaces(dataFiles) + workspaceGroupResult = self.testGroupWorkspace(sumWorkspaceGroup) + + # compare result of workspace group merging to previously checked results + self.compareWorkspaces(workspaceGroupResult, mtd['poldi_sum_6903_6904']) + + + + def testGroupWorkspace(self, groupWorkspace): + return PoldiMerge(groupWorkspace) + + + def loadData(self, filenames): + for dataFile in filenames: + LoadSINQFile(Instrument='POLDI',Filename=dataFile + ".hdf",OutputWorkspace=dataFile) + LoadInstrument(Workspace=dataFile, InstrumentName="POLDI", RewriteSpectraMap=True) + + def runPoldiMerge(self, workspaceNames, outputWorkspaceName): + PoldiMerge(WorkspaceNames=workspaceNames, OutputWorkspace=outputWorkspaceName) + + def loadReferenceData(self, outputWorkspaceName): + Load(Filename=outputWorkspaceName + "_reference.nxs", OutputWorkspace=outputWorkspaceName + "_reference") + + def analyseResults(self, outputWorkspaceName): + for i in range(mtd[outputWorkspaceName + '_reference'].getNumberHistograms()): + # reference spectrum is still in the "original order", so for one of the workspaces, the index has to be reversed. 
+ self.assertTrue(np.array_equal(mtd[outputWorkspaceName].dataY(i), mtd[outputWorkspaceName + '_reference'].dataY(399 - i))) + + def compareWorkspaces(self, left, right): + for i in range(left.getNumberHistograms()): + self.assertTrue(np.array_equal(left.dataY(i), right.dataY(i))) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIPeakSearchTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIPeakSearchTest.py new file mode 100644 index 0000000000000000000000000000000000000000..bd495127818f0897264e846bc51544294f23e725 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIPeakSearchTest.py @@ -0,0 +1,46 @@ +import stresstesting +from mantid.simpleapi import * +import numpy as np + +'''This test checks that the results of PoldiAutoCorrelation match the expected outcome.''' +class POLDIPeakSearchTest(stresstesting.MantidStressTest): + def runTest(self): + dataFiles = ["poldi2013n006903", "poldi2013n006904"] + + self.loadReferenceCorrelationData(dataFiles) + self.loadReferencePeakData(dataFiles) + self.runPeakSearch(dataFiles) + self.analyseResults(dataFiles) + + def loadReferenceCorrelationData(self, filenames): + for dataFile in filenames: + Load(Filename="%s_reference.nxs" % (dataFile), OutputWorkspace=dataFile) + + def loadReferencePeakData(self, filenames): + for dataFile in filenames: + Load(Filename="%s_reference_Peaks.nxs" % (dataFile), OutputWorkspace="%s_reference_Peaks" % (dataFile)) + + def runPeakSearch(self, filenames): + for dataFile in filenames: + PoldiPeakSearch(InputWorkspace=dataFile, OutputWorkspace="%s_Peaks" % (dataFile)) + + def analyseResults(self, filenames): + for dataFile in filenames: + calculatedPeaks = mtd["%s_Peaks" % (dataFile)] + referencePeaks = mtd["%s_reference_Peaks" % (dataFile)] + self.assertEqual(calculatedPeaks.rowCount(), referencePeaks.rowCount()) + + positions = calculatedPeaks.column(2) + referencePositions = referencePeaks.column(0) + + # In this test we only compare positions, 
because the height + # and error estimates are derived differently than in the + # original software, so the results are not exactly the same. + # + # Most important in this case are peak positions. Since the order + # depends on height, it may be different, so the comparison can not + # be done 1:1. + for position in positions[:10]: + deltas = [np.abs(float(position) - x) for x in referencePositions] + + self.assertDelta(min(deltas), 0.0, 1e-6) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDITruncateDataTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDITruncateDataTest.py new file mode 100644 index 0000000000000000000000000000000000000000..7921bd88ec29f35ee60f9433a3e909ab1f21c385 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDITruncateDataTest.py @@ -0,0 +1,81 @@ +import stresstesting +from mantid.simpleapi import * + +'''This test checks that the results of PoldiAutoCorrelation match the expected outcome.''' +class POLDITruncateDataTest(stresstesting.MantidStressTest): + def runTest(self): + self.dataFileName = "poldi2013n006903" + + self.loadDataFiles() + self.workingAnalysis() + self.workspaceAlreadyCorrect() + self.workspaceTooSmall() + + def loadDataFiles(self,): + LoadSINQFile(Instrument='POLDI',Filename=self.dataFileName + ".hdf",OutputWorkspace=self.dataFileName) + LoadInstrument(Workspace=self.dataFileName, InstrumentName="POLDI") + + def workingAnalysis(self): + # In this method the "normal behavior" is tested, if everything is + # running as expected. 
+ currentWs = mtd[self.dataFileName] + + # Input data has 10 extra bins + self.assertEqual(len(currentWs.readX(0)), 510) + + # First without keeping the additional data + truncated = PoldiTruncateData(currentWs) + + self.assertEqual(truncated.getNumberHistograms(), currentWs.getNumberHistograms()) + self.assertEqual(len(truncated.readX(0)), 500) + + # now keeping the additional data + truncated = PoldiTruncateData(currentWs, ExtraCountsWorkspaceName="extra") + + self.assertTrue(mtd.doesExist("extra")) + + extraWs = mtd['extra'] + + self.assertEqual(extraWs.getNumberHistograms(), 1) + extraCounts = extraWs.readY(0) + self.assertEqual(len(extraCounts), 10) + + # there are 13 counts in the first bin + self.assertEqual(extraCounts[0], 13.0) + + # and none in the others + for y in extraCounts[1:]: + self.assertEqual(y, 0.0) + + def workspaceAlreadyCorrect(self): + # This method tests expected behavior if the workspace + # already has the correct size + currentWs = mtd[self.dataFileName] + + cropped = CropWorkspace(currentWs, XMax=1497.0) + self.assertEqual(len(cropped.readX(0)), 500) + + truncated = PoldiTruncateData(cropped) + self.assertEqual(len(truncated.readX(0)), len(cropped.readX(0))) + + # Now there are no extra bins. + truncated = PoldiTruncateData(cropped, ExtraCountsWorkspaceName="moreCounts") + + # "extraCounts" should not be in the analysis data service + self.assertTrue(not mtd.doesExist("moreCounts")) + + def workspaceTooSmall(self): + # When the workspace is too small, the whole analysis fails. 
+ # This is reasonable since the timing information is then + # very likely to be incorrect, so that the data file is not usable + currentWs = mtd[self.dataFileName] + + cropped = CropWorkspace(currentWs, XMax=1197.0) + self.assertEqual(len(cropped.readX(0)), 400) + + truncated = PoldiTruncateData(cropped) + self.assertTrue(truncated is None) + + PoldiTruncateData(InputWorkspace=cropped, OutputWorkspace="NamedWorkspaceTest") + self.assertTrue(not mtd.doesExist("NamedWorkspaceTest")) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLREFLoadingTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLREFLoadingTest.py new file mode 100644 index 0000000000000000000000000000000000000000..3383084c927dabb6e60613d25e1cd599191df57f --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLREFLoadingTest.py @@ -0,0 +1,20 @@ +from LoadAndCheckBase import * + +''' +Test File loading and basic data integrity checks of POLREF data in Mantid. +''' +class POLREFLoadingTest(LoadAndCheckBase): + def get_raw_workspace_filename(self): + return "POLREF00004699.raw" + + def get_nexus_workspace_filename(self): + return "POLREF00004699.nxs" + + def get_expected_number_of_periods(self): + return 2 + + def get_integrated_reference_workspace_filename(self): + return "POLREF00004699_1Integrated.nxs" + + def get_expected_instrument_name(self): + return "POLREF" \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/Peak2ConvCell_Test.py b/Code/Mantid/Testing/SystemTests/tests/analysis/Peak2ConvCell_Test.py new file mode 100644 index 0000000000000000000000000000000000000000..9263716cd454565e9b75415eaa77483728d58d8e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/Peak2ConvCell_Test.py @@ -0,0 +1,930 @@ +#This script creates numerous PeaksWorkspaces for different Crystal Types and Centerings. Random errors +#are also introduced into the peak's. 
Each PeaksWorkspace is sent through the algorithm's FindPeaksMD, +#FindUBUsingFFT, and SelectByForm to determine the corresponding Primitive and Conventional cells. These +#results are tested against the theoretical results that should have been gotten + +#NOTE; THIS TEST TAKES AN EXTREMELY LONG TIME. DELETE "XXX" IN requiredFiles method to get it to run. +#!!!!!!!!! REPLACE THE "XXX" OR else !!!!!!!!!! + + +import stresstesting +import numpy +from numpy import matrix +from numpy import linalg +import math +import random +import mantid +from mantid.simpleapi import * +#from mantid.simpleapi import * +#TODO premultiply cases, fix up.. Maybe not needed Cause Conv cell was "Nigglied" +#TODO: SWitch cases, if use approx inequality, may get error cause low level code [does Not](does) premult but when it [should](should not) +class Peak2ConvCell_Test:#(stresstesting.MantidStressTest): + conventionalUB=numpy.zeros(shape=(3,3)) + Cubic=[1,3,5] + Tetr=[6,7,11,15,18,21] + Orth=[8,13,16,19,23,26,32,36,38,40,42] + Hex = [2,4,9,12,22,24] + Tricl=[31,44] + Mon=[28,29,30,33,34,35,43] + MonI=[17,27] + MonC=[10,14,20,25,37,39,41] + CentP=[3,11,12,21,22,31,32,33,34,35,44] + CentF=[1,16,26] + CentI=[2,4,5,6,7,8,9,10,14,15,17,18,19,20,24,25,27,37,39,41,42,43] + CentC=[10,13,14,17,20,23,25,27,28,29,30,36,37,38,39,40,41] + + + def CalcConventionalUB(self,a,b,c,alpha,beta,gamma,type): + Res= matrix([[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]]) + + if type=='O': + + Res[0,0]=1./a + Res[1,1]=1./b + Res[2,2]=1./c + + elif type=='H': + Res[0,0]= a*1.0 + Res[1,0]= -a/2. 
+ Res[1,1]= a*.866 + Res[2,2]=c*1.0 + Res=Res.I + else: + if alpha <=90: + self.conventionalUB = None + return None + Res[0,0] = a*1.0 + Res[1,1] = b*1.0 + Alpha = (alpha*math.pi/180) + Res[2,0] = c*math.cos( Alpha) + Res[2,2] = c*math.sin(Alpha) + # Now Nigglify the matrix( get 3 smallest sides) + + n =0 + YY=0 + if( a <=c): + n = (int)(-Res[2,0]/a) + YY= Res[2,0] +n*a + + else: + + n= (int)(-a*Res[2,0]/(c*c)-.5) + YY=n*Res[2,0]+a + + #print ["A",YY,n] + sgn=1 + if( a <= c): + + if ( math.fabs( YY + a ) < math.fabs( YY ) and a <= c ): + + YY += a + sgn = -1 + n=n+1 + + + elif( (YY+Res[2,0])*(YY+Res[2,0])+(n+1)*(n+1)*Res[2,2]*Res[2,2] < a*a): + + YY+=Res[2,0] + n=n+1 + sgn = -1 + + #print ["B",YY,sgn,n] + + if( n>0 ): + if( a <= c): + + Res[2,0]= sgn*YY + Res[2,2] *=sgn + + else: + + if( YY*Res[2,0]+n*Res[2,2]*Res[2,2] > 0): + sgn =-1 + + else: + sgn = 1 + Res[0,0]= sgn*YY + Res[0,2] =sgn*n*Res[2,2] + + + Res=Res.I + + + self.conventionalUB = Res + + return Res + + + def Niggli( self, Res): + RUB= Res.I + X=RUB*RUB.T + done = False + + while not done: + done = True + for i in range(2): + if X[i,i]>X[i+1,i+1]: + done = False + for j in range(3): + sav= RUB[i,j] + RUB[i,j]=RUB[i+1,j] + RUB[i+1,j]=sav + X=RUB*RUB.T + + if not done: + continue + #do bc,ac,then ab + for kk in range(3): + jj=2 + if kk>1: + jj=1 + i=0 + else: + i=jj-kk-1 + if X[i,i]<2*math.fabs(X[i,jj]): + sgn=1 + if X[i,jj] >0: + sgn=-1 + for j in range(3): + RUB[jj,j]=RUB[jj,j]+sgn*RUB[i,j] + done=False + X=RUB*RUB.T + + break + + + if( numpy.linalg.det( RUB )< 0): + for cc in range(3): + RUB[0,cc] *=-1 + + + return RUB.I + + def CalcNiggliUB( self,a, b,c,alpha, beta, gamma,type, Center): + + if( Center=='P'): + X = self.CalcConventionalUB( a,b,c,alpha,beta,gamma,type) + return X + + Res= matrix([[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]]) + ConvUB = self.CalcConventionalUB(a,b,c,alpha,beta,gamma,type) + if( ConvUB== None): + return None + + ResP = numpy.matrix.copy(ConvUB) + ResP =ResP.I + + if( type=='H' and 
Center =='I'): + Center ='R' + + if( Center == 'I'): + + s1=1 + s2=1 + for r in range(0,3): + for cc in range(3): + + + if( cc==0): + if( r>0): + + s1 = (-1)**r + s2 =-s1 + + Res[r,cc] =ResP[0,cc]/2+s1*ResP[1,cc]/2+s2*ResP[2,cc]/2 + + + Res=Res.I + + elif( Center =='F'): + + if( type =='H' or type=='M'): + return None + + ss = [0,0,0] + + for r in range(3): + for cc in range(3): + + ss=[1,1,1] + ss[r]=0 + + Res[r,cc]=ss[0]*ResP[0,cc]/2+ss[1]*ResP[1,cc]/2+ss[2]*ResP[2,cc]/2 + + + + Res=Res.I + + elif( Center =='A' or Center=='B'or Center=='C'): + + if( type =='H' ): + return None + if( type =='M' and Center== 'B'): + return None + + r=2 + if( Center =='A') : + + r=0 + if( b==c and type=='O'):# result would be orthorhombic primitive + return None + + elif( Center =='B'): + + r=1 + if( a==c and type=='O'): + return None + + elif( a==b and type=='O'): + return None + + k=0 + + Res[r,0]= ResP[r,0] + Res[r,1]= ResP[r,1] + Res[r,2]= ResP[r,2] + for i in range(1,3): + + if( k==r): + k=k+1 + for cc in range(3) : + + R = (r+1)%3 + s = (-1)**i + + Res[k,cc]= ResP[(R)%3,cc]/2+s*ResP[(R+1)%3,cc]/2 + + k=k+1 + + Res=Res.I + + + + elif( Center =='R'): + + if( type != 'H' or alpha >120):#alpha =120 planar, >120 no go or c under a-b plane. + + self.conventionalUB=NiggliUB = None + return None + + #Did not work with 0 error. FindUBUsingFFT failed + #Alpha = alpha*math.pi/180 + + #Res[0,0] = a + #Res[1,0] =(a*math.cos( Alpha )) + #Res[1,1] = (a*math.sin( Alpha )) + #Res[2,0] =(a*math.cos( Alpha )) + #Res[2,1] =(a*Res[1,0] -Res[2,0]*Res[1,0])/Res[1,1] + #Res[2,2] =math.sqrt( a*a- Res[2,1]*Res[2,1]-Res[2,0]*Res[2,0]) + Res[0,0]=.5*a + Res[0,1]=math.sqrt(3)*a/2 + Res[0,2]=.5*b + Res[1,0]=-a + Res[1,1]=0 + Res[1,2]=.5*b + Res[2,0]=.5*a + Res[2,1]=-math.sqrt(3)*a/2 + Res[2,2]=.5*b + + + Rhomb2Hex= matrix([[1. ,-1., 0.],[-1. ,0., 1.],[-1. 
,-1., -1.]]) + + self.conventionalUB=Rhomb2Hex*Res + Res=Res.I + + self.conventionalUB=self.Niggli(self.conventionalUB.I) + + Res = self.Niggli(Res) + if( numpy.linalg.det( Res )< 0): + for cc in range(3): + Res[cc,0] *=-1 + + + + return Res + + def Perturb( self,val, error): + return val+random.random()*error-error/2 + + def Next( self, hkl1): + #print "Next" + hkl=matrix([[hkl1[0,0]],[hkl1[1,0]],[hkl1[2,0]]]) + S =(math.fabs( hkl[0,0])+math.fabs( hkl[1,0])+math.fabs( hkl[2,0])) + #print ["S=",S] + #The sum of abs hkl's = S until not possible. Increasing lexicographically + if( hkl[2,0] < 0): + #print "Nexta" + hkl[2,0] = -hkl[2,0] + #print hkl + return hkl + + if( math.fabs( hkl[0,0])+ math.fabs( hkl[1,0]+1 ) <= S): + + #print "Nextb" + hkl[1,0] +=1 + hkl[2,0] = -(S -math.fabs( hkl[0,0])- math.fabs( hkl[1,0] )) + elif( math.fabs( hkl[0,0]+1 ) <= S): + + #print "Nextc" + hkl[0,0]= hkl[0,0]+1.0 + hkl[1,0] = -(S - math.fabs( hkl[0,0])) + hkl[2,0] = 0 + else: + + #print "Nextd" + hkl[1,0]=0 + hkl[2,0]=0 + hkl[0,0] = -S-1 + #print hkl + return hkl + + def FixLatParams( self,List): + npos=0 + nneg=0 + if len(List)<6: + return List + has90=False + for i in range(3,6): + if math.fabs(List[i]-90)<.05: + nneg =nneg+1 + has90=True + elif List[i] <90: + npos=npos+1 + else: + nneg=nneg+1 + over90=False + if nneg ==3 or has90 or nneg==1: + over90= True + + for i in range(3,6): + if List[i]>90 and not over90: + List[i]=180-List[i] + elif List[i]<90 and over90: + List[i]=180-List[i] + + bdotc = math.cos(List[3]/180.*math.pi)*List[1]*List[2] + adotc= math.cos(List[4]/180.*math.pi)*List[0]*List[2] + adotb= math.cos(List[5]/180.*math.pi)*List[1]*List[0] + if List[0] > List[1] or (List[0] == List[1] and math.fabs(bdotc)>math.fabs(adotc)): + List = self.XchangeSides( List,0,1) + bdotc = math.cos(List[3]/180.*math.pi)*List[1]*List[2] + adotc= math.cos(List[4]/180.*math.pi)*List[0]*List[2] + adotb= math.cos(List[5]/180.*math.pi)*List[1]*List[0] + if List[1] > List[2] or (List[1] == 
List[2] and math.fabs(adotc)>math.fabs(adotb)): + List = self.XchangeSides(List,1,2) + bdotc = math.cos(List[3]/180.*math.pi)*List[1]*List[2] + adotc= math.cos(List[4]/180.*math.pi)*List[0]*List[2] + adotb= math.cos(List[5]/180.*math.pi)*List[1]*List[0] + + if List[0] > List[1] or (List[0] == List[1] and math.fabs(bdotc)>math.fabs(adotc)): + List = self.XchangeSides( List,0,1) + + return List + + def FixUpPlusMinus( self, UB):#TODO make increasing lengthed sides too + M= matrix([[1.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,1.0]]) + M1= matrix([[1.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,1.0]]) + G= UB.T*UB + G.I + + if G[0,1]>0: + if G[0,2]>0: + if G[1,2]>0: + return UB + else: + M[1,1]=M[2,2]=-1 + elif G[1,2]>0: + M[0,0]=M[2,2]=-1 + else: + M[1,1]=M[0,0]=-1 + else: + if G[0,2]>0: + if G[1,2]>0: + M[1,1]=M[0,0]=-1 + else: + M[0,0]=M[2,2]=-1 + elif G[1,2]>0: + M[2,2]=M[1,1]=-1 + else: + return UB + + + return UB*M + # is closeness to 90 deg( cos of ), and equal sides + def FixUB(self,UB, tolerance): + done = 1 + print "A" + while done==1: + done=0 + X = UB.T*UB + X.I + + print "B1",X + if X[0,0]> X[1,1] or (math.fabs(X[0,0]-X[1,1])<tolerance/10 and math.fabs(X[1,2])>math.fabs(X[0,2])+tolerance/10): + done = 1 + for i in range(0,3): + sav= UB[i,0] + UB[i,0]=UB[i,1] + UB[i,1]=sav + print "B" + continue + + print "B2" + if X[1,1]>X[2,2] or (math.fabs(X[1,1]-X[2,2])<tolerance and math.fabs(X[1,0])< math.fabs(X[2,0])-tolerance/10): + done = 1 + for i in range(0,3): + sav= UB[i,1] + UB[i,1]=UB[i,2] + UB[i,2]=sav + + print "C" + continue + + print "B3" + if numpy.linalg.det(UB) < 0: + for i in range(0,3): + UB[i,0]=-1*UB[i,0] + + print "D" + done=1 + continue + print "E" + L= [X[0,1],X[0,2],X[1,2]] + + nneg=0 + is90=False + odd=-1 + for i in range(0,3): + + if math.fabs(L[i])<tolerance: + is90=True + odd=i + nneg=nneg+1 + elif L[i]<0: + nneg=nneg+1 + + if nneg==3 or nneg==0: + continue + + for i in range(0,3): + if is90 : + if nneg ==1: + odd=i + break + if nneg==2 and odd !=i and 
L[i]>0: + odd=i + break + + + elif nneg==1 and L[i]<0: + odd=i + elif nneg==2 and L[i]>0: + odd = i + odd= 2-odd + i1=(odd+1)%3 + i2=(odd+2)%3 + print ["L=",L, odd,i1,i2, is90,tolerance] + print UB + for i in range(0,3): + UB[i,i1]=-1*UB[i,i1] + UB[i,i2]=-1*UB[i,i2] + print UB + done = 1 + return UB + + + + + + + + def getPeaks( self,Inst,UB, error,Npeaks): + + CreatePeaksWorkspace(InstrumentWorkspace="Sws",NumberOfPeaks=0,OutputWorkspace="Peaks") + Peaks=mtd["Peaks"] + + + MinAbsQ = 100000000 + UBi= matrix([[0.0,0.0,0.0],[0.0,0.0,0.0],[0.0,0.0,0.0]]) + + for ii in range(3): + for jj in range(ii,3): + + UBi = UB[ii,jj] + if( math.fabs( UBi ) < MinAbsQ and UBi !=0): + MinAbsQ = math.fabs(UBi ) + + hkl=matrix([[0.0],[0.0],[0.0]]) + + Error = error*MinAbsQ + npeaks=0 + + + a1= hkl[0,0] + a2=hkl[1,0] + a3=hkl[2,0] + done = False + while not done: + + + Qs = (UB*hkl) + Qs=Qs*(2*math.pi) + + for qs in range(3): + Qs[qs,0] = self.Perturb(Qs[qs,0],Error) + + + + if( Qs is not None and Qs[2,0] > 0): + #QQ= numpy.array([Qs[0,0],Qs[1,0],Qs[2,0]]) + QQ = mantid.kernel.V3D(Qs[0,0],Qs[1,0],Qs[2,0]) + norm = QQ.norm() + + + if norm>.3 and norm < 30: + peak =Peaks.createPeak( QQ, 1.0) + + peak.setQLabFrame(mantid.kernel.V3D(Qs[0,0],Qs[1,0],Qs[2,0]),1.0) + + Peaks.addPeak(peak) + npeaks = npeaks+1 + + + hkl = self.Next( hkl) + if npeaks>= Npeaks: + done =True + if math.fabs(hkl[0,0])>15: + done = True + if math.fabs(hkl[1,0])>15: + done = True + if math.fabs(hkl[2,0])>15: + done = True + + + + return Peaks + + + def newSetting( self, side1,side2,Xtal,Center,ang, i1,i2a): + C=Center + if Center =='A' or Center =='B' or Center=='C': + C='C' + if( Xtal=='O'): + if( ang>20 or i1>0 or i2a >1): + return False + elif (side1==0 and side2 !=0) and (C=='F' or C=='C'):#No Tetragonal "F" or C Center + return False + elif (C=='F'or C=='C') and ( side1==side2 and side1 !=0): + return False + else: + return True + + if(Xtal=='H'): + if ang > 20 or i2a>1 or not(C=='P' or C=='I'): + return False + 
elif side2>side1: + return False + else: + return True + + if( Xtal!='M'): + return False + return True + + def MonoClinicRearrange(self, Sides,Xtal,Center, i1,i2a): + i1q =i1 + i2q = (i1+i2a)%3 + i3q=(i2q+1)%3 + if( i1q==i3q): + i3q = (i3q+1)%3 + a = Sides[i1q] + b= Sides[ i2q] + c = Sides[i3q] + + return [a,b,c] + + def getMatrixAxis( self,v, Xtal): + ident= matrix([[1.0,0.0,0.0],[0.0,1.0,0.0],[0.0,0.0,1.0]]) + if Xtal !='H' or v>=2: + return ident + ident[v,v] =0 + ident[2,2] =0 + v1= 2 + ident[v,v1] =1 + ident[v1,v] =1 + return ident + + def getLat( self, UB): + G=UB.T*UB + G1=G.I + Res=[math.sqrt(G1[0,0]),math.sqrt(G1[1,1]),math.sqrt(G1[2,2])] + Res.append(math.acos( G1[1,2]/Res[1]/Res[2])*180.0/math.pi) + Res.append(math.acos( G1[0,2]/Res[0]/Res[2])*180.0/math.pi) + Res.append(math.acos( G1[0,1]/Res[0]/Res[1])*180.0/math.pi) + return Res + + + def AppendForms( self, condition, Center,CenterTarg, FormNums, List2Append): + L= List2Append + if condition and Center != CenterTarg: + for i in range(len(FormNums)): + L.append(FormNums[i]) + elif Center ==CenterTarg: + for i in range(len(FormNums)): + L.append(FormNums[i]) + return L + + def Xlate(self,Xtal,Center,sides,LatNiggle): #sides are sides of conventional cell + if Xtal=='O': + C=Center + if sides[0] == sides[1]: + if sides[1]==sides[2]: + X="Cubic" + Z1=list(self.Cubic) + else: + X="Tetragonal" + Z1=list(self.Tetr) + elif sides[0]==sides[2]: + X="Tetragonal" + Z1=list(self.Tetr) + elif sides[1]==sides[2]: + X="Tetragonal" + Z1=list(self.Tetr) + else: + X="Orthorhombic" + Z1=list(self.Orth) + + if C=='A' or C =='B': + C ='C' + + elif Xtal=='H': + if Center =='I': + C ='R' + X='Rhombohedral' + Z1=list(self.Hex) + else: + C='P' + X="Hexagonal" + Z1=list(self.Hex) + else:#Monoclinic + X="Monoclinic" + Z1=list(self.Mon) + C=Center + LL=[math.cos(LatNiggle[5]/180*math.pi)*LatNiggle[0]*LatNiggle[1], 
math.cos(LatNiggle[4]/180*math.pi)*LatNiggle[0]*LatNiggle[2],math.cos(LatNiggle[3]/180*math.pi)*LatNiggle[2]*LatNiggle[1]] + + if C=='A' or C =='B': + C ='C' + + if C=='C' or C=='I':#'I': + + Z1=self.AppendForms( LatNiggle[2]*LatNiggle[2]<4*math.fabs(LL[2])+.001, 'C',C,[10,14,39], Z1) + Z1=self.AppendForms( LatNiggle[0]*LatNiggle[0]<4*math.fabs(LL[1])+.001, 'C',C,[20,25,41], Z1) + + Z1=self.AppendForms( LatNiggle[1]*LatNiggle[1]<4*math.fabs(LL[2]+.001), 'C',C,[37], Z1) + + Z1=self.AppendForms( 3*LatNiggle[0]*LatNiggle[0] < LatNiggle[2]*LatNiggle[2]+2*math.fabs(LL[1])+.001, 'I',C,[17], Z1) + Z1=self.AppendForms( 3*LatNiggle[1]*LatNiggle[1]< LatNiggle[2]*LatNiggle[2]+2*math.fabs(LL[2]+.001), 'I',C,[27], Z1) + + if( C=='P'): + Z2=self.CentP + elif C=='F': + Z2=self.CentF + elif C=='I' or C=='R': + Z2=self.CentI + elif C=='C': + Z2=self.CentC + Z1=sorted(Z1) + return [X,C, Z1, Z2] + + + + def MatchXtlparams( self, List1a, List2, tolerance, message): + List1=List1a + + + self.assertEqual(len(List1a),6,"Not the correct number of Xtal parameters."+message) + self.assertEqual(len(List2),6,"Not the correct number of Xtal parameters."+message) + Var=["a","b","c","alpha","beta","gamma"] + self.assertDelta( List1[0],List2[0],tolerance, message +"for "+Var[0]) + self.assertDelta( List1[1],List2[1],tolerance, message +"for "+Var[1]) + self.assertDelta( List1[2],List2[2],tolerance, message +"for "+Var[2]) + angtolerance = tolerance*180/math.pi + if List1[3]<90 and List2[3]>=90: + List1[3]= 180-List1[3] + List1[4]= 180-List1[4] + List1[5]= 180-List1[5] + + + if List1[0] >List1[1]-tolerance: + if List1[1]>List1[2]-tolerance: # 3 equal sides + match = False + + i=0 + + for i in range(0,3): + match= math.fabs(List1[3]-List2[3])<angtolerance and math.fabs(List1[4]-List2[4])<angtolerance and math.fabs(List1[5]-List2[5])<angtolerance + + if match: + break + List1=self.XchangeSides( List1,1,0) + + match= math.fabs(List1[3]-List2[3])<angtolerance and 
math.fabs(List1[4]-List2[4])<angtolerance and math.fabs(List1[5]-List2[5])<angtolerance + if match: + break + + List1=self.XchangeSides( List1,1,2) + + match= math.fabs(List1[3]-List2[3])<angtolerance and math.fabs(List1[4]-List2[4])<angtolerance and math.fabs(List1[5]-List2[5])<angtolerance + self.assertTrue( match,"Angles do not match in any order") + else: + self.assertDelta( List1[5],List2[5],angtolerance,"Error in "+Var[5]) + if math.fabs(List1[3]-List2[3])>angtolerance: + List1 = self.XchangeSides( List1,0,1) + self.assertDelta( List1[3],List2[3],angtolerance,"Error in "+Var[3]) + self.assertDelta( List1[4],List2[4],angtolerance,"Error in "+Var[4]) + elif List1[1]> List1[2]-tolerance: + self.assertDelta(List1[3],List2[3],angtolerance,"Error in "+Var[3]) + if math.fabs(List1[4]-List2[4])>angtolerance: + List1= self.XchangeSides(List1,1,2) + + self.assertDelta(List1[4],List2[4],angtolerance,"Error in "+Var[5]) + + self.assertDelta(List1[5],List2[5],angtolerance,"Error in "+Var[5]) + else: + self.assertDelta(List1[3],List2[3],angtolerance,"Error in "+Var[3]) + + self.assertDelta(List1[4],List2[4],angtolerance,"Error in "+Var[5]) + + self.assertDelta(List1[5],List2[5],angtolerance,"Error in "+Var[5]) + + + def XchangeSides( self, Lat1, s1,s2): + Lat=list(Lat1) + if s1<0 or s2<0 or s1>=3 or s2>2 or s1==s2: + return Lat + sav=Lat[s1] + Lat[s1]=Lat[s2] + Lat[s2]=sav + sav=Lat[s1+3] + Lat[s1+3]=Lat[s2+3] + Lat[s2+3]=sav + + return Lat + + def GetConvCell( self,Peaks,XtalCenter1,wsName, nOrigIndexed,tolerance,matchLat): + + CopySample(Peaks,wsName,CopyMaterial="0",CopyEnvironment="0",CopyName="0",CopyShape="0",CopyLattice="1") + OrLat= mtd[wsName].sample().getOrientedLattice() + Lat1= [OrLat.a(),OrLat.b(),OrLat.c(),OrLat.alpha(),OrLat.beta(),OrLat.gamma()] + FormXtal=XtalCenter1[2] + FormCenter= XtalCenter1[3] + i1=0 + i2=0 + Lat0= self.FixLatParams( matchLat) + Lat1= self.FixLatParams( Lat1) + # print "--------------------- Getting the Conventional Cell 
for--------------------------------" + # print Lat1 + # print Lat0 + # print [FormXtal,FormCenter] + angTolerance = tolerance*180/math.pi + while i1< len(FormXtal) and i2 < len(FormCenter): + if FormXtal[i1]<FormCenter[i2]: + i1=i1+1 + elif FormXtal[i1]>FormCenter[i2]: + i2=i2+1 + else: + Res=SelectCellWithForm(Peaks, FormXtal[i1],True) + + if Res[0] > .85* nOrigIndexed: + CopySample(Peaks,"Temp",CopyMaterial="0",CopyEnvironment="0",CopyName="0",CopyShape="0",CopyLattice="1") + OrLat= mtd["Temp"].sample().getOrientedLattice() + Lat1= [OrLat.a(),OrLat.b(),OrLat.c(),OrLat.alpha(),OrLat.beta(),OrLat.gamma()] + Lat1 = self.FixLatParams(Lat1) + print ["Formnum,Lat1,Lat0",FormXtal[i1],Lat1,Lat0] + if math.fabs(Lat0[0]-Lat1[0])<tolerance and math.fabs(Lat0[1]-Lat1[1])<tolerance and math.fabs(Lat0[2]-Lat1[2])<tolerance: + + for i in range(3): + if math.fabs(Lat0[3]-Lat1[3])<angTolerance and math.fabs(Lat0[4]-Lat1[4])<angTolerance and math.fabs(Lat0[5]-Lat1[5])<angTolerance: + break + if Lat1[0]>Lat1[1]-tolerance: + Lat1=self.XchangeSides( Lat1,0,1) + + if math.fabs(Lat0[3]-Lat1[3])<angTolerance and math.fabs(Lat0[4]-Lat1[4])<angTolerance and math.fabs(Lat0[5]-Lat1[5])<angTolerance: + break + if Lat1[1]>Lat1[2]- tolerance: + Lat1=self.XchangeSides( Lat1,1,2) + + if math.fabs(Lat0[3]-Lat1[3])<angTolerance and math.fabs(Lat0[4]-Lat1[4])<angTolerance and math.fabs(Lat0[5]-Lat1[5])<angTolerance: + break + + if math.fabs(Lat0[3]-Lat1[3])<angTolerance and math.fabs(Lat0[4]-Lat1[4])<angTolerance and math.fabs(Lat0[5]-Lat1[5])<angTolerance: + return Lat1 + i1=i1+1 + i2=i2+1 + CopySample(wsName, Peaks,CopyMaterial="0",CopyEnvironment="0",CopyName="0",CopyShape="0",CopyLattice="1") + return [] + + + + def runTest(self): + + CreateSingleValuedWorkspace(OutputWorkspace="Sws",DataValue="3") + + CreateSingleValuedWorkspace(OutputWorkspace="Temp",DataValue="3") + LoadInstrument(Workspace="Sws",InstrumentName="TOPAZ") + Inst= mtd["Sws"].getInstrument() + startA = 2 + side1Ratios =[1.0, 
1.2, 3.0, 8.0] + alphas =[20,50,80,110,140] + xtal=['O','M','H']#['O','M','H'] + centerings = ['P','I','F','A', 'B', 'C'] + #['P','I','F','A', 'B', 'C'] + error=[0.0] #[ 0, .05, 0.1, 0, 0.15] + Npeaks=150 + for Error in error: + for side1 in range(0,4):#make (0,4) + for side2 in range(side1,4):#make side1,4 + for Xtal in xtal: + for Center in centerings: + for ang in alphas: + for i1 in range(3): + for i2a in range(1,3): + if self.newSetting( side1,side2,Xtal,Center,ang, i1,i2a): + print "=============================================================" + Sides=[startA, startA*side1Ratios[side1],startA*side1Ratios[side2]] + Sides= self.MonoClinicRearrange( Sides,Xtal,Center,i1,i2a) + print [Sides,Error,Xtal,Center,ang,i1,i2a] + + UBconv= self.CalcConventionalUB(Sides[0],Sides[1],Sides[2],ang,ang,ang,Xtal) + + UBnig= self.CalcNiggliUB(Sides[0],Sides[1],Sides[2],ang,ang,ang,Xtal,Center) + + UBconv = self.conventionalUB + V =self.getMatrixAxis( i1,Xtal) + if UBconv == None: + continue + if UBnig==None: + continue + UBnig= V*UBnig + UBconv = V*UBconv + #UBnig1= self.FixUB(UBnig,.05) + UBnig = self.FixUpPlusMinus(UBnig) + UBconv= self.FixUpPlusMinus(UBconv) + Lat0= self.getLat(UBnig) + + + + Lat0=self.FixLatParams(Lat0) + print ["UBnig",UBnig,Lat0] + + Peaks=self.getPeaks(Inst,UBnig, Error,Npeaks +Error*300) + + #------------------------Failed tests because of FindUBUsingFFT ------------------------------------ + + if side1==1 and side2==2 and Error==0.0 and Xtal=='M' and Center=='C' and i1==0 and i2a==1 and ang==140: + continue + + if side1==2 and side2==2 and Error==0.0 and Xtal=='M' and Center=='P' and i1==1 and i2a==1 and ang==110: + continue # one side doubled + + if side1==3 and side2==3 and Error==0.0 and Xtal=='M' and Center=='I' and i1 == 1 and i2a==2 : + continue + + if side1==3 and side2==3 and Error==0.0 and Xtal=='M' and Center=='I' and i1 == 2 and i2a==1 : + continue + + if side1==3 and side2==3 and Error==0.0 and Xtal=='H' and Center=='I' and i1 == 2 and 
i2a==1 and ang==20: + continue + #------------------------------ end Failed FindUB test---------------------------- + FindUBUsingFFT(Peaks,Lat0[0]*.5,Lat0[2]*2.0,.15) + InPks=IndexPeaks(Peaks,.10) + + + CopySample(Peaks,"Sws",CopyMaterial="0",CopyEnvironment="0",CopyName="0",CopyShape="0",CopyLattice="1") + OrLat= mtd["Sws"].sample().getOrientedLattice() + + Lat1= [OrLat.a(),OrLat.b(),OrLat.c(),OrLat.alpha(),OrLat.beta(),OrLat.gamma()] + + Lat1=self.FixLatParams(Lat1) + + MatchXtalTol= .03*(1+4*Error)*(side1Ratios[side2]) + print Lat0 + print Lat1 + self.MatchXtlparams( Lat1, Lat0, MatchXtalTol, "Niggli values do not match") + + + #Now see if the conventional cell is in list + XtalCenter1= self.Xlate(Xtal,Center,Sides,Lat0) #get proper strings for SelectCellOfType + + Lat0= self.getLat(UBconv) + Lat0=self.FixLatParams(Lat0) + Lat1 = self.GetConvCell( Peaks,XtalCenter1,"Sws",InPks[0],MatchXtalTol,Lat0) + + + Lat1=self.FixLatParams(Lat1) + + self.MatchXtlparams( Lat1, Lat0, MatchXtalTol, "Conventional lattice parameter do not match") + self.assertTrue( len(Lat1)>4,"Conventional values do not match") + #"XYXYZS" + def requiredFiles(self): + return [] diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/PolrefExample.py b/Code/Mantid/Testing/SystemTests/tests/analysis/PolrefExample.py new file mode 100644 index 0000000000000000000000000000000000000000..0fdd30dd62a1134abb3d1e84dffd73f7c60c07cb --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/PolrefExample.py @@ -0,0 +1,39 @@ +import stresstesting +from mantid.simpleapi import * + +''' Sample script from Tim Charlton. Described as Mantid version of quick:lam''' +''' +Owen Arnold +29/06/2012 +The analysis performed here is a subset of what is done in ReflectometryISIS.py. We may want to remove this test in the furture to avoid duplication. However, +I'm leaving this in here for now because Tim Charlton suggests making the ReflectometryISIS.py test more generic for every reflectometry instrument. 
+''' +class PolrefExample(stresstesting.MantidStressTest): + + def runTest(self): + LoadRaw(Filename="POLREF00003014.raw",OutputWorkspace="W",SpectrumMax="4",LoadMonitors="Separate") + ConvertUnits(InputWorkspace="W_monitors",OutputWorkspace="M",Target="Wavelength",AlignBins="1") + DeleteWorkspace(Workspace="W_monitors") + CalculateFlatBackground(InputWorkspace="M",OutputWorkspace="M",WorkspaceIndexList="0,1,2",StartX="15",EndX="17") + ConvertUnits(InputWorkspace="W",OutputWorkspace="D",Target="Wavelength",AlignBins="1") + DeleteWorkspace(Workspace="W") + OneMinusExponentialCor(InputWorkspace="D",OutputWorkspace="D",C="1.99012524619") + ExponentialCorrection(InputWorkspace="D",OutputWorkspace="D",C1="0.0100836650034") + PolynomialCorrection(InputWorkspace="D",OutputWorkspace="D",Coefficients="-1.3697,0.8602,-0.7839,0.2866,-0.0447,0.0025") + ExponentialCorrection(InputWorkspace="M",OutputWorkspace="M",C1="0.42672",Operation="Multiply") + CreateSingleValuedWorkspace(OutputWorkspace="shift",DataValue="3.16666666667") + Plus(LHSWorkspace="M",RHSWorkspace="shift",OutputWorkspace="M") + OneMinusExponentialCor(InputWorkspace="M",OutputWorkspace="M",C="0.42672") + RebinToWorkspace(WorkspaceToRebin="M",WorkspaceToMatch="D",OutputWorkspace="M") + CropWorkspace(InputWorkspace="M",OutputWorkspace="I0",StartWorkspaceIndex="2") + DeleteWorkspace(Workspace="M") + Divide(LHSWorkspace="D",RHSWorkspace="I0",OutputWorkspace="R") + DeleteWorkspace(Workspace="D") + DeleteWorkspace(Workspace="I0") + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). 
+ self.disableChecking.append('SpectraMap') + return 'R_1','PolrefTest.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/PowderDiffProfileCalibrateTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/PowderDiffProfileCalibrateTest.py new file mode 100644 index 0000000000000000000000000000000000000000..6fc0813712b108370775264467000adda9483a18 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/PowderDiffProfileCalibrateTest.py @@ -0,0 +1,236 @@ +######################################################################## +# +# This is the system test for workflow algorithms +# 1. ExaminePowder... +# 2. SeqRefinement... +# Both of which are based on LeBailFit to do peak profile calibration +# for powder diffractometers. +# +######################################################################## +import stresstesting +import mantid.simpleapi as api +from mantid.simpleapi import * + +def getSaveDir(): + """determine where to save - the current working directory""" + import os + return os.path.abspath(os.path.curdir) + +class VulcanExamineProfile(stresstesting.MantidStressTest): + irf_file = 'arg_powder.irf' + dat_file = 'arg_si.dat' + bkgd_file = 'arg_si_bkgd_polynomial.nxs' + + def requiredFiles(self): + files = [self.irf_file, self.dat_file, self.bkgd_file] + return files + + def runTest(self): + savedir = getSaveDir() + + LoadAscii(Filename=self.dat_file, OutputWorkspace='arg_si',Unit='TOF') + + LoadNexusProcessed(Filename=self.bkgd_file, OutputWorkspace='Arg_Si_Bkgd_Parameter') + + CreateLeBailFitInput(FullprofParameterFile=self.irf_file, + GenerateBraggReflections='1',LatticeConstant='5.4313640', + InstrumentParameterWorkspace='Arg_Bank1', BraggPeakParameterWorkspace='ReflectionTable') + + # run the actual code + ExaminePowderDiffProfile( + InputWorkspace = 'arg_si', + StartX = 1990., + EndX = 29100., + ProfileType = 'Back-to-back exponential convoluted with PseudoVoigt', + ProfileWorkspace = 'Arg_Bank1', + BraggPeakWorkspace = 
'ReflectionTable', + BackgroundParameterWorkspace = 'Arg_Si_Bkgd_Parameter', + BackgroundType = 'Polynomial', + BackgroundWorkspace = 'Arg_Si_Background', + OutputWorkspace = 'Arg_Si_Calculated') + + + # load output gsas file and the golden one + Load(Filename = "Arg_Si_ref.nxs", OutputWorkspace = "Arg_Si_golden") + + def validateMethod(self): + self.tolerance=1.0e-6 + return "ValidateWorkspaceToWorkspace" + + def validate(self): + self.tolerance=1.0e-6 + return ('Arg_Si_Calculated','Arg_Si_golden') + +class VulcanSeqRefineProfileFromScratch(stresstesting.MantidStressTest): + """ System test for sequential refinement + """ + irf_file = 'VULCAN_SNS_1.irf' + dat_file = 'VULCAN_22946_NOM.dat' + + def requiredFiles(self): + files = [self.irf_file, self.dat_file] + return files + + def runTest(self): + savedir = getSaveDir() + + # Data + LoadAscii(Filename=self.dat_file, OutputWorkspace='VULCAN_22946_NOM',Unit='TOF') + + # Reflections and starting profile parameters + CreateLeBailFitInput(FullprofParameterFile=self.irf_file, + GenerateBraggReflections='1',LatticeConstant='5.431364000', + InstrumentParameterWorkspace='Vulcan_B270_Profile', + BraggPeakParameterWorkspace='GeneralReflectionTable') + + # Pre-refined background + paramnames = ["Bkpos", "A0", "A1", "A2", "A3", "A4", "A5"] + paramvalues = [11000.000, 0.034, 0.027, -0.129, 0.161, -0.083, .015] + bkgdtablewsname = "VULCAN_22946_Bkgd_Parameter" + api.CreateEmptyTableWorkspace(OutputWorkspace=bkgdtablewsname) + ws = mtd[bkgdtablewsname] + ws.addColumn("str", "Name") + ws.addColumn("double", "Value") + for i in xrange(len(paramnames)): + ws.addRow([paramnames[i], paramvalues[i]]) + + # Examine profile + ExaminePowderDiffProfile( + InputWorkspace = "VULCAN_22946_NOM", + LoadData = False, + StartX = 7000., + EndX = 33000., + ProfileType = "Back-to-back exponential convoluted with PseudoVoigt", + ProfileWorkspace = "Vulcan_B270_Profile", + BraggPeakWorkspace = "GeneralReflectionTable", + GenerateInformationWS = False, 
+ BackgroundParameterWorkspace = "VULCAN_22946_Bkgd_Parameter", + ProcessBackground = False, + BackgroundType = "FullprofPolynomial", + BackgroundWorkspace = "Dummy", + OutputWorkspace = "VULCAN_22946_Calculated") + + # Set up sequential refinement + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "", + InputProfileWorkspace = "Vulcan_B270_Profile", + InputBraggPeaksWorkspace = "GeneralReflectionTable", + InputBackgroundParameterWorkspace = "VULCAN_22946_Bkgd_Parameter", + StartX = 7000., + EndX = 33000., + FunctionOption = "Setup", # or "Refine" + RefinementOption = "Random Walk", + ParametersToRefine = "Alph0", + NumRefineCycles = 1000, + ProfileType = "Neutron Back-to-back exponential convoluted with pseudo-voigt", + BackgroundType = "FullprofPolynomial", + ProjectID = "IDx890") + + # Refine step 1 + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "RecordIDx890Table", + InputProfileWorkspace = "Vulcan_B270_Profile", + InputBraggPeaksWorkspace = "GeneralReflectionTable", + InputBackgroundParameterWorkspace = "VULCAN_22946_Bkgd_Parameter", + StartX = 7000., + EndX = 33000., + FunctionOption = "Refine", # or "Refine" + RefinementOption = "Random Walk", + ParametersToRefine = "Alph0", + NumRefineCycles = 1000, + ProfileType = "Neutron Back-to-back exponential convoluted with pseudo-voigt", + BackgroundType = "FullprofPolynomial", + ProjectID = "IDx890") + + + # Refine step 2 + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "RecordIDx890Table", + # InputProfileWorkspace = "Vulcan_B270_Profile", + # InputBraggPeaksWorkspace = "GeneralReflectionTable", + # InputBackgroundParameterWorkspace = "VULCAN_22946_Bkgd_Parameter", + StartX = 7000., + EndX = 33000., + FunctionOption = "Refine", # or "Refine" + RefinementOption = "Random Walk", + ParametersToRefine = "Beta0, Beta1", + NumRefineCycles = 100, + # ProfileType = 
"Neutron Back-to-back exponential convoluted with psuedo-voigt", + # BackgroundType = "FullprofPolynomial" + ProjectID = "IDx890") + + + # Refine step 3 (not from previous cycle) + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "RecordIDx890Table", + StartX = 7000., + EndX = 33000., + FunctionOption = "Refine", # or "Refine" + RefinementOption = "Random Walk", + ParametersToRefine = "Beta0, Beta1", + NumRefineCycles = 100, + FromStep = 1, + ProjectID = "IDx890") + + # Save + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "RecordIDx890Table", + FunctionOption = "Save", + OutputProjectFilename = "temp991.nxs", + ProjectID = "IDx890") + + return + + def validateMethod(self): + """ Return None as running is all that we want at this moment. + """ + return None + + def validate(self): + self.tolerance=1.0e-6 + return ('VULCAN_22946_Calculated', 'VULCAN_22946_Calculated') + +class VulcanSeqRefineProfileLoadPlus(stresstesting.MantidStressTest): + """ System test for sequential refinement + """ + seqfile = "VULCAN_Calibrate_Seq.nxs" + + def requiredFiles(self): + files = [self.seqfile] + return files + + def runTest(self): + savedir = getSaveDir() + + # Load + api.RefinePowderDiffProfileSeq( + FunctionOption = "Load", + InputProjectFilename = self.seqfile, + ProjectID = "IDx890") + + # Refine step 4 + api.RefinePowderDiffProfileSeq( + InputWorkspace = "VULCAN_22946_NOM", + SeqControlInfoWorkspace = "RecordIDx890Table", + startx = 7000., + EndX = 33000., + FunctionOption = "Refine", # or "Refine" + RefinementOption = "Random Walk", + ParametersToRefine = "Alph1", + NumRefineCycles = 200, + ProjectID = "IDx890") + + + def validateMethod(self): + """ Return None as running is all that we want at this moment. 
+ """ + return None + + def validate(self): + self.tolerance=1.0e-6 + return ('VULCAN_22946_Calculated', 'VULCAN_22946_Calculated') diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py new file mode 100644 index 0000000000000000000000000000000000000000..a020eb64ed51a4d7272fbe45dbb2db50ec95dd80 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py @@ -0,0 +1,35 @@ +import stresstesting +from mantid import * + +from mantid.simpleapi import * + +class REFMReduction(stresstesting.MantidStressTest): + def runTest(self): + RefReduction(DataRun=str(9709), + NormalizationRun=str(9684), + SignalPeakPixelRange=[216, 224], + SubtractSignalBackground=True, + SignalBackgroundPixelRange=[172, 197], + PerformNormalization=True, + NormPeakPixelRange=[226, 238], + NormBackgroundPixelRange=[130, 183], + SubtractNormBackground=False, + CropLowResDataAxis=True, + CropLowResNormAxis=False, + LowResDataAxisPixelRange = [86, 159], + NBins=40, + Theta=0.086, + PolarizedData=True, + Instrument="REF_M", + OutputWorkspacePrefix='reflectivity') + + def validate(self): + # Be more tolerant with the output, mainly because of the errors. + # The following tolerance check the errors up to the third digit. 
+ self.tolerance = 0.25 + self.disableChecking.append('Instrument') + self.disableChecking.append('Sample') + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + return "reflectivity-Off_Off", 'REFMReduction_off_off.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/RROAutoFunctionalityTests.py b/Code/Mantid/Testing/SystemTests/tests/analysis/RROAutoFunctionalityTests.py new file mode 100644 index 0000000000000000000000000000000000000000..c5333ea91d55b35c0af1535ebd30bdec32c7810f --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/RROAutoFunctionalityTests.py @@ -0,0 +1,169 @@ +import stresstesting +from algorithm_decorator import make_decorator +from mantid.simpleapi import * +import mantid.api + +class RROAutoFunctionalityTest(stresstesting.MantidStressTest): + """ + This test is to check the functionality of ReflectometryReductionOneAuto. Data testing is done separately + """ + + + def __init__(self): + super(RROAutoFunctionalityTest, self).__init__() + data_ws = Load('INTER00013460.nxs') + self.__data_ws = data_ws + trans_ws_1 = Load('INTER00013463.nxs') + self.__trans_ws_1 = trans_ws_1 + trans_ws_2 = Load('INTER00013464.nxs') + self.__trans_ws_2 = trans_ws_2 + line_detector_ws = Load('POLREF00004699.nxs') + self.__line_detector_ws = line_detector_ws + + def __del__(self): + DeleteWorkspace(self.__data_ws) + DeleteWorkspace(self.__trans_ws_1) + DeleteWorkspace(self.__trans_ws_2) + DeleteWorkspace(self.__self.__line_detector_ws) + + + def construct_standard_algorithm(self): + alg = make_decorator(ReflectometryReductionOneAuto) + alg.set_WavelengthMin(0.0) + alg.set_WavelengthMax(1.0) + alg.set_I0MonitorIndex(0) + alg.set_ProcessingInstructions("0, 1") + alg.set_MonitorBackgroundWavelengthMin(0.0) + alg.set_MonitorBackgroundWavelengthMax(1.0) + alg.set_MonitorIntegrationWavelengthMin(0.0) + alg.set_MonitorIntegrationWavelengthMax(1.0) + alg.set_additional({'OutputWorkspaceWavelength': 'out_ws_wav'}) + 
return alg + + def test_point_detector_run_with_single_transmission_workspace(self): + alg = self.construct_standard_algorithm() + alg.set_InputWorkspace(self.__data_ws) + alg.set_ProcessingInstructions("3,4") + alg.set_FirstTransmissionRun(self.__trans_ws_1) + alg.set_ThetaIn(0.2) + + out_ws_q, out_ws_lam, theta = alg.execute() + self.assertEqual(0.2, theta, "Theta in and out should be the same") + + self.assertTrue(isinstance(out_ws_lam, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("Wavelength", out_ws_lam.getAxis(0).getUnit().unitID()) + + self.assertTrue(isinstance(out_ws_q, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("MomentumTransfer", out_ws_q.getAxis(0).getUnit().unitID()) + + self.assertEqual(2, out_ws_lam.getNumberHistograms()) + + def test_point_detector_run_with_two_transmission_workspaces(self): + alg = self.construct_standard_algorithm() + + alg.set_InputWorkspace(self.__data_ws) + alg.set_ProcessingInstructions("3,4") + alg.set_FirstTransmissionRun(self.__trans_ws_1) + alg.set_SecondTransmissionRun(self.__trans_ws_2) + alg.set_ThetaIn(0.2) + + out_ws_q, out_ws_lam, theta = alg.execute() + + + def test_spectrum_map_mismatch_throws_when_strict(self): + alg = self.construct_standard_algorithm() + ''' + Here we convert the transmission run to Lam. The workspace will NOT have the same spectra map as the input workspace, + and strict checking is turned on, so this will throw upon execution. + ''' + trans_run1_lam = ConvertUnits(self.__trans_ws_1, Target='Wavelength') + trans_run1_lam = CropWorkspace(trans_run1_lam, EndWorkspaceIndex=1) + + alg.set_InputWorkspace(self.__data_ws) + alg.set_ProcessingInstructions("3,4") # This will make spectrum numbers in input workspace different from denominator + alg.set_FirstTransmissionRun(trans_run1_lam) + alg.set_StrictSpectrumChecking(True) + + self.assertRaises(Exception, alg.execute) # Should throw due to spectrum missmatch. 
+ + + def test_spectrum_map_mismatch_doesnt_throw_when_not_strict(self): + alg = self.construct_standard_algorithm() + + ''' + Here we convert the transmission run to Lam. The workspace will NOT have the same spectra map as the input workspace, + and strict checking is turned off, so this will NOT throw upon execution. + ''' + trans_run1_lam = ConvertUnits(self.__trans_ws_1, Target='Wavelength') + trans_run1_lam = CropWorkspace(trans_run1_lam, EndWorkspaceIndex=1) + + alg.set_InputWorkspace(self.__data_ws) + alg.set_ProcessingInstructions("3,4") # This will make spectrum numbers in input workspace different from denominator + alg.set_FirstTransmissionRun(trans_run1_lam) + alg.set_StrictSpectrumChecking(False) # Will not crash-out on spectrum checking. + + alg.execute()# Should not throw + + + def test_multidetector_run(self): + alg = self.construct_standard_algorithm() + + alg.set_InputWorkspace(self.__line_detector_ws[0]) + alg.set_AnalysisMode("MultiDetectorAnalysis") + alg.set_DetectorComponentName('lineardetector') + alg.set_ProcessingInstructions("10") # Fictional values + alg.set_CorrectDetectorPositions(False) + alg.set_RegionOfDirectBeam("20, 30") # Fictional values + alg.set_ThetaIn(0.1) # Fictional values + + out_ws_q, out_ws_lam, theta = alg.execute() + + self.assertTrue(isinstance(out_ws_lam, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("Wavelength", out_ws_lam.getAxis(0).getUnit().unitID()) + + self.assertTrue(isinstance(out_ws_q, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("MomentumTransfer", out_ws_q.getAxis(0).getUnit().unitID()) + + def test_multidetector_run_correct_positions(self): + alg = self.construct_standard_algorithm() + + alg.set_InputWorkspace(self.__line_detector_ws[0]) + alg.set_AnalysisMode("MultiDetectorAnalysis") + alg.set_DetectorComponentName('lineardetector') + alg.set_ProcessingInstructions("73") # Fictional values + alg.set_CorrectDetectorPositions(True) + 
alg.set_RegionOfDirectBeam("28, 29") # Fictional values + alg.set_ThetaIn(0.49 / 2) # Fictional values + + out_ws_q, out_ws_lam, theta = alg.execute() + + self.assertTrue(isinstance(out_ws_lam, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("Wavelength", out_ws_lam.getAxis(0).getUnit().unitID()) + + self.assertTrue(isinstance(out_ws_q, mantid.api.MatrixWorkspace), "Should be a matrix workspace") + self.assertEqual("MomentumTransfer", out_ws_q.getAxis(0).getUnit().unitID()) + + instrument = out_ws_lam.getInstrument() + detector_pos = instrument.getComponentByName("lineardetector").getPos() + + self.assertDelta(-0.05714, detector_pos.Z(), 0.0001) + + + def runTest(self): + + self.test_point_detector_run_with_single_transmission_workspace() + + self.test_point_detector_run_with_two_transmission_workspaces() + + self.test_spectrum_map_mismatch_throws_when_strict() + + self.test_spectrum_map_mismatch_doesnt_throw_when_not_strict() + + self.test_multidetector_run() + + self.test_multidetector_run_correct_positions() + + + + def validate(self): + return True diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/RawVNexus.py b/Code/Mantid/Testing/SystemTests/tests/analysis/RawVNexus.py new file mode 100644 index 0000000000000000000000000000000000000000..ec956b76629447dc838915fda6e884e12de4ba5d --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/RawVNexus.py @@ -0,0 +1,11 @@ +import stresstesting +from mantid.simpleapi import * + +''' Simply tests that our LoadRaw and LoadISISNexus algorithms produce the same workspace''' +class RawVNexus(stresstesting.MantidStressTest): + + def runTest(self): + Raw = LoadRaw(Filename='SANS2D00000808.raw') + + def validate(self): + return 'Raw','SANS2D00000808.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReduceOneSCD_Run.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReduceOneSCD_Run.py new file mode 100644 index 
0000000000000000000000000000000000000000..20f26976d16700da2134c087f57b72053c0af040 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReduceOneSCD_Run.py @@ -0,0 +1,257 @@ +# File: ReduceOneSCD_Run.py +# +# Version 2.0, modified to work with Mantid's new python interface. +# +# This script will reduce one SCD run. The configuration is set up in the +# first few lines in the method runTest. This script will load, find peaks, +# index and integrate either found or predicted peaks for the specified run. +# Either sphere integration or the Mantid PeakIntegration algorithms are +# currently supported, but it may be updated to support other integration +# methods. Users should make a directory to hold the output of this script, +# and must specify that output directory with the other configuration +#information. +# +# +import os +import sys +import shutil +import time + +import stresstesting +import numpy + + +from mantid.api import * +#sys.path.append("/home/ruth/GIT_MantidBuild/bin/") +from mantid.simpleapi import * + +class ReduceOneSCD_Run( stresstesting.MantidStressTest): + + + def requiredMemoryMB(self): + """ Require about 12GB free """ + return 6000 + + def runTest(self): + start_time = time.time() + + + instrument_name = "TOPAZ" + calibration_file_1 = "TOPAZ_2011_02_16.DetCal" + calibration_file_2 = None + #data_directory = params_dictionary[ "data_directory" ] + + import os + self.output_directory = os.path.abspath(os.path.curdir) + # = params_dictionary[ "output_directory" ] + + min_tof = "400" + max_tof = "16666" + min_monitor_tof = "1000" + max_monitor_tof = "12500" + monitor_index = "0" + cell_type = "Orthorhombic" + centering = "P" + num_peaks_to_find = "150" + min_d = "4" + max_d = "12" + tolerance = ".12" + integrate_predicted_peaks = False + min_pred_wl = ".25" + max_pred_wl = "3.5" + min_pred_dspacing = ".2" + max_pred_dspacing = "2.5" + use_sphere_integration = True + use_fit_peaks_integration = False + peak_radius = ".2" + 
bkg_inner_radius = ".2" + bkg_outer_radius = ".25" + integrate_if_edge_peak = False + rebin_step = "-.004" + preserve_events = True + use_ikeda_carpenter = False + n_bad_edge_pixels = "10" + + rebin_params = min_tof+ ","+ rebin_step +"," +max_tof + run = "3132" + self.saved=False; +# +# Get the fully qualified input run file name, either from a specified data +# directory or from findnexus +# + + full_name = instrument_name + "_" + (run) + "_event.nxs" + + print "\nProcessing File: " + full_name + " ......\n" + +# +# Name the files to write for this run +# + run_niggli_matrix_file = self.output_directory + "/" + run + "_Niggli.mat" + run_niggli_integrate_file = self.output_directory + "/" + run + "_Niggli.integrate" + + +# +# Load the run data and find the total monitor counts +# + event_ws = LoadEventNexus( Filename=full_name, + FilterByTofMin=min_tof, FilterByTofMax=max_tof ) + + if (calibration_file_1 is not None) or (calibration_file_2 is not None): + LoadIsawDetCal( event_ws, + Filename=calibration_file_1) + + monitor_ws = LoadNexusMonitors( Filename=full_name ) + + integrated_monitor_ws = Integration( InputWorkspace=monitor_ws, + RangeLower=min_monitor_tof, RangeUpper=max_monitor_tof, + StartWorkspaceIndex=monitor_index, EndWorkspaceIndex=monitor_index ) + + monitor_count = integrated_monitor_ws.dataY(0)[0] + print "\n", run, " has calculated monitor count", monitor_count, "\n" + +# +# Make MD workspace using Lorentz correction, to find peaks +# + MDEW = ConvertToMD( InputWorkspace=event_ws, QDimensions="Q3D", + dEAnalysisMode="Elastic", QConversionScales="Q in A^-1", + LorentzCorrection='1', MinValues="-50,-50,-50", MaxValues="50,50,50", + SplitInto='2', SplitThreshold='50',MaxRecursionDepth='11' ) +# +# Find the requested number of peaks. Once the peaks are found, we no longer +# need the weighted MD event workspace, so delete it. 
+# + distance_threshold = 0.9 * 6.28 / float(max_d) + peaks_ws = FindPeaksMD( MDEW, MaxPeaks=num_peaks_to_find, + PeakDistanceThreshold=distance_threshold ) + + AnalysisDataService.remove( MDEW.getName() ) +# SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, +# Filename='A'+run_niggli_integrate_file ) +# +# Find a Niggli UB matrix that indexes the peaks in this run +# + FindUBUsingFFT( PeaksWorkspace=peaks_ws, MinD=min_d, MaxD=max_d, Tolerance=tolerance ) + IndexPeaks( PeaksWorkspace=peaks_ws, Tolerance=tolerance ) + +# +# Save UB and peaks file, so if something goes wrong latter, we can at least +# see these partial results +# +# SaveIsawUB( InputWorkspace=peaks_ws,Filename=run_niggli_matrix_file ) +# SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, +# Filename=run_niggli_integrate_file ) + +# +# Get complete list of peaks to be integrated and load the UB matrix into +# the predicted peaks workspace, so that information can be used by the +# PeakIntegration algorithm. +# + if integrate_predicted_peaks: + print "PREDICTING peaks to integrate...." + peaks_ws = PredictPeaks( InputWorkspace=peaks_ws, + WavelengthMin=min_pred_wl, WavelengthMax=max_pred_wl, + MinDSpacing=min_pred_dspacing, MaxDSpacing=max_pred_dspacing, + ReflectionCondition='Primitive' ) + else: + print "Only integrating FOUND peaks ...." +# +# Set the monitor counts for all the peaks that will be integrated +# + num_peaks = peaks_ws.getNumberPeaks() + for i in range(num_peaks): + peak = peaks_ws.getPeak(i) + peak.setMonitorCount( monitor_count ) + + if use_sphere_integration: +# +# Integrate found or predicted peaks in Q space using spheres, and save +# integrated intensities, with Niggli indexing. 
First get an un-weighted +# workspace to do raw integration (we don't need high resolution or +# LorentzCorrection to do the raw sphere integration ) +# + MDEW = ConvertToDiffractionMDWorkspace( InputWorkspace=event_ws, + LorentzCorrection='0', OutputDimensions='Q (lab frame)', + SplitInto='2', SplitThreshold='500', MaxRecursionDepth='5' ) + + peaks_ws = IntegratePeaksMD( InputWorkspace=MDEW, PeakRadius=peak_radius, + BackgroundOuterRadius=bkg_outer_radius, + BackgroundInnerRadius=bkg_inner_radius, + PeaksWorkspace=peaks_ws, + IntegrateIfOnEdge=integrate_if_edge_peak ) + + elif use_fit_peaks_integration: + event_ws = Rebin( InputWorkspace=event_ws, + Params=rebin_params, PreserveEvents=preserve_events ) + peaks_ws = PeakIntegration( InPeaksWorkspace=peaks_ws, InputWorkspace=event_ws, + IkedaCarpenterTOF=use_ikeda_carpenter, + MatchingRunNo=True, + NBadEdgePixels=n_bad_edge_pixels ) +# +# Save the final integrated peaks, using the Niggli reduced cell. +# This is the only file needed, for the driving script to get a combined +# result.(UNComment to get new values if algorithms change) +# +# SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, +# Filename=run_niggli_integrate_file ) + +# +# If requested, also switch to the specified conventional cell and save the +# corresponding matrix and integrate file +# + if (not cell_type is None) and (not centering is None) : + self.run_conventional_matrix_file = self.output_directory + "/" + run + "_" + \ + cell_type + "_" + centering + ".mat" + run_conventional_integrate_file = self.output_directory + "/" + run + "_" + \ + cell_type + "_" + centering + ".integrate" + SelectCellOfType( PeaksWorkspace=peaks_ws, + CellType=cell_type, Centering=centering, + Apply=True, Tolerance=tolerance ) + # UNCOMMENT the line below to get new output values if an algorithm changes + #SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, Filename=run_conventional_integrate_file ) + SaveIsawUB( InputWorkspace=peaks_ws, 
Filename=self.run_conventional_matrix_file ) + self.saved = True + + end_time = time.time() + + CreateSingleValuedWorkspace(OutputWorkspace="XX1",DataValue="3") + + + LoadIsawUB(InputWorkspace="XX1",Filename=self.run_conventional_matrix_file ) + s1 = mtd["XX1"].sample() + + LoadIsawPeaks(OutputWorkspace="PeaksP", Filename=os.path.join(os.path.dirname(__file__), 'ReferenceResults',"3132_Orthorhombic_P.integrate")) + LoadIsawUB(InputWorkspace=peaks_ws,Filename=os.path.join(os.path.dirname(__file__), 'ReferenceResults',"3132_Orthorhombic_P.mat")) + IndexPeaks( PeaksWorkspace=peaks_ws, Tolerance=tolerance ) + CreateSingleValuedWorkspace(OutputWorkspace="XX2",DataValue="3") + LoadIsawUB(InputWorkspace="XX2",Filename=os.path.join(os.path.dirname(__file__), 'ReferenceResults',"3132_Orthorhombic_P.mat")) + + s2 = mtd["XX2"].sample() + ol = s1.getOrientedLattice() + o2 = s2.getOrientedLattice() + self.assertDelta( ol.a(), ol.a(), 0.01, "Correct lattice a value not found.") + self.assertDelta( ol.b(), ol.b(), 0.01, "Correct lattice b value not found.") + self.assertDelta( ol.c(), ol.c(), 0.01, "Correct lattice c value not found.") + self.assertDelta( ol.alpha(), ol.alpha(), 0.4, "Correct lattice angle alpha value not found.") + self.assertDelta( ol.beta(), ol.beta(), 0.4, "Correct lattice angle beta value not found.") + self.assertDelta( ol.gamma(), ol.gamma(), 0.4, "Correct lattice angle gamma value not found.") + + self.__reduced_ws_name = str(peaks_ws) + + print '\nReduced run ' + str(run) + ' in ' + str(end_time - start_time) + ' sec' + print ["output directory=",self.output_directory] + + def cleanup(self): + if self.saved: + import os + os.remove( self.run_conventional_matrix_file) + + def validateMethod(self): + return "ValidateWorkspaceToWorkspace" + + def validate(self): + return [self.__reduced_ws_name,'PeaksP'] + + def requiredFiles(self): + + return [os.path.join(os.path.dirname(__file__), 
'ReferenceResults',"3132_Orthorhombic_P.integrate"),os.path.join(os.path.dirname(__file__), 'ReferenceResults',"3132_Orthorhombic_P.mat")] diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryISIS.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryISIS.py new file mode 100644 index 0000000000000000000000000000000000000000..b6b7b9993976f7075d5a68c35a36284fc03c51cd --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryISIS.py @@ -0,0 +1,101 @@ +""" +These system tests are to verify the behaviour of the ISIS reflectometry reduction scripts +""" + +import stresstesting +from mantid.simpleapi import * + +from abc import ABCMeta, abstractmethod + +class ReflectometryISIS(stresstesting.MantidStressTest): + + __metaclass__ = ABCMeta # Mark as an abstract class + + @abstractmethod + def get_workspace_name(self): + """Returns the name of the workspace""" + raise NotImplementedError("Implement get_workspace_name to return ") + + def runTest(self): + + workspace_name = self.get_workspace_name() + workspace_nexus_file = workspace_name + ".nxs" + + PIX=1.1E-3 #m + SC=75 + avgDB=29 + Load(Filename=workspace_nexus_file,OutputWorkspace=workspace_name) + X=mtd[workspace_name] + X = ConvertUnits(InputWorkspace=X,Target="Wavelength",AlignBins="1") + # Reference intensity to normalise by + CropWorkspace(InputWorkspace=X,OutputWorkspace='Io',XMin=0.8,XMax=14.5,StartWorkspaceIndex=2,EndWorkspaceIndex=2) + # Crop out transmission and noisy data + CropWorkspace(InputWorkspace=X,OutputWorkspace='D',XMin=0.8,XMax=14.5,StartWorkspaceIndex=3) + Io=mtd['Io'] + D=mtd['D'] + + # Peform the normaisation step + Divide(LHSWorkspace=D,RHSWorkspace=Io,OutputWorkspace='I', + AllowDifferentNumberSpectra='1',ClearRHSWorkspace='1') + I=mtd['I'][0] + + # Automatically determine the SC and averageDB + FindReflectometryLines(InputWorkspace=I, StartWavelength=10, OutputWorkspace='spectrum_numbers') + spectrum_table = mtd['spectrum_numbers'] + 
self.assertTrue(2 == spectrum_table.columnCount()) + self.assertTrue(1 == spectrum_table.rowCount()) + self.assertTrue(SC == spectrum_table.cell(0, 0)) #Check that the algorithm found the expected answer for the reflected line + self.assertTrue(avgDB == spectrum_table.cell(0, 1)) #Check that the algorithm found the expected answer for the transmisson line + + # Move the detector so that the detector channel matching the reflected beam is at 0,0 + MoveInstrumentComponent(Workspace=I,ComponentName="lineardetector",X=0,Y=0,Z=-PIX*( (SC-avgDB)/2.0 +avgDB) ) + + # Should now have signed theta vs Lambda + ConvertSpectrumAxis(InputWorkspace=I,OutputWorkspace='SignedTheta_vs_Wavelength',Target='signed_theta') + + # Check that signed two theta is being caluclated correctly (not normalised) + ws1 = mtd['SignedTheta_vs_Wavelength'] + upperHistogram = ws1.getNumberHistograms()-1 + for i in range(0, upperHistogram): + thisTheta = ws1.detectorSignedTwoTheta(ws1.getDetector(i)) + nextTheta = ws1.detectorSignedTwoTheta(ws1.getDetector(i+1)) + #This check would fail if negative values were being normalised. + self.assertTrue(thisTheta < nextTheta) + + # MD transformations + ConvertToReflectometryQ(InputWorkspace='SignedTheta_vs_Wavelength',OutputWorkspace='QxQy',OutputDimensions='Q (lab frame)', Extents='-0.0005,0.0005,0,0.12') + ConvertToReflectometryQ(InputWorkspace='SignedTheta_vs_Wavelength',OutputWorkspace='KiKf',OutputDimensions='K (incident, final)', Extents='0,0.05,0,0.05') + ConvertToReflectometryQ(InputWorkspace='SignedTheta_vs_Wavelength',OutputWorkspace='PiPf',OutputDimensions='P (lab frame)', Extents='0,0.1,-0.02,0.15') + + # Bin the outputs to histograms because observations are not important. 
+ BinMD(InputWorkspace='QxQy',AxisAligned='0',BasisVector0='Qx,(Ang^-1),1,0',BasisVector1='Qz,(Ang^-1),0,1',OutputExtents='-0.0005,0.0005,0,0.12',OutputBins='100,100',Parallel='1',OutputWorkspace='QxQy_rebinned') + BinMD(InputWorkspace='KiKf',AxisAligned='0',BasisVector0='Ki,(Ang^-1),1,0',BasisVector1='Kf,(Ang^-1),0,1',OutputExtents='0,0.05,0,0.05',OutputBins='200,200',Parallel='1',OutputWorkspace='KiKf_rebinned') + BinMD(InputWorkspace='PiPf',AxisAligned='0',BasisVector0='Pz_i + Pz_f,(Ang^-1),1,0',BasisVector1='Pz_i - Pz_f,(Ang^-1),0,1',OutputExtents='0,0.1,-0.02,0.15',OutputBins='50,50',Parallel='1',OutputWorkspace='PiPf_rebinned') + + # Fetch benchmarks for testing against + LoadMD(Filename="POLREF_qxqy_benchmark.nxs", OutputWorkspace="QxQy_benchmark") + LoadMD(Filename="POLREF_kikf_benchmark.nxs", OutputWorkspace="KiKf_benchmark") + LoadMD(Filename="POLREF_pipf_benchmark.nxs", OutputWorkspace="PiPf_benchmark") + + # Check the outputs + qxqy_comparison = CompareMDWorkspaces(Workspace1='QxQy_rebinned',Workspace2='QxQy_benchmark', Tolerance=0.01, CheckEvents=False) + kikf_comparison = CompareMDWorkspaces(Workspace1='KiKf_rebinned',Workspace2='KiKf_benchmark', Tolerance=0.01, CheckEvents=False) + pipf_comparison = CompareMDWorkspaces(Workspace1='PiPf_rebinned',Workspace2='PiPf_benchmark', Tolerance=0.01, CheckEvents=False) + + # Assert against the outputs + self.assertTrue(int(qxqy_comparison[0]) == 1) + self.assertTrue(int(kikf_comparison[0]) == 1) + self.assertTrue(int(pipf_comparison[0]) == 1) + + return True; + + def doValidate(self): + return True; + +# Specialisation for testing POLREF +class POLREF_ReflectometryISIS(ReflectometryISIS): + def get_workspace_name(self): + return "POLREF4699" + + +#Others to follow here. 
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickCombineMulti.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickCombineMulti.py new file mode 100644 index 0000000000000000000000000000000000000000..85f805fcfc8dddceb80efb59f22ec379ca086010 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickCombineMulti.py @@ -0,0 +1,55 @@ +import stresstesting +from mantid.simpleapi import * +from isis_reflectometry import quick +from isis_reflectometry import combineMulti + +class ReflectometryQuickCombineMulti(stresstesting.MantidStressTest): + """ + This is a system test for the top-level CombineMulti routines. Quick is the name given to the + ISIS reflectometry reduction scripts. CombineMulti is used for stitching together runs converted Into I/I0 vs |Q| taken at + different incident angles (and hence covering different Q-ranges) + """ + + __stitchedWorkspaceName = "stitched_13460_13462" + + def doQuickOnRun(self, runNumber, transmissionNumbers, instrument, incidentAngle): + defaultInstKey = 'default.instrument' + defaultInstrument = config[defaultInstKey] + try: + config[defaultInstKey] = instrument + LoadISISNexus(Filename=str(runNumber), OutputWorkspace=str(runNumber)) + for transmissionNumber in transmissionNumbers: + LoadISISNexus(Filename=str(transmissionNumber), OutputWorkspace=str(transmissionNumber)) + + transmissionRuns = ",".join(map(str, transmissionNumbers)) + # Run quick + quick.quick(str(runNumber), trans=transmissionRuns, theta=incidentAngle) + finally: + config[defaultInstKey] = defaultInstrument + return mtd[str(runNumber) + '_IvsQ'] + + def createBinningParam(self, low, step, high): + return "%f,%f,%f" %(low, step, high) + + def runTest(self): + step = 0.040 + run1QLow = 0.010 + run1QHigh = 0.06 + run2QLow = 0.035 + run2QHigh = 0.300 + + # Create IvsQ workspaces + IvsQ1 = self.doQuickOnRun(runNumber=13460, transmissionNumbers=[13463,13464], instrument='INTER', incidentAngle=0.7) 
+ IvsQ1Binned = Rebin(InputWorkspace=IvsQ1, Params=self.createBinningParam(run1QLow, -step, run1QHigh)) + + # Create IvsQ workspaces + IvsQ2 = self.doQuickOnRun(runNumber=13462, transmissionNumbers=[13463,13464], instrument='INTER', incidentAngle=2.3) + IvsQ2Binned = Rebin(InputWorkspace=IvsQ2, Params=self.createBinningParam(run2QLow, -step, run2QHigh)) + + # Peform the stitching + combineMulti.combineDataMulti([IvsQ1Binned.name(), IvsQ2Binned.name()], self.__stitchedWorkspaceName, [run1QLow, run2QLow], [run1QHigh, run2QHigh], run1QLow, run2QHigh, -step, 1) + + + def validate(self): + self.disableChecking.append('Instrument') + return self.__stitchedWorkspaceName,'QuickStitchedReferenceResult.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickMultiDetector.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickMultiDetector.py new file mode 100644 index 0000000000000000000000000000000000000000..99c1859228d2eca42f39b3b0a6661a077ec70f75 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickMultiDetector.py @@ -0,0 +1,25 @@ +import stresstesting +from mantid.simpleapi import * +from isis_reflectometry import quick + +class ReflectometryQuickMultiDetector(stresstesting.MantidStressTest): + """ + This is a system test for the top-level quick routines. Quick is the name given to the + ISIS reflectometry reduction scripts. + + This test uses the multidetector functionality within the script. No transmission runs are passed, so it uses correction algorithms instead. 
+ """ + + def runTest(self): + workspace_name = "POLREF4699" + workspace_nexus_file = workspace_name + ".nxs" + ws = Load(workspace_nexus_file, OutputWorkspace=workspace_name) + + first_ws = ws[0] + + quick.quick_explicit(first_ws, i0_monitor_index=0, lambda_min=0.8, lambda_max=14.5, background_min=0.8, background_max=14.5, int_min=0.8, int_max=14.5, + point_detector_start=0, point_detector_stop=245, multi_detector_start=1, theta=0, pointdet=False, roi=[74,74]) + + def validate(self): + self.disableChecking.append('Instrument') + return '4699_IvsQ','4699_IvsQ_Result.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetector.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetector.py new file mode 100644 index 0000000000000000000000000000000000000000..18e42cd8d2e69ea666b92090faa9eaac72a8d1ac --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetector.py @@ -0,0 +1,30 @@ +import stresstesting +from mantid.simpleapi import * +from isis_reflectometry import quick + +class ReflectometryQuickPointDetector(stresstesting.MantidStressTest): + """ + This is a system test for the top-level quick routines. Quick is the name given to the + ISIS reflectometry reduction scripts. Uses the point detector functionality with real transmission corrections. 
+ + """ + + def runTest(self): + defaultInstKey = 'default.instrument' + defaultInstrument = config[defaultInstKey] + try: + config[defaultInstKey] = 'INTER' + LoadISISNexus(Filename='13463', OutputWorkspace='13463') + LoadISISNexus(Filename='13464', OutputWorkspace='13464') + LoadISISNexus(Filename='13460', OutputWorkspace='13460') + + transmissionRuns = '13463,13464' + runNo = '13460' + incidentAngle = 0.7 + quick.quick(runNo, trans=transmissionRuns, theta=incidentAngle) + finally: + config[defaultInstKey] = defaultInstrument + + def validate(self): + self.disableChecking.append('Instrument') + return '13460_IvsQ','QuickReferenceResult.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetectorMakeTransmission.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetectorMakeTransmission.py new file mode 100644 index 0000000000000000000000000000000000000000..3644e3f384dd4851de59b164e6b8148a20721f86 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReflectometryQuickPointDetectorMakeTransmission.py @@ -0,0 +1,32 @@ +import stresstesting +from mantid.simpleapi import * +from isis_reflectometry import quick + +class ReflectometryQuickPointDetectorMakeTransmission(stresstesting.MantidStressTest): + """ + This is a system test for the top-level quick routines. Quick is the name given to the + ISIS reflectometry reduction scripts. Uses the point detector functionality with real transmission corrections. 
+ + """ + + def runTest(self): + defaultInstKey = 'default.instrument' + defaultInstrument = config[defaultInstKey] + try: + config[defaultInstKey] = 'INTER' + LoadISISNexus(Filename='13463', OutputWorkspace='13463') + LoadISISNexus(Filename='13464', OutputWorkspace='13464') + LoadISISNexus(Filename='13460', OutputWorkspace='13460') + + transmissionRuns = '13463,13464' + runNo = '13460' + incidentAngle = 0.7 + transmissionWs=quick.make_trans_corr(transmissionRuns, stitch_start_overlap=10, + stitch_end_overlap=12, stitch_params=[1.5,0.02,17]) + quick.quick(runNo, trans=transmissionWs, theta=incidentAngle) + finally: + config[defaultInstKey] = defaultInstrument + + def validate(self): + self.disableChecking.append('Instrument') + return '13460_IvsQ','QuickReferenceResult.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ReuseExistingCalibration.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ReuseExistingCalibration.py new file mode 100644 index 0000000000000000000000000000000000000000..3f678a8704e593ef8c236a7d67888ac129ba743c --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ReuseExistingCalibration.py @@ -0,0 +1,36 @@ +""" + Verifies that a calibration file can be loaded once and reused to apply, using CopyInstrumentParameters, the same calibration + in successive reductions. 
+""" +import stresstesting + +class ReuseExistingCalibration(stresstesting.MantidStressTest): + + def requiredFiles(self): + return ["HRP39180.RAW", "HRP38094Calib.nxs"] + + def runTest(self): + from mantid.simpleapi import Load, CopyInstrumentParameters, MoveInstrumentComponent + + def do_reduction(calibration): + # load data + data = Load("HRP39180.RAW") + # copy parameters from calibration to data + CopyInstrumentParameters(calibration, data) + # Now move component on data workspace using a relative move, where that component was a detector in the calibrated workspace + MoveInstrumentComponent(data, DetectorID=1100,X=0.0,Y=0.0,Z=5.0,RelativePosition=True) + return data.getDetector(0).getPos() + #### + + # load calibration + calibration = Load("HRP38094Calib") + self.det_pos_first_run = do_reduction(calibration) + # again not reloading of calibration + self.det_pos_second_run = do_reduction(calibration) + + def validate(self): + if self.det_pos_second_run == self.det_pos_first_run: + return True + else: + print "Error: Detector position is not the same after the second reduction!" 
+ return False \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DBatch.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DBatch.py new file mode 100644 index 0000000000000000000000000000000000000000..57403ba5816dafd5d3035891f851fd88539a1cbb --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DBatch.py @@ -0,0 +1,61 @@ +import stresstesting + +from mantid.simpleapi import * +from ISISCommandInterface import * +from mantid.simpleapi import * +from mantid import config +from SANSBatchMode import * +import os.path + +# test batch mode with sans2d and selecting a period in batch mode +class SANS2DBatch(stresstesting.MantidStressTest): + + def runTest(self): + + SANS2D() + Set1D() + Detector("rear-detector") + MaskFile('MASKSANS2Doptions.091A') + Gravity(True) + + csv_file = FileFinder.getFullPath('SANS2D_periodTests.csv') + + BatchReduce(csv_file, 'nxs', plotresults=False, saveAlgs={'SaveCanSAS1D':'xml','SaveNexus':'nxs'}) + + os.remove(os.path.join(config['defaultsave.directory'],'5512p7_SANS2DBatch.xml')) + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return '5512p7_SANS2DBatch','SANS2DBatch.nxs' + +class SANS2DNewSettingsCarriedAcrossInBatchMode(stresstesting.MantidStressTest): + """ + We want to make sure that any settings saved in the PropertyManager objects + are used across all iterations of the reduction in Batch mode. The MASKFILE + command uses this new way of storing settings in ISIS SANS, and so we'll + see if the same masks get applied in the second iteration as they do in the + first. 
+ """ + def runTest(self): + config['default.instrument'] = 'SANS2D' + SANS2D() + Set1D() + Detector("rear-detector") + # This contains two MASKFILE commands, each resulting in a seperate call to MaskDetectors. + MaskFile('MaskSANS2DReductionGUI_MaskFiles.txt') + Gravity(True) + + # This does 2 seperate reductions of the same data, but saving the result of each to a different workspace. + csv_file = FileFinder.getFullPath("SANS2D_mask_batch.csv") + BatchReduce(csv_file, 'nxs', plotresults=False) + + def validate(self): + self.tolerance_is_reller = True + self.tolerance = 1.0e-2 + return "iteration_2", "SANS2DNewSettingsCarriedAcross.nxs" diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DFrontNoGrav.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DFrontNoGrav.py new file mode 100644 index 0000000000000000000000000000000000000000..3038f9f5e27947fccf45dbf61fb0ed31d2a5a051 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DFrontNoGrav.py @@ -0,0 +1,25 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class SANS2DFrontNoGrav(stresstesting.MantidStressTest): + + def runTest(self): + + SANS2D() + MaskFile('MASKSANS2D_094i_RKH.txt') + SetDetectorOffsets('REAR', -16.0, 58.0, 0.0, 0.0, 0.0, 0.0) + SetDetectorOffsets('FRONT', -44.0, -20.0, 47.0, 0.0, 1.0, 1.0) + Gravity(False) + Set1D() + + + AssignSample('2500.nxs') + + WavRangeReduction(4.6, 12.85, False) + + def validate(self): + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + return '2500front_1D_4.6_12.85','SANS2DFrontNoGrav.nxs' \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLOQReloadWorkspaces.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLOQReloadWorkspaces.py new file mode 100644 index 0000000000000000000000000000000000000000..ced0e61b7913844684e799569fce5dedb5968991 --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLOQReloadWorkspaces.py @@ -0,0 +1,326 @@ +import stresstesting +from mantid.simpleapi import * +from mantid.api import Workspace +from ISISCommandInterface import * +import numpy +import unittest + +## export PYTHONPATH=/apps/workspace/mantid_debug/bin/:/apps/mantid/systemtests/StressTestFramework/:/apps/mantid/mantid/Code/Mantid/scripts/SANS/:/apps/mantid/mantid/Code/Mantid/scripts/reduction + + +""" +Allowing the reduction to use already loaded workspace will make it easier to +deal with event mode and producing new workspaces for the reduction of data. +Till 06/2013 the reload option was available, but not implemented. + +In order to protect the system, it is suggested the following integration tests +to ensure that allowing workspaces as input to the reduction will not disturb the +reduction itself, and it is safe. + +LOQReductionShouldAcceptLoadedWorkspace ensure some requirements for the reloading. +SANS2DReductionShouldAcceptLoadedWorkspace and SANS2DReductionShouldAcceptLoadedWorkspaceRawFile +apply the same requirements for SANS2D instruments. + + +LOQReductionShouldAcceptLoadedWorkspaceStressTest, SANS2DReductionShouldAcceptLoadedWorkspaceStressTest +and SANS2DReductionShouldAcceptLoadedWorkspace are wrappers to make unittest.TestCase to fit the stresstesting +framework. + +The other tests are here to ensure the results of providing directly workspaces will be the same that loading +from files. 
+ +""" + +class LOQReductionShouldAcceptLoadedWorkspace(unittest.TestCase): + """ + The following tests is to ensure that the reload obeys the following requirement: + * If reload is True the real data will be always reloaded from the file + * If reload is False, it will be used, if it pass the following tests: + * The instrument components have not been moved + """ + def setUp(self): + self.load_run = '54431.raw' + config["default.instrument"] = "LOQ" + LOQ() + MaskFile("MASK.094AA") + self.control_name = '54431main_1D_2.2_10.0' + self.inst_comp = 'main-detector-bank' + + def tearDown(self): + mtd.clear() + + def test_accept_loaded_workspace_only_if_reload_false(self): + my_workspace = Load(self.load_run) + #set the value for my_workspace to ensure it is the one used + aux = my_workspace.dataY(0) + aux[10]=5 + my_workspace.setY(0,aux) + # ask to use the loaded workspace + AssignSample(my_workspace,reload=False) + + ws_name = ReductionSingleton().get_sample().get_wksp_name() + + self.assertTrue(ws_name, my_workspace.name()) + + self.assertTrue(my_workspace.dataY(0)[10],5) + # ensure that it is able to execute the reduction + Reduce() + self.assertTrue(self.control_name in mtd) + + + def test_accept_loaded_workspace_but_reload_the_data_file_if_reload_true(self): + my_workspace = Load(self.load_run) + #set the value for my_workspace to ensure it is the one used + aux = my_workspace.dataY(0) + aux[10]=5 + my_workspace.setY(0,aux) + # ask to use the loaded workspace + AssignSample(my_workspace,reload=True) + + ws_name = ReductionSingleton().get_sample().get_wksp_name() + # it is different, because, it will compose the name using its rule, + # wich, for sure, will be different of my_workspace. 
+ self.assertFalse(ws_name==my_workspace.name()) + self.assertFalse(mtd[ws_name].dataY(0)[10]==5) + # it is not necessary to ensure the Reduce occurs + + def test_should_not_accept_loaded_workspace_if_moved(self): + my_workspace = Load(self.load_run) + MoveInstrumentComponent(my_workspace,self.inst_comp,X=2,Y=1,Z=0) + ## attempt to use a workspace that has been moved + self.assertRaises(RuntimeError, AssignSample, my_workspace, False) + + + def test_should_not_accept_loaded_workspace_if_moved_2(self): + # assign sample loads and move the workspace to the defined center + AssignSample(self.load_run) + + # this makes it load this worksapce and generates an output workspace + ws_name = ReductionSingleton().get_sample().get_wksp_name() + # the workspace is renamed, so it seems another workspace + my_workspace = RenameWorkspace(ws_name) + ## trying to assing it again to AssingSample must fail + self.assertRaises(RuntimeError, AssignSample, my_workspace, False) + +class SANS2DReductionShouldAcceptLoadedWorkspace(LOQReductionShouldAcceptLoadedWorkspace): + def setUp(self): + self.load_run = '2500.nxs' + config["default.instrument"] = "SANS2D" + SANS2D() + MaskFile("MASKSANS2D_094i_RKH.txt") + self.control_name = '2500front_1D_4.6_12.85' + self.inst_comp = 'rear-detector' + +class SANS2DReductionShouldAcceptLoadedWorkspaceRawFile(SANS2DReductionShouldAcceptLoadedWorkspace): + def setUp(self): + SANS2DReductionShouldAcceptLoadedWorkspace.setUp(self) + self.load_run = '5547.raw' + self.control_name = '5547front_1D_4.6_12.85' + +class LOQReductionShouldAcceptLoadedWorkspaceStressTest(stresstesting.MantidStressTest): + cl = LOQReductionShouldAcceptLoadedWorkspace + def runTest(self): + self._success = False + # Custom code to create and run this single test suite + suite = unittest.TestSuite() + suite.addTest( unittest.makeSuite(self.cl, "test")) + runner = unittest.TextTestRunner() + # Run using either runner + res = runner.run(suite) + if res.wasSuccessful(): + self._success 
= True + + def validate(self): + return self._success + +class SANS2DReductionShouldAcceptLoadedWorkspaceStressTest(LOQReductionShouldAcceptLoadedWorkspaceStressTest): + cl = SANS2DReductionShouldAcceptLoadedWorkspace + +class SANS2DReductionShouldAcceptLoadedWorkspaceStressTest2(LOQReductionShouldAcceptLoadedWorkspaceStressTest): + cl = SANS2DReductionShouldAcceptLoadedWorkspaceRawFile + + +class LOQTransFitWorkspace2DWithLoadedWorkspace(stresstesting.MantidStressTest): + def runTest(self): + config["default.instrument"] = "LOQ" + LOQ() + MaskFile('MASK.094AA') + Gravity(False) + Set2D() + Detector("main-detector-bank") + Sample = LoadRaw('54431.raw') + AssignSample(Sample,False) + Can = LoadRaw('54432.raw') + AssignCan(Can,False) + LimitsWav(3,4, 0.2, 'LIN') + TransFit('LOG',3.0,8.0) + Sample_Trans = LoadRaw('54435.raw') + Sample_Direct = LoadRaw('54433.raw') + TransmissionSample(Sample_Trans, Sample_Direct, False) + Can_Trans = LoadRaw('54434.raw') + Can_Direct = LoadRaw('54433.raw') + TransmissionCan(Can_Trans, Can_Direct, False) + + #run the reduction + WavRangeReduction(3, 4, False, '_suff') + + def validate(self): + self.disableChecking.append('SpectraMap') + #when comparing LOQ files you seem to need the following + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + return '54431main_2D_3.0_4.0_suff','LOQTransFitWorkspace2D.nxs' + +class LOQReductionOnLoadedWorkspaceMustProduceTheSameResult_1(stresstesting.MantidStressTest): + """ It will repeat the test done at LOQCentreNoGrav but using + loaded workspaces + """ + def runTest(self): + config["default.instrument"] = "LOQ" + LOQ() + + Set1D() + Detector("rear-detector") + MaskFile('MASK.094AA') + Gravity(False) + Sample = LoadRaw('54431.raw') + Trans_Sample = LoadRaw('54435.raw') + Trans_Direct = LoadRaw('54433.raw') + Can = LoadRaw('54432.raw') + CanTrans_Sample = LoadRaw('54434.raw') + CanTrans_Direct = LoadRaw('54433.raw') + + AssignSample(Sample, False) + 
TransmissionSample(Trans_Sample, Trans_Direct, False) + AssignCan(Can, False) + TransmissionCan(CanTrans_Sample, CanTrans_Direct, False) + + FindBeamCentre(60,200, 9) + + WavRangeReduction(3, 9, DefaultTrans) + + def validate(self): + return '54431main_1D_3.0_9.0','LOQCentreNoGravSearchCentreFixed.nxs' + +class LOQReductionOnLoadedWorkspaceMustProduceTheSameResult_2(stresstesting.MantidStressTest): + """Before ticket #8461 test LOQReductionOnLoadedWorkspaceMustProduceTheSameResult_1 used + to produce a workspace that matches LOQCentreNoGrav.nxs. This test is created to ensure + that if we put the same centre that was produced before, we finish in the same result + for the reduction""" + def runTest(self): + config["default.instrument"] = "LOQ" + LOQ() + + Set1D() + Detector("rear-detector") + MaskFile('MASK.094AA') + Gravity(False) + Sample = LoadRaw('54431.raw') + Trans_Sample = LoadRaw('54435.raw') + Trans_Direct = LoadRaw('54433.raw') + Can = LoadRaw('54432.raw') + CanTrans_Sample = LoadRaw('54434.raw') + CanTrans_Direct = LoadRaw('54433.raw') + + SetCentre(324.765, 327.670) + + AssignSample(Sample, False) + TransmissionSample(Trans_Sample, Trans_Direct, False) + AssignCan(Can, False) + TransmissionCan(CanTrans_Sample, CanTrans_Direct, False) + + WavRangeReduction(3, 9, DefaultTrans) + + def validate(self): + # Need to disable checking of the Spectra-Detector map becauseit isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). 
+ self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return '54431main_1D_3.0_9.0','LOQCentreNoGrav.nxs' + + +class SANSLOQCan2DReloadWorkspace(stresstesting.MantidStressTest): + + def runTest(self): + config["default.instrument"] = "LOQ" + LOQ() + Set2D() + Detector("main-detector-bank") + MaskFile('MASK.094AA') + # apply some small artificial shift + SetDetectorOffsets('REAR', -1.0, 1.0, 0.0, 0.0, 0.0, 0.0) + Gravity(True) + sample = Load('99630') + can = Load('99631') + AssignSample(sample, False) + AssignCan(can, False) + + WavRangeReduction(None, None, False) + + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + #when comparing LOQ files you seem to need the following + self.disableChecking.append('Axes') + # the change in number is because the run number reported from 99630 is 53615 + return '53615main_2D_2.2_10.0','SANSLOQCan2D.nxs' + +class SANS2DFrontNoGravReloadWorkspace(stresstesting.MantidStressTest): + + def runTest(self): + config["default.instrument"] = "SANS2D" + SANS2D() + MaskFile('MASKSANS2D_094i_RKH.txt') + SetDetectorOffsets('REAR', -16.0, 58.0, 0.0, 0.0, 0.0, 0.0) + SetDetectorOffsets('FRONT', -44.0, -20.0, 47.0, 0.0, 1.0, 1.0) + Gravity(False) + Set1D() + Sample = LoadNexus('2500') + AssignSample(Sample, False) + WavRangeReduction(4.6, 12.85, False) + + def validate(self): + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + return '2500front_1D_4.6_12.85','SANS2DFrontNoGrav.nxs' + +class SANS2DWaveloopsReloadWorkspace(stresstesting.MantidStressTest): + + def runTest(self): + config["default.instrument"] = "SANS2D" + SANS2D() + 
MaskFile('MASKSANS2D.091A') + Gravity(True) + Set1D() + s = Load('992') + s_t = Load('988') + direct = Load('987') + direct_can = CloneWorkspace(direct) + c = Load('993') + c_t = Load('989') + AssignSample(s,False) + TransmissionSample(s_t, direct, False) + AssignCan(c, False) + TransmissionCan(c_t, direct_can, False) + + CompWavRanges([3, 5, 7, 11], False) + + def validate(self): + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + # testing one of the workspaces that is produced, best not to choose the + # first one in produced by the loop as this is the least error prone + return '992rear_1D_7.0_11.0','SANS2DWaveloops.nxs' + + +if __name__ == "__main__": + unittest.main() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLimitEventsTime.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLimitEventsTime.py new file mode 100644 index 0000000000000000000000000000000000000000..29e74f2a2fec60289c6d8ec6652cb4e5d707dfd6 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DLimitEventsTime.py @@ -0,0 +1,17 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class SANS2DLimitEventsTime(stresstesting.MantidStressTest): + + def runTest(self): + SANS2D() + MaskFile('MaskSANS2DReductionGUI_LimitEventsTime.txt') + AssignSample('22048') + reduced = WavRangeReduction() + + def validate(self): + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + return '22048rear_1D_1.5_12.5','SANSReductionGUI_LimitEventsTime.nxs' \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriod.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriod.py new file mode 100644 index 0000000000000000000000000000000000000000..a075de22d982ea6d58574e9b55e767025b14ad3c --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriod.py @@ -0,0 +1,48 @@ +import stresstesting + +from mantid.simpleapi import * +from ISISCommandInterface import * +from mantid.simpleapi import * +from mantid import config +from SANSBatchMode import * +import os.path + +# test batch mode with sans2d and selecting a period in batch mode +class SANS2DMultiPeriodSingle(stresstesting.MantidStressTest): + + def runTest(self): + + SANS2D() + Set1D() + Detector("rear-detector") + MaskFile('MASKSANS2Doptions.091A') + Gravity(True) + + AssignSample('5512') + self.reduced = WavRangeReduction() + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return mtd[self.reduced][6].name(),'SANS2DBatch.nxs' + +class SANS2DMultiPeriodBatch(SANS2DMultiPeriodSingle): + + def runTest(self): + + SANS2D() + Set1D() + Detector("rear-detector") + MaskFile('MASKSANS2Doptions.091A') + Gravity(True) + + csv_file = FileFinder.getFullPath('SANS2D_multiPeriodTests.csv') + + BatchReduce(csv_file, 'nxs', saveAlgs={}) + self.reduced = '5512_SANS2DBatch' + \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriodAddFiles.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriodAddFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..2d66c401afa692ae4ad0132059fc4c57e7adbebc --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DMultiPeriodAddFiles.py @@ -0,0 +1,38 @@ +import stresstesting +from mantid.simpleapi import * +from mantid import config +from ISISCommandInterface import * + +class SANS2DMultiPeriodAddFiles(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """Requires 
2.5Gb""" + return 2500 + + def runTest(self): + + SANS2D() + Set1D() + Detector("rear-detector") + MaskFile('MASKSANS2Doptions.091A') + Gravity(True) + + add_runs( ('5512', '5512') ,'SANS2D', 'nxs', lowMem=True) + + #one period of a multi-period Nexus file + AssignSample('5512-add.nxs', period=7) + + WavRangeReduction(2, 4, DefaultTrans) + + os.remove(os.path.join(config['defaultsave.directory'],'SANS2D00005512-add.nxs')) + os.remove(os.path.join(config['defaultsave.directory'],'SANS2D00005512.log')) + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + self.disableChecking.append('Axes') + + return '5512p7rear_1D_2.0_4.0Phi-45.0_45.0','SANS2DMultiPeriodAddFiles.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUI.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUI.py new file mode 100644 index 0000000000000000000000000000000000000000..27b19964cc358ce52bb33d92999eabfaf7a2c4a3 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUI.py @@ -0,0 +1,290 @@ +""" +These tests ensure that all the steps that the SANS Interface GUI performs to reduce SANS data +on the SANS2D instrument is avalailable and is conforming to this test. + +Although verbotic, it is all the steps that the GUI calls when asked to perform a full reduction +on SANS2D instrument. + +This test also allows an easy comparison of the steps used by the reduction in batch mode and in single mode. + +The first 2 Tests ensures that the result provided by the GUI are the same for the minimalistic script. + +Test was first created to apply to Mantid Release 3.0. 
+""" + +import sys +import os + +if __name__ == "__main__": + # it is just to allow running this test in Mantid, allowing the following import + sys.path.append('/apps/mantid/systemtests/StressTestFramework/') + +import stresstesting +from mantid.simpleapi import * +import isis_reducer +import ISISCommandInterface as i +import isis_instrument +import isis_reduction_steps +import copy + +MASKFILE = FileFinder.getFullPath('MaskSANS2DReductionGUI.txt') +BATCHFILE = FileFinder.getFullPath('sans2d_reduction_gui_batch.csv') + +def s(obj): + print '!'+str(obj)+'!',type(obj) + +class SANS2DMinimalBatchReduction(stresstesting.MantidStressTest): + """Minimal script to perform full reduction in batch mode + """ + def __init__(self): + super(SANS2DMinimalBatchReduction, self).__init__() + config['default.instrument'] = 'SANS2D' + + def runTest(self): + import SANSBatchMode as batch + i.SANS2D() + i.MaskFile(MASKFILE) + fit_settings = batch.BatchReduce(BATCHFILE,'.nxs', combineDet='rear') + + def validate(self): + self.tolerance_is_reller = True + self.tolerance = 1.0e-2 + return "trans_test_rear","SANSReductionGUI.nxs" + + + +class SANS2DMinimalSingleReduction(SANS2DMinimalBatchReduction): + """Minimal script to perform full reduction in single mode""" + def runTest(self): + i.SANS2D() + i.MaskFile(MASKFILE) + i.AssignSample('22048') + i.AssignCan('22023') + i.TransmissionSample('22041','22024') + i.TransmissionCan('22024', '22024') + reduced = i.WavRangeReduction() + RenameWorkspace(reduced, OutputWorkspace='trans_test_rear') + + + + +class SANS2DGUIBatchReduction(SANS2DMinimalBatchReduction): + """Script executed by SANS GUI Interface to perform Batch Reduction""" + + def checkFloat(self, f1, f2): + self.assertDelta(f1,f2,0.0001) + + def checkStr(self, s1, s2): + self.assertTrue(s1==s2, '%s != %s'%(s1,s2)) + + def checkObj(self, ob1, ob2): + self.assertTrue(ob1 == ob2, '%s != %s'%(str(ob1),str(ob2))) + + def checkFirstPart(self): + 
self.checkObj(i.ReductionSingleton().instrument.listDetectors(),('rear-detector', 'front-detector')) + self.checkStr(i.ReductionSingleton().instrument.cur_detector().name() , 'rear-detector') + self.checkFloat(i.ReductionSingleton().mask.min_radius, 0.041) + self.checkFloat(i.ReductionSingleton().mask.max_radius, -0.001) + self.checkFloat(i.ReductionSingleton().to_wavelen.wav_low, 1.5) + self.checkFloat(i.ReductionSingleton().to_wavelen.wav_high, 12.5) + self.checkFloat(i.ReductionSingleton().to_wavelen.wav_step, 0.125) + self.checkStr(i.ReductionSingleton().to_Q.binning, " .001,.001,.0126,-.08,.2") + self.checkFloat(i.ReductionSingleton().QXY2,0.05) + self.checkFloat(i.ReductionSingleton().DQXY, 0.001) + self.checkFloat(i.ReductionSingleton().transmission_calculator.lambdaMin('SAMPLE'), 1.5) + self.checkStr(i.ReductionSingleton().transmission_calculator.fitMethod('SAMPLE'), 'LOGARITHMIC') + self.checkFloat(i.ReductionSingleton().transmission_calculator.lambdaMin('CAN'), 1.5) + self.checkFloat(i.ReductionSingleton().instrument.WAV_RANGE_MIN, 2.0) + self.checkFloat(i.ReductionSingleton().instrument.WAV_RANGE_MAX, 14.0) + self.checkFloat(i.ReductionSingleton().transmission_calculator.lambdaMax('CAN'), 12.5) + self.checkStr(i.ReductionSingleton().transmission_calculator.fitMethod('CAN'), 'LOGARITHMIC') + self.checkFloat(i.ReductionSingleton().transmission_calculator.lambdaMin('SAMPLE'), 1.5) + self.checkStr(i.ReductionSingleton().transmission_calculator.fitMethod('SAMPLE'), 'LOGARITHMIC') + self.checkFloat(i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.scale, 1.0) + self.checkFloat(i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.shift, 0.0) + self.assertTrue(not i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.fitScale) + self.assertTrue(not i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.fitShift) + self.assertTrue(not 
i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.qRangeUserSelected) + self.checkFloat(i.ReductionSingleton().instrument.get_incident_mon(), 1) + self.checkFloat(i.ReductionSingleton().instrument.incid_mon_4_trans_calc, 1) + self.assertTrue(i.ReductionSingleton().instrument.is_interpolating_norm()) + self.assertTrue(i.ReductionSingleton().transmission_calculator.interpolate) + self.assertTrue("DIRECTM1_15785_12m_31Oct12_v12.dat" in i.ReductionSingleton().instrument.detector_file('rear')) + self.assertTrue("DIRECTM1_15785_12m_31Oct12_v12.dat" in i.ReductionSingleton().instrument.detector_file('front')) + self.checkStr(i.ReductionSingleton().prep_normalize.getPixelCorrFile('REAR'), "") + self.checkStr(i.ReductionSingleton().prep_normalize.getPixelCorrFile('FRONT'), "") + self.checkFloat(i.ReductionSingleton()._corr_and_scale.rescale, 7.4) + self.checkFloat(i.ReductionSingleton().instrument.SAMPLE_Z_CORR, 0.053) + self.assertDelta(i.ReductionSingleton().get_beam_center('rear')[0], 0.15545,0.0001) + self.checkFloat(i.ReductionSingleton().get_beam_center('rear')[1], -0.16965) + self.checkFloat(i.ReductionSingleton().get_beam_center('front')[0], 0.15545) + self.checkFloat(i.ReductionSingleton().get_beam_center('front')[1], -0.16965) + self.assertTrue(i.ReductionSingleton().to_Q.get_gravity()) + self.checkStr(i.ReductionSingleton().instrument.det_selection, 'REAR') + self.checkFloat(i.ReductionSingleton().mask.phi_min, -90.0) + self.checkFloat(i.ReductionSingleton().mask.phi_max, 90.0) + self.checkStr(i.ReductionSingleton().mask.spec_mask_r, ",H0,H190>H191,H167>H172,V0,V191") + self.checkStr(i.ReductionSingleton().mask.spec_mask_f, ",H0,H190>H191,V0,V191,H156>H159") + self.checkStr(i.ReductionSingleton().mask.time_mask, ";17500 22000") + self.checkStr(i.ReductionSingleton().mask.time_mask_r, "") + self.checkStr(i.ReductionSingleton().mask.time_mask_f, "") + self.checkStr(i.ReductionSingleton().mask.time_mask_f, "") + 
self.assertTrue(i.ReductionSingleton().mask.arm_width is None) + self.assertTrue(i.ReductionSingleton().mask.arm_angle is None) + self.assertTrue(i.ReductionSingleton().mask.arm_x is None) + self.assertTrue(i.ReductionSingleton().mask.arm_y is None) + self.assertTrue(i.ReductionSingleton().mask.phi_mirror) + + def applyGUISettings(self): + i.ReductionSingleton().instrument.setDetector('rear-detector') + i.ReductionSingleton().to_Q.output_type='1D' + i.ReductionSingleton().user_settings.readLimitValues('L/R '+'41 '+'-1 '+'1', i.ReductionSingleton()) + i.LimitsWav(1.5,12.5,0.125,'LIN') + i.ReductionSingleton().user_settings.readLimitValues('L/Q .001,.001,.0126,-.08,.2', i.ReductionSingleton()) + i.LimitsQXY(0.0,0.05,0.001,'LIN') + i.SetPhiLimit(-90.0,90.0, True) + i.SetDetectorFloodFile('','REAR') + i.SetDetectorFloodFile('','FRONT') + i.TransFit(mode='Logarithmic', lambdamin='1.5', lambdamax='12.5', selector='BOTH') + i.SetFrontDetRescaleShift(scale=1.0,shift=0.0) + i.Gravity(True) + i.SetSampleOffset('53') + i.SetMonitorSpectrum('1',True) + i.SetTransSpectrum('1',True) + i.SetCentre('155.45','-169.6','rear') + i.SetCentre('155.45','-169.6','front') + i.Mask('MASK/CLEAR') + i.Mask('MASK/CLEAR/TIME') + i.Mask('MASK/REAR H0') + i.Mask('MASK/REAR H190>H191') + i.Mask('MASK/REAR H167>H172') + i.Mask('MASK/REAR V0') + i.Mask('MASK/REAR V191') + i.Mask('MASK/FRONT H0') + i.Mask('MASK/FRONT H190>H191') + i.Mask('MASK/FRONT V0') + i.Mask('MASK/FRONT V191') + i.Mask('MASK/FRONT H156>H159') + i.Mask('MASK/TIME 17500 22000') + i.Mask('L/PHI -90.0 90.0') + i.SetVerboseMode(True) + + def checkFittingSettings(self, fitdict): + self.checkFloat(fitdict['scale'], 1.0) + self.checkFloat(fitdict['shift'], 0.0) + + + + def initialization(self): + if i.ReductionSingleton().get_instrument() != 'SANS2D': + i.ReductionSingleton.clean(isis_reducer.ISISReducer) + i.ReductionSingleton().set_instrument(isis_instrument.SANS2D()) + + i.ReductionSingleton.clean(isis_reducer.ISISReducer) + 
i.ReductionSingleton().set_instrument(isis_instrument.SANS2D()) + i.ReductionSingleton().user_settings =isis_reduction_steps.UserFile(MASKFILE); + i.ReductionSingleton().user_settings.execute(i.ReductionSingleton()) + return i + + def runTest(self): + self.initialization() + + self.checkFirstPart() + + import SANSBatchMode as batch + + self.applyGUISettings() + + _user_settings_copy = copy.deepcopy(i.ReductionSingleton().user_settings) + + fit_settings={'scale':1.0,'shift':0.0} + fit_settings = batch.BatchReduce(BATCHFILE,'.nxs', saveAlgs={}, reducer=i.ReductionSingleton().reference(),combineDet='rear'); + + self.checkFittingSettings(fit_settings) + + def validate(self): + self.tolerance_is_reller = True + self.tolerance = 1.0e-2 + return "trans_test_rear","SANSReductionGUI.nxs" + +class SANS2DGUIReduction(SANS2DGUIBatchReduction): + """Script executed by SANS GUI Interface to perform reduction in single mode""" + + def checkAfterLoad(self): + self.checkFloat(i.ReductionSingleton().get_sample().loader.periods_in_file, 1) + self.checkFloat(i.ReductionSingleton().background_subtracter.periods_in_file, 1) + self.checkFloat(i.ReductionSingleton().samp_trans_load.direct.periods_in_file, 1) + self.checkFloat(i.ReductionSingleton().can_trans_load.direct.periods_in_file,1) + self.assertTrue(not i.GetMismatchedDetList()) + + def loadSettings(self): + i.ReductionSingleton().instrument.setDetector('rear-detector') + i.SetCentre('155.45','-169.6','rear') + i.SetCentre('155.45','-169.6','front') + SCATTER_SAMPLE, logvalues = i.AssignSample(r'SANS2D00022048.nxs', reload = True, period = 1) + + i.SetCentre('155.45','-169.6','rear') + i.SetCentre('155.45','-169.6','front') + SCATTER_SAMPLE, logvalues = i.AssignCan(r'SANS2D00022023.nxs', reload = True, period = 1) + + t1, t2 = i.TransmissionSample(r'SANS2D00022041.nxs', r'SANS2D00022024.nxs', period_t=1, period_d=1) + + t1, t2 = i.TransmissionCan(r'SANS2D00022024.nxs', r'SANS2D00022024.nxs', period_t=1, period_d=1) + + def 
applySampleSettings(self): + i.ReductionSingleton().get_sample().geometry.shape = 3 + i.ReductionSingleton().get_sample().geometry.height = 8 + i.ReductionSingleton().get_sample().geometry.width = 8 + i.ReductionSingleton().get_sample().geometry.thickness = 2 + + + def checkFittingSettings(self): + settings = {'scale':i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.scale, + 'shift':i.ReductionSingleton().instrument.getDetector('FRONT').rescaleAndShift.shift} + super(SANS2DGUIReduction,self).checkFittingSettings(settings) + + + def cleanReduction(self, user_settings): + i.ReductionSingleton.clean(isis_reducer.ISISReducer) + i.ReductionSingleton().set_instrument(isis_instrument.SANS2D()) + #i.ReductionSingleton().user_file_path='' + i.ReductionSingleton().user_settings = user_settings + i.ReductionSingleton().user_settings.execute(i.ReductionSingleton()); + + + + def singleModePrepare(self): + self.initialization() + + self.checkFirstPart() + + self.loadSettings() + + self.checkAfterLoad() + + self.applyGUISettings() + + self.applySampleSettings() + + def runTest(self): + self.singleModePrepare() + + _user_settings_copy = copy.deepcopy(i.ReductionSingleton().user_settings) + + reduced = i.WavRangeReduction(full_trans_wav=False, resetSetup=False) + + self.checkFittingSettings() + + RenameWorkspace(reduced, OutputWorkspace='trans_test_rear') + + self.cleanReduction(_user_settings_copy) + + _user_settings_copy = copy.deepcopy(i.ReductionSingleton().user_settings) + + + +if __name__ == "__main__": + #test = SANS2DGUIBatchReduction() + #test.execute() + test = SANS2DGUIReduction() + test.execute() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUIAdded.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUIAdded.py new file mode 100644 index 0000000000000000000000000000000000000000..3f3d963d1c3beace8008a879338735ef6ecfc4b5 --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DReductionGUIAdded.py @@ -0,0 +1,58 @@ +import sys +import os + +if __name__ == "__main__": + # it is just to allow running this test in Mantid, allowing the following import + sys.path.append('/apps/mantid/systemtests/StressTestFramework/') + +from mantid.simpleapi import * +import ISISCommandInterface as i +import isis_reducer +import isis_instrument +import isis_reduction_steps +import copy +import SANS2DReductionGUI as sansgui + +class SANS2DReductionGUIAddedFiles(sansgui.SANS2DGUIReduction): + def runTest(self): + self.initialization() + + self.checkFirstPart() + + # add files (SAMPLE and CAN) + import SANSadd2 + SANSadd2.add_runs(('22048','22048'),'SANS2D', '.nxs', rawTypes=('.add','.raw','.s*'), lowMem=False) + SANSadd2.add_runs(('22023','22023'),'SANS2D', '.nxs', rawTypes=('.add','.raw','.s*'), lowMem=False) + + # load values: + i.SetCentre('155.45','-169.6','rear') + i.SetCentre('155.45','-169.6','front') + SCATTER_SAMPLE, logvalues = i.AssignSample(r'SANS2D00022048-add.nxs', reload = True, period = 1) + SCATTER_SAMPLE, logvalues = i.AssignCan(r'SANS2D00022023-add.nxs', reload = True, period = 1) + i.TransmissionSample(r'SANS2D00022041.nxs', r'SANS2D00022024.nxs', period_t=1, period_d=1) + i.TransmissionCan(r'SANS2D00022024.nxs', r'SANS2D00022024.nxs', period_t=1, period_d=1) + + self.checkAfterLoad() + + self.applyGUISettings() + + self.applySampleSettings() + _user_settings_copy = copy.deepcopy(i.ReductionSingleton().user_settings) + + reduced = i.WavRangeReduction(full_trans_wav=False, resetSetup=False) + RenameWorkspace(reduced, OutputWorkspace='trans_test_rear') + + self.checkFittingSettings() + self.cleanReduction(_user_settings_copy) + + def validate(self): + # we have double the sample and the can, this means that the reduced data will be + # almost the same + self.tolerance_is_reller = True + self.tolerance = 0.35 + return "trans_test_rear","SANSReductionGUI.nxs" + + +if __name__ == "__main__": 
+ test = SANS2DReductionGUIAddedFiles() + test.execute() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSearchCentreGUI.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSearchCentreGUI.py new file mode 100644 index 0000000000000000000000000000000000000000..6f88b70ac38048e141567911c134917b2132edea --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSearchCentreGUI.py @@ -0,0 +1,69 @@ +import sys +import os +if __name__ == "__main__": + # it is just to allow running this test in Mantid, allowing the following import + sys.path.append('/apps/mantid/systemtests/StressTestFramework/') +from mantid.simpleapi import * +import ISISCommandInterface as i +import isis_reducer +import isis_instrument +import isis_reduction_steps +import SANS2DReductionGUI as sansgui + +class SANS2DGUISearchCentre(sansgui.SANS2DGUIReduction): + + def checkCentreResult(self): + self.checkFloat(i.ReductionSingleton().get_beam_center('rear')[0], 0.165) + self.checkFloat(i.ReductionSingleton().get_beam_center('rear')[1], -0.145 ) + + def runTest(self): + self.singleModePrepare() + + i.FindBeamCentre(rlow=41,rupp=280,MaxIter=3,xstart=float(150)/1000.,ystart=float(-160)/1000., tolerance=0.0001251) + self.checkCentreResult() + # clean up + + i.ReductionSingleton.clean(isis_reducer.ISISReducer) + i.ReductionSingleton().set_instrument(isis_instrument.SANS2D()) + i.ReductionSingleton().user_settings =isis_reduction_steps.UserFile(sansgui.MASKFILE) + i.ReductionSingleton().user_settings.execute(i.ReductionSingleton()) + + def validate(self): + # there is no workspace to be checked against + return True + +if __name__ == "__main__": + test = SANS2DGUISearchCentre() + test.execute() + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSlicing.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSlicing.py new file mode 100644 index 
0000000000000000000000000000000000000000..9633884aadf7c870fb93d9beffc59decc350e20e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DSlicing.py @@ -0,0 +1,46 @@ +import sys + +if __name__ == "__main__": + # it is just to allow running this test in Mantid, allowing the following import + sys.path.append('/apps/mantid/systemtests/StressTestFramework/') + +import stresstesting + +from mantid.simpleapi import * +import ISISCommandInterface as i + +MASKFILE = FileFinder.getFullPath('MaskSANS2DReductionGUI.txt') +BATCHFILE = FileFinder.getFullPath('sans2d_reduction_gui_batch.csv') + +class SANS2DMinimalBatchReductionSliced(stresstesting.MantidStressTest): + def __init__(self): + super(SANS2DMinimalBatchReductionSliced, self).__init__() + config['default.instrument']='SANS2D' + def runTest(self): + import SANSBatchMode as batch + i.SANS2D() + i.MaskFile(MASKFILE) + i.SetEventSlices("0.0-451, 5-10") + fit_settings = batch.BatchReduce(BATCHFILE, '.nxs',saveAlgs={}, combineDet='rear') + + def validate(self): + self.tolerance = 0.02 + self.tolerance_is_reller=True + return str(mtd['trans_test_rear'][0]), 'SANSReductionGUI.nxs' + +class SANS2DMinimalSingleReductionSliced(SANS2DMinimalBatchReductionSliced): + def runTest(self): + i.SANS2D() + i.MaskFile(MASKFILE) + i.AssignSample('22048') + i.AssignCan('22023') + i.TransmissionSample('22041','22024') + i.TransmissionCan('22024', '22024') + i.SetEventSlices("0.0-450, 5-10") + reduced = i.WavRangeReduction() + RenameWorkspace(reduced, OutputWorkspace='trans_test_rear') + + +if __name__ == "__main__": + test = SANS2DMinimalSingleReductionSliced() + test.execute() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DWaveloops.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DWaveloops.py new file mode 100644 index 0000000000000000000000000000000000000000..1edc294405067163ffb138757a8a0ede3fb7f53f --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANS2DWaveloops.py @@ -0,0 
+1,27 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class SANS2DWaveloops(stresstesting.MantidStressTest): + + def runTest(self): + + SANS2D() + MaskFile('MASKSANS2D.091A') + Gravity(True) + Set1D() + + AssignSample('992.raw') + TransmissionSample('988.raw', '987.raw') + AssignCan('993.raw') + TransmissionCan('989.raw', '987.raw') + + CompWavRanges([3, 5, 7, 11], False) + + def validate(self): + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + # testing one of the workspaces that is produced, best not to choose the + # first one in produced by the loop as this is the least error prone + return '992rear_1D_7.0_11.0','SANS2DWaveloops.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANSCentreSample.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSCentreSample.py new file mode 100644 index 0000000000000000000000000000000000000000..a2057a4f1f1a404c2eaaed5fe616ae7a91cc691a --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSCentreSample.py @@ -0,0 +1,27 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +class SANSCentreSample(stresstesting.MantidStressTest): + + def runTest(self): + + SANS2D() + + Set1D() + Detector("rear-detector") + MaskFile('MASKSANS2D.091A') + + AssignSample('992.raw') + + FindBeamCentre(60, 280, 19, 100.0/1000.0, -200.0/1000.0) + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). 
+ self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return '992_sans_raw','SANSCentreSample.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQBatch.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQBatch.py new file mode 100644 index 0000000000000000000000000000000000000000..4d2c72dd712adf9c3663235e8981ad4ba691a98e --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQBatch.py @@ -0,0 +1,44 @@ +import stresstesting +from mantid.simpleapi import * +from mantid import config +from ISISCommandInterface import * +from SANSBatchMode import * +import os.path + +class SANSLOQBatch(stresstesting.MantidStressTest): + + def runTest(self): + #DataPath("../Data/LOQ/") + #UserPath("../Data/LOQ/") + + #here we are testing the LOQ setup + LOQ() + #rear detector + Detector("main-detector-bank") + #test batch mode, although only the analysis from the last line is checked + # Find the file , this should really be in the BatchReduce reduction step + csv_file = FileFinder.getFullPath('batch_input.csv') + + Set1D() + MaskFile('MASK.094AA') + Gravity(True) + + BatchReduce(csv_file, 'raw', plotresults=False, saveAlgs={'SaveCanSAS1D':'xml','SaveNexus':'nxs'}) + + LoadNexus(Filename='54433sans.nxs',OutputWorkspace= 'result') + Plus(LHSWorkspace='result',RHSWorkspace= '99630sanotrans',OutputWorkspace= 'result') + + os.remove(os.path.join(config['defaultsave.directory'],'54433sans.nxs')) + os.remove(os.path.join(config['defaultsave.directory'],'99630sanotrans.nxs')) + os.remove(os.path.join(config['defaultsave.directory'],'54433sans.xml')) + os.remove(os.path.join(config['defaultsave.directory'],'99630sanotrans.xml')) + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). 
+ self.disableChecking.append('SpectraMap') + self.disableChecking.append('Axes') + self.disableChecking.append('Instrument') + + return 'result','SANSLOQBatch.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQCan2D.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQCan2D.py new file mode 100644 index 0000000000000000000000000000000000000000..1b01a008accb72c238b1099782a8832ea4118006 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLOQCan2D.py @@ -0,0 +1,34 @@ +import stresstesting +from mantid.simpleapi import * +from ISISCommandInterface import * + +# Test is giving odd results on Linux, but only this 2D one. + +class SANSLOQCan2D(stresstesting.MantidStressTest): + + def runTest(self): + + LOQ() + Set2D() + Detector("main-detector-bank") + MaskFile('MASK.094AA') + # apply some small artificial shift + SetDetectorOffsets('REAR', -1.0, 1.0, 0.0, 0.0, 0.0, 0.0) + Gravity(True) + + AssignSample('99630.RAW') + AssignCan('99631.RAW') + + WavRangeReduction(None, None, False) + + + def validate(self): + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file (it's limited to the spectra that + # are actually present in the saved workspace). + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + #when comparing LOQ files you seem to need the following + self.disableChecking.append('Axes') + + return '99630main_2D_2.2_10.0','SANSLOQCan2D.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLoadersTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLoadersTest.py new file mode 100644 index 0000000000000000000000000000000000000000..6bcbcb3ddeb0babaf105d7d86fe2a5569d969021 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SANSLoadersTest.py @@ -0,0 +1,164 @@ +""" +Check the loaders of ISIS SANS reduction. 
It is created as systemtest because it does +take considerable time because it involves loading data. Besides, it uses data that is +currently available inside the systemtests. +""" + +import unittest +import stresstesting +from mantid.simpleapi import * +import isis_reduction_steps as steps +import ISISCommandInterface as ici +import isis_reducer + +class LoadRunTest(unittest.TestCase): + def setUp(self): + config['default.instrument'] = 'SANS2D' + ici.SANS2D() + + + def loadAndAssign(self, run_spec,options=dict()): + loadRun = steps.LoadRun(str(run_spec), **options) + loadRun._assignHelper(ici.ReductionSingleton()) + return loadRun + + def passWsAndAssign(self, ws, options=dict()): + loadRun = steps.LoadRun(ws, **options) + loadRun._assignHelper(ici.ReductionSingleton()) + return loadRun + + + def basicChecks(self, loadRun, file_path, runnum, periods_in_file, ws_name): + self.assertTrue('Data/SANS2D/'+file_path in loadRun._data_file.replace('\\','/'), 'Wrong data file: ' + loadRun._data_file) + self.assertEqual(loadRun.periods_in_file, periods_in_file) + self.assertEqual(loadRun.wksp_name, ws_name) + self.assertEqual(loadRun.shortrun_no, runnum) + + if periods_in_file == 1: + self.assertEqual(loadRun._wksp_name, ws_name) + self.assertTrue(not loadRun.move2ws(0)) + self.assertEqual(loadRun.wksp_name, ws_name) + else: + self.assertTrue(loadRun.move2ws(0)) + self.assertEqual(loadRun.wksp_name, ws_name) + + + + def test_single_period_nxs_file(self): + runnum = 22048 + loadRun = self.loadAndAssign(runnum) + self.basicChecks(loadRun, 'SANS2D00022048.nxs', runnum, 1, '22048_sans_nxs') + + self.assertEqual(loadRun._period, -1) + self.assertEqual(loadRun.ext, 'nxs') + + def test_single_period_raw_file(self): + runnum = 5547 + loadRun = self.loadAndAssign(runnum) + self.basicChecks(loadRun, 'SANS2D0000%d.raw'%(runnum), runnum, 1, '5547_sans_raw') + self.assertEqual(loadRun._period, -1) + self.assertEqual(loadRun.ext, 'raw') + + + def 
test_single_period_from_workspace_reload_true(self): + runnum = 22048 + ws22048 = Load(str(runnum)) + loadRun = self.passWsAndAssign(ws22048) + self.basicChecks(loadRun, 'SANS2D00022048.nxs', runnum, 1, '22048_sans_nxs') + + self.assertEqual(loadRun._period, -1) + self.assertEqual(loadRun.ext, 'nxs') + + def test_single_period_from_workspace_reload_false(self): + runnum = 22048 + ws22048 = Load(str(runnum)) + loadRun = self.passWsAndAssign(ws22048, {'reload':False}) + self.basicChecks(loadRun, 'SANS2D00022048.nxs', runnum, 1, ws22048.name()) + + self.assertEqual(loadRun._period, -1) + self.assertEqual(loadRun.ext, 'nxs') + + def test_single_period_trans_raw(self): + runnum = 988 + loadRun = self.loadAndAssign(runnum, {'trans':True}) + self.basicChecks(loadRun, 'SANS2D00000988.raw', runnum, 1, '988_trans_raw') + self.assertEqual(loadRun._period, -1) + self.assertEqual(loadRun.ext, 'raw') + + def test_multiperiod_nxs_file(self): + runnum = 5512 + loadRun = self.loadAndAssign(runnum) + self.basicChecks(loadRun, 'SANS2D00005512.nxs', runnum, 13, '5512_sans_nxs_1') + self.assertEqual(loadRun._period, -1) + self.assertTrue(loadRun.move2ws(12)) + self.assertEqual(loadRun.wksp_name, '5512_sans_nxs_13') + + def test_multiperiod_from_workspace_reload_false(self): + runnum = 5512 + ws5512 = Load(str(runnum)) + loadRun = self.passWsAndAssign(ws5512, {'reload':False}) + self.basicChecks(loadRun, 'SANS2D00005512.nxs', runnum, 13, ws5512[0].name()) + self.assertEqual(loadRun._period, -1) + self.assertTrue(loadRun.move2ws(12)) + self.assertEqual(loadRun.wksp_name, ws5512[12].name()) + + def test_loading_single_period_in_multiperiod(self): + runnum = 5512 + loadRun = self.loadAndAssign(runnum, {'entry':5}) + name = '5512p5_sans_nxs' + self.basicChecks(loadRun, 'SANS2D00005512.nxs', runnum, 1, name) + self.assertEqual(loadRun._period, 5) + self.assertTrue(not loadRun.move2ws(1)) + self.assertEqual(loadRun.wksp_name, name) + +class LoadSampleTest(unittest.TestCase): + """LoadSample 
extends LoadRun in order to move the workspaces to the defined centre""" + def setUp(self): + config['default.instrument'] = 'SANS2D' + ici.SANS2D() + + def test_single_period_nxs_file(self): + ici.SetCentre(1,-2) + loadSample = steps.LoadSample('22048') + loadSample.execute(ici.ReductionSingleton(), True) + self.assertEqual(loadSample.wksp_name, '22048_sans_nxs') + self.assertTrue(not loadSample.entries) + cur_pos = ici.ReductionSingleton().instrument.cur_detector_position(loadSample.wksp_name) + self.assertAlmostEqual(cur_pos[0],1/1000.0) + self.assertAlmostEqual(cur_pos[1], -2/1000.0) + + def test_multiperiod_nxs_file(self): + ici.SetCentre(1, -2) + loadSample = steps.LoadSample('5512') + loadSample.execute(ici.ReductionSingleton(), True) + self.assertEqual(loadSample.wksp_name, '5512_sans_nxs_1') + self.assertEqual(loadSample.entries, range(0,13)) + for index in [0,5,12]: + loadSample.move2ws(index) + self.assertEqual(loadSample.wksp_name, '5512_sans_nxs_'+str(index+1)) + cur_pos = ici.ReductionSingleton().instrument.cur_detector_position(loadSample.wksp_name) + self.assertAlmostEqual(cur_pos[0], 0.001) + self.assertAlmostEqual(cur_pos[1], -0.002) + + +class LoadSampleTestStressTest(stresstesting.MantidStressTest): + def runTest(self): + self._success = False + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(LoadRunTest, 'test')) + suite.addTest(unittest.makeSuite(LoadSampleTest, 'test')) + runner = unittest.TextTestRunner() + res = runner.run(suite) + if res.wasSuccessful(): + self._success = True + + def requiredMemoryMB(self): + return 2000 + + def validate(self): + return self._success + + + +if __name__ == '__main__': + unittest.main() diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SEQUOIAreduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SEQUOIAreduction.py new file mode 100644 index 0000000000000000000000000000000000000000..8c88add11396e8f14804410784b951be3b0a7339 --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/SEQUOIAreduction.py @@ -0,0 +1,267 @@ +""" +Test the SNS inelatic reduction scripts. +""" + +import stresstesting +import os +import shutil +import glob +import mantid +from mantid.simpleapi import * +from numpy import * + +class DirectInelaticSNSTest(stresstesting.MantidStressTest): + + #setup routines + def topbottom(self): + #create top and bottom mask + LoadEventNexus(Filename='SEQ_12384_event.nxs', OutputWorkspace='mask',CompressTolerance=0.1) + Rebin(InputWorkspace='mask',OutputWorkspace='mask',Params="500,15500,16000",PreserveEvents=False) + w=mtd['mask'] + indexlist=[] + for i in range(w.getNumberHistograms()): + if (i%128) in [0,1,2,3,4,5,6,7,120,121,122,123,124,125,126,127]: + indexlist.append(i) + + MaskDetectors(Workspace='mask',WorkspaceIndexList=indexlist) + SaveNexus(InputWorkspace="mask",Filename = os.path.join(self.customDataDir,"mask_top_bottom.nxs")) + DeleteWorkspace('mask') + + def setupFiles(self): + self.customDataDir = os.path.join(mantid.config['defaultsave.directory'], 'temp') + datasearch = mantid.config.getDataSearchDirs() + filename='' + for d in datasearch: + temp = os.path.join(d, 'SEQ_12384_event.nxs') + if os.path.exists(temp): + filename=temp + self.cleanup() + os.mkdir(self.customDataDir) + shutil.copyfile(filename,os.path.join(self.customDataDir,'SEQ_12384_event.nxs')) + shutil.copyfile(filename,os.path.join(self.customDataDir,'SEQ_12385_event.nxs')) + self.topbottom() + + + #Routines from SNS scripts + def createanglelist(self,ws,amin,amax,astep): + """ + Function to create a map of detectors corresponding to angles in a certain range + """ + bin_angles=arange(amin+astep*0.5,amax+astep*0.5,astep) + a=[[] for i in range(len(bin_angles))] #list of list with detector IDs + w=mtd[ws] + origin = w.getInstrument().getSample().getPos() + for i in range(w.getNumberHistograms()): + ang=w.getDetector(i).getTwoTheta(origin,mantid.kernel.V3D(0,0,1))*180/math.pi + index=int((ang-amin)/astep) + 
if (index>=0) and (index<len(a)) and ((w.getDetector(i).getID())>0): + a[index].append(w.getSpectrum(i).getSpectrumNo()) + #create lists with angles and detector ID only for bins where there are detectors + ang_list=[] + detIDlist=[] + for elem,ang in zip(a,bin_angles): + if len(elem)>0: + detIDlist.append(elem) + ang_list.append(ang) + # file with grouping information + f = open(os.path.join(self.customDataDir,"group.map"),'w') + print >>f,len(ang_list) + for i in range(len(ang_list)): + print >>f,i + print >>f,len(detIDlist[i]) + mystring=str(detIDlist[i]).strip(']').strip('[') + mystring=mystring.replace(',','') + print >>f,mystring + f.close() + # par file + f = open(os.path.join(self.customDataDir,"group.par"),'w') + print >>f,len(ang_list) + for i in range(len(ang_list)): + print >>f,5.5,ang_list[i],0.0,1.0,1.0,1 + f.close() + return [ang_list,detIDlist] + + def GetEiT0(self,ws_name,EiGuess): + """ + Function to get Ei and -T0 + """ + alg=GetEi(InputWorkspace=ws_name,EnergyEstimate=EiGuess)#Run GetEi algorithm + [Ei,Tzero]=[alg[0],-alg[3]] #Extract incident energy and T0 + return [Ei,Tzero] + + def LoadPathMaker(self,runs,folder,prefix,suffix): + """ + Function to create paths to files from runnumbers + return a list of lists with the path, and a corrected list of runs. 
Files in the inner lists are added together + side effects: none + """ + path=[] + newruns=[] + try: + len(runs) + except: + runs=[runs] + for r in runs: + try: + len(r) + except: + r=[r] + temppath=[] + tempnewruns=[] + for i in range(len(r)): + temppath.append(os.path.join(folder,prefix+str(r[i])+suffix)) + tempnewruns.append(r[i]) + if (not(os.path.isfile(temppath[i]))): + raise IOError(temppath[i]+" not found") + path.append(temppath) + newruns.append(tempnewruns) + return [path,newruns] + + def CreateMasksAndVanadiumNormalization(self,vanfile,maskfile=''): + """ + Creates the Van workspace, one bin for each histogram, containing the integrated Vanadium intensity + VAN also contains the mask. + """ + if not os.path.isfile(os.path.join(self.customDataDir, "van.nx5")): + LoadEventNexus(Filename=vanfile,OutputWorkspace="VAN") + + Rebin(InputWorkspace="VAN",OutputWorkspace="VAN",Params="1000,15000,16000",PreserveEvents=False) #integrate all events between 1000 and 16000 microseconds + NormaliseByCurrent(InputWorkspace="VAN",OutputWorkspace="VAN") #normalize by proton charge + MedianDetectorTest(InputWorkspace="VAN",OutputWorkspace="MASK",SignificanceTest=100,HighThreshold =100) #determine which detectors to mask, and store them in the "MASK" workspace + if len(maskfile)>0: + LoadNexus(Filename=maskfile,OutputWorkspace="temp_mask") + MaskDetectors(Workspace="MASK",MaskedWorkspace="temp_mask") #add detectors masked in "temp_mask" to "MASK" + DeleteWorkspace(Workspace="temp_mask") + MaskDetectors(Workspace="VAN",MaskedWorkspace="MASK") #Mask "VAN". 
This prevents dividing by 0 + DeleteWorkspace(Workspace="MASK") #Mask is carried by VAN workspace + SaveNexus(InputWorkspace="VAN",Filename=os.path.join(self.customDataDir,"van.nx5")) + else: + LoadNexus(Filename=os.path.join(self.customDataDir,"van.nx5"),OutputWorkspace="VAN") + + + #functions from stresstesting + def requiredFiles(self): + return ['SEQ_12384_event.nxs'] + + + def cleanup(self): + for ws in ['IWS', 'OWST', 'VAN', 'monitor_ws']: + if mantid.AnalysisDataService.doesExist(ws): + DeleteWorkspace(ws) + if os.path.exists(self.customDataDir): + shutil.rmtree(self.customDataDir) + + def runTest(self): + self.setupFiles() + runs=[[12384,12385]] + maskfile = os.path.join(self.customDataDir,'mask_top_bottom.nxs') + V_file=os.path.join(self.customDataDir, 'SEQ_12384_event.nxs') + Eguess=35.0 #initial energy guess + Erange="-10.0,0.25,32.0" #Energy bins: Emin,Estep,Emax + datadir=self.customDataDir #Data directory + outdir=self.customDataDir #Output directory + fout_prefix="Ei_35.0_" + ang_offset=0.0 + angle_name='SEOCRot' #Name of the angle to read + maskandnormalize=True #flag to do the masking and normalization to Vanadium + flag_spe=False #flag to generate an spe file + flag_nxspe=True #flag to generate an nxspe file + do_powder=True #group detectors by angle + anglemin=0. #minumum angle + anglemax=70. #maximum angle + anglestep=1. 
#angle step - this can be fine tuned for pixel arc over detectors + + if (maskandnormalize): + self.CreateMasksAndVanadiumNormalization(V_file,maskfile=maskfile) #Creates a worspaces for Vanadium normalization and masking + + [paths,runs]=self.LoadPathMaker(runs,self.customDataDir,'SEQ_','_event.nxs') #process teh runlist + for flist,rlist,i in zip(paths,runs,range(len(paths))): #rlist is the inner list of runnumbers + psitmp=[] + for f,j in zip(flist,range(len(flist))): + if (j==0): + LoadEventNexus(Filename=f,OutputWorkspace="IWS") #Load an event Nexus file + LoadNexusMonitors(Filename=f,OutputWorkspace="monitor_ws") #Load monitors + else: + LoadEventNexus(Filename=f,OutputWorkspace="IWS_temp") #Load an event Nexus file + LoadNexusMonitors(Filename=f,OutputWorkspace="monitor_ws_temp") #Load monitors + Plus(LHSWorkspace="IWS",RHSWorkspace="IWS_temp",OutputWorkspace="IWS") #Add events to the original workspcace + Plus(LHSWorkspace="monitor_ws",RHSWorkspace="monitor_ws_temp",OutputWorkspace="monitor_ws") #Add monitors to the original monitor workspcace + #cleanup + DeleteWorkspace("IWS_temp") + DeleteWorkspace("monitor_ws_temp") + w=mtd["IWS"] + psi=array(w.getRun()[angle_name].value).mean()+ang_offset + FilterBadPulses(InputWorkspace="IWS",OutputWorkspace = "IWS",LowerCutoff = 50) # get psi before filtering bad pulses + [Efixed,T0]=self.GetEiT0("monitor_ws",Eguess) #Get Ei and -T0 using the function defined before + ChangeBinOffset(InputWorkspace="IWS",OutputWorkspace="OWS",Offset=T0) #Change all TOF by -T0 + NormaliseByCurrent(InputWorkspace="OWS",OutputWorkspace="OWS") #normalize by proton charge + ConvertUnits(InputWorkspace="OWS",OutputWorkspace="OWS",Target="Wavelength",EMode="Direct",EFixed=Efixed) #The algorithm for He3 tube efficiency requires wavelength units + He3TubeEfficiency(InputWorkspace="OWS",OutputWorkspace="OWS") #Apply correction due to absorption in He3 + 
ConvertUnits(InputWorkspace="OWS",OutputWorkspace="OWS",Target="DeltaE",EMode="Direct",EFixed=Efixed) #Switch to energy transfer + CorrectKiKf(InputWorkspace="OWS",OutputWorkspace="OWS") # apply ki/kf correction + Rebin(InputWorkspace="OWS",OutputWorkspace="OWST",Params=Erange,PreserveEvents=False) # go to histogram mode (forget events) + ConvertToDistribution(Workspace="OWST") #Convert to differential cross section by dividing by the energy bin width + DeleteWorkspace("OWS") + if (maskandnormalize): + MaskDetectors(Workspace="OWST",MaskedWorkspace="VAN") #apply overall mask + # the following is commented, since it's the same run, not a real vanadium + #Divide(LHSWorkspace="OWST",RHSWorkspace="VAN",OutputWorkspace="OWST") #normalize by Vanadium, if desired + if (do_powder): + if (i==0): + mapping=self.createanglelist("OWST",anglemin,anglemax,anglestep) + GroupDetectors(InputWorkspace="OWST",OutputWorkspace="OWST",MapFile=os.path.join(self.customDataDir,"group.map"),Behaviour="Sum") + SolidAngle(InputWorkspace="OWST",OutputWorkspace="sa") + Divide(LHSWorkspace="OWST",RHSWorkspace="sa",OutputWorkspace="OWST") + DeleteWorkspace("sa") + barefname = "%s%d_%g" % (fout_prefix,rlist[0],psi) + fname_out = os.path.join(outdir, barefname) + if flag_spe: + SaveSPE(InputWorkspace="OWST",Filename=fname_out+".spe") #save the data in spe format. 
+ if (i==0): + SavePHX(InputWorkspace="OWST",Filename=fname_out+".spe") + if flag_nxspe: + #save in NXSPE format + nxspe_name = fname_out+".nxspe" + self._nxspe_filename = nxspe_name + if (do_powder): + SaveNXSPE(InputWorkspace="OWST",Filename=nxspe_name,Efixed=Efixed,psi=psi,KiOverKfScaling=True, + ParFile=os.path.join(outdir, "group.par")) + else: + SaveNXSPE(InputWorkspace="OWST",Filename=nxspe_name,Efixed=Efixed,psi=psi,KiOverKfScaling=True) + + def validate(self): + #check if required files are created + mapfile = os.path.join(self.customDataDir, 'group.map') + parfile = os.path.join(self.customDataDir, 'group.par') + self.assertTrue(os.path.exists(mapfile)) + self.assertDelta(os.path.getsize(mapfile),700000,100000) + self.assertTrue(os.path.exists(parfile)) + self.assertGreaterThan(os.path.getsize(parfile),1000) + vanadiumfile = os.path.join(self.customDataDir, 'van.nx5') + self.assertTrue(os.path.exists(vanadiumfile)) + self.assertGreaterThan(os.path.getsize(vanadiumfile),10000000) + + # Check saved file (there should only be one) + #find the nxspe filename: it should be only one, but the name might depend on the rounding of phi + nxspelist=glob.glob(os.path.join(self.customDataDir,'*.nxspe')) + if len(nxspelist)>1 or len(nxspelist) == 0: + print "Error: Expected single nxspe file in %s. 
Found %d" % (self.customDataDir, len(nxspelist)) + return False + + # Name encodes rotation + self.assertGreaterThan(os.path.getsize(self._nxspe_filename),100000) + psi_part=self._nxspe_filename.split('12384_')[1] + psi_param=float(psi_part.split('.nxspe')[0]) + self.assertDelta(psi_param,-24,0.01) + + #input workspace + self.assertLessThan(mtd["IWS"].getNumberEvents(),100000) + self.assertGreaterThan(mtd["IWS"].getNumberEvents(),90000) + + # Need to disable checking of the Spectra-Detector map because it isn't + # fully saved out to the nexus file; some masked detectors should be picked + # up with by the mask values in the spectra + self.disableChecking.append('SpectraMap') + self.disableChecking.append('Instrument') + return "OWST",'SEQUOIAReduction.nxs' + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SNSConvertToMDTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SNSConvertToMDTest.py new file mode 100644 index 0000000000000000000000000000000000000000..c711a1713874628fefda182c1fc078e636eb0876 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SNSConvertToMDTest.py @@ -0,0 +1,204 @@ +import stresstesting +import numpy +import os +from mantid.simpleapi import * + +###################################################################### +# Common configuration +# Main data file /SNS/SEQ/IPTS-4783/data +DATA_FILE = "SEQ_11499_event.nxs" +# Vanadium file +VAN_FILE = "SEQ_van.nxs" +# Initial energy guess +E_GUESS = 50 +# Energy bins: Emin, Estep, Emax +E_RANGE = "-10.0,0.2,45.0" +####################################################################### + +def makeOutputName(ws_name, dohist, doproj): + md_ws_name = ws_name + '_md' + tag="" + if dohist: + tag += "h" + else: + tag += "e" + if doproj: + tag += "wp" + else: + tag += "np" + + md_ws_name += "_" + tag + return md_ws_name + +def execReduction(dohist, doproj): + # Set the facility + config['default.facility'] = "SNS" + # SPE workspace name + workspace_name = "reduced" + # Run 
the reduction + DgsReduction(SampleInputFile=DATA_FILE, + IncidentBeamNormalisation="ByCurrent", + OutputWorkspace=workspace_name, + IncidentEnergyGuess=E_GUESS, + EnergyTransferRange=E_RANGE, + SofPhiEIsDistribution=dohist, + DetectorVanadiumInputFile=VAN_FILE, + UseProcessedDetVan=True) + + # Set the goniometer. Add a rotation angle fix as well. + SetGoniometer(Workspace=workspace_name, Axis0="CCR13VRot,0,1,0,1", + Axis1="49.73,0,1,0,1") + + # Set the information for the UB matrix + SetUB(Workspace=workspace_name, + a=3.643, b=3.643, c=5.781, alpha=90, beta=90, gamma=120, + u='1,1,0', v='0,0,1') + + # Create the MDEventWorkspace + md_output_ws = makeOutputName(workspace_name, dohist, doproj) + + if not doproj: + ConvertToMD(InputWorkspace=workspace_name, + OutputWorkspace=md_output_ws, + QDimensions='Q3D', MinValues='-5,-5,-5,-10', + QConversionScales='HKL', + MaxValues='5,5,5,45', MaxRecursionDepth='1') + else: + ConvertToMD(InputWorkspace=workspace_name, + OutputWorkspace=md_output_ws, + QDimensions='Q3D', MinValues='-5,-5,-5,-10', + QConversionScales='HKL', + MaxValues='5,5,5,45', MaxRecursionDepth='1', + Uproj='1,1,0', Vproj='1,-1,0', Wproj='0,0,1') + + # Remove SPE workspace + DeleteWorkspace(Workspace=workspace_name) + + return md_output_ws + +def validateMD(result,reference,tol=1.e-5,class_name='dummy',mismatchName=None): + """Returns the name of the workspace & file to compare""" + #elf.disableChecking.append('SpectraMap') + #elf.disableChecking.append('Instrument') + + valNames = [result,reference] + from mantid.simpleapi import Load,CompareMDWorkspaces,FrameworkManager,SaveNexus + + if not (reference in mtd): + Load(Filename=reference,OutputWorkspace=valNames[1]) + + checker = AlgorithmManager.create("CompareMDWorkspaces") + checker.setLogging(True) + checker.setPropertyValue("Workspace1",result) + checker.setPropertyValue("Workspace2",valNames[1]) + checker.setPropertyValue("Tolerance", str(tol)) + checker.setPropertyValue("IgnoreBoxID", "1") + 
checker.setPropertyValue("CheckEvents", "1") + + checker.execute() + if checker.getPropertyValue("Equals") != "1": + print " Workspaces do not match, result: ",checker.getPropertyValue("Result") + print " Test {0} fails".format(class_name) + if mismatchName: + targetFilename = class_name+mismatchName+'-mismatch.nxs' + else: + targetFilename = class_name+'-mismatch.nxs' + + SaveMD(InputWorkspace=valNames[0],Filename=targetFilename ) + return False + else: + return True; + + + +class SNSConvertToMDNoHistNoProjTest(stresstesting.MantidStressTest): + truth_file = "SEQ_11499_md_enp.nxs" + + def requiredMemoryMB(self): + """ Require about 2.5GB free """ + return 2500 + + def requiredFiles(self): + files = [self.truth_file, DATA_FILE] + return files + + def runTest(self): + self.output_ws = execReduction(False, False) + + self.gold_ws_name = self.truth_file.split('.')[0] + "_golden" + LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name) + + + def validate(self): + self.tolerance = 1.0e-1 + return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__); + +class SNSConvertToMDHistNoProjTest(stresstesting.MantidStressTest): + truth_file = "SEQ_11499_md_hnp.nxs" + + def requiredMemoryMB(self): + """ Require about 2.5GB free """ + return 2500 + + def requiredFiles(self): + config.appendDataSearchDir("/home/builder/data/SystemTests/AnalysisTests/ReferenceResults/"); + files = [self.truth_file, DATA_FILE] + return files + + def runTest(self): + self.output_ws = execReduction(True, False) + + self.gold_ws_name = self.truth_file.split('.')[0] + "_golden" + LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name) + + def validate(self): + self.tolerance = 1.0e-1 + return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name); + +class SNSConvertToMDNoHistProjTest(stresstesting.MantidStressTest): + truth_file = "SEQ_11499_md_ewp.nxs" + + def requiredMemoryMB(self): + """ Require about 2.5GB free 
""" + return 2500 + + def requiredFiles(self): + files = [self.truth_file, DATA_FILE] + return files + + def runTest(self): + self.output_ws = execReduction(False, True) + + self.gold_ws_name = self.truth_file.split('.')[0] + "_golden" + LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name) + + + def validate(self): + self.tolerance = 1.0e-3 + return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name); + #return (self.output_ws, self.gold_ws_name) + +class SNSConvertToMDHistProjTest(stresstesting.MantidStressTest): + truth_file = "SEQ_11499_md_hwp.nxs" + + def requiredMemoryMB(self): + """ Require about 2.5GB free """ + return 2500 + + def requiredFiles(self): + config.appendDataSearchDir("/home/builder/data/SystemTests/AnalysisTests/ReferenceResults/"); + files = [self.truth_file, DATA_FILE] + return files + + def runTest(self): + self.output_ws = execReduction(True, True) + + self.gold_ws_name = self.truth_file.split('.')[0] + "_golden" + LoadMD(self.truth_file, OutputWorkspace=self.gold_ws_name) + + + def validate(self): + self.tolerance = 1.0e-3 + return validateMD(self.output_ws, self.gold_ws_name,self.tolerance,self.__class__.__name__,self.gold_ws_name); + #return (self.output_ws, self.gold_ws_name) + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SNSPowderRedux.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SNSPowderRedux.py new file mode 100644 index 0000000000000000000000000000000000000000..4f8878a0c6ae75bb8fb5c6db330547aac22b1ac7 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SNSPowderRedux.py @@ -0,0 +1,234 @@ +import stresstesting +from mantid.simpleapi import * +from mantid.api import FileFinder + +import os + +def getSaveDir(): + """determine where to save - the current working directory""" + import os + return os.path.abspath(os.path.curdir) + +def do_cleanup(): + Files = ["PG3_9829.gsa", + "PG3_9829.py", + "PG3_9830.gsa", + "PG3_9830.py"] + for file in 
Files: + absfile = FileFinder.getFullPath(file) + if os.path.exists(absfile): + os.remove(absfile) + return True + +class PG3Analysis(stresstesting.MantidStressTest): + ref_file = 'PG3_4844_reference.gsa' + cal_file = "PG3_FERNS_d4832_2011_08_24.cal" + char_file = "PG3_characterization_2011_08_31-HR.txt" + + def cleanup(self): + do_cleanup() + return True + + def requiredFiles(self): + files = [self.ref_file, self.cal_file, self.char_file] + files.append("PG3_4844_event.nxs") # /SNS/PG3/IPTS-2767/0/ + files.append("PG3_4866_event.nxs") # /SNS/PG3/IPTS-2767/0/ + files.append("PG3_5226_event.nxs") # /SNS/PG3/IPTS-2767/0/ + return files + + def runTest(self): + savedir = getSaveDir() + + # run the actual code + SNSPowderReduction(Instrument="PG3", RunNumber=4844, Extension="_event.nxs", + PreserveEvents=True, + CalibrationFile=self.cal_file, + CharacterizationRunsFile=self.char_file, + LowResRef=15000, RemovePromptPulseWidth=50, + Binning=-0.0004, BinInDspace=True, FilterBadPulses=95, + SaveAs="gsas and fullprof and pdfgetn", OutputDirectory=savedir, + FinalDataUnits="dSpacing") + + + # load output gsas file and the golden one + LoadGSS(Filename="PG3_4844.gsa", OutputWorkspace="PG3_4844") + LoadGSS(Filename=self.ref_file, OutputWorkspace="PG3_4844_golden") + + def validateMethod(self): + self.tolerance = 1.0e-2 + return "ValidateWorkspaceToWorkspace" + + def validate(self): + self.tolerance = 1.0e-2 + return ('PG3_4844','PG3_4844_golden') + +class PG3StripPeaks(stresstesting.MantidStressTest): + ref_file = 'PG3_4866_reference.gsa' + cal_file = "PG3_FERNS_d4832_2011_08_24.cal" + + def cleanup(self): + do_cleanup() + return True + + def requiredFiles(self): + files = [self.ref_file, self.cal_file] + files.append("PG3_4866_event.nxs") # vanadium + return files + + def runTest(self): + # determine where to save + import os + savedir = os.path.abspath(os.path.curdir) + + LoadEventNexus(Filename="PG3_4866_event.nxs", + OutputWorkspace="PG3_4866", + Precount=True) + 
FilterBadPulses(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866") + RemovePromptPulse(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Width=50) + CompressEvents(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Tolerance=0.01) + SortEvents(InputWorkspace="PG3_4866") + CropWorkspace(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + XMax=16666.669999999998) + LoadCalFile(InputWorkspace="PG3_4866", + CalFilename=self.cal_file, + WorkspaceName="PG3") + MaskDetectors(Workspace="PG3_4866", + MaskedWorkspace="PG3_mask") + AlignDetectors(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + OffsetsWorkspace="PG3_offsets") + ConvertUnits(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Target="TOF") + UnwrapSNS(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + LRef=62) + RemoveLowResTOF(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + ReferenceDIFC=1500) + ConvertUnits(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Target="dSpacing") + Rebin(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Params=(0.1,-0.0004,2.2)) + SortEvents(InputWorkspace="PG3_4866") + DiffractionFocussing(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + GroupingWorkspace="PG3_group") + EditInstrumentGeometry(Workspace="PG3_4866", + PrimaryFlightPath=60, + SpectrumIDs=[1], + L2=[3.2208], + Polar=[90.8074], + Azimuthal=[0]) + ConvertUnits(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Target="TOF") + Rebin(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Params=[-0.0004]) + ConvertUnits(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Target="dSpacing") + StripVanadiumPeaks(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + PeakPositionTolerance=0.05, + FWHM=8, + BackgroundType="Quadratic") + ConvertUnits(InputWorkspace="PG3_4866", + OutputWorkspace="PG3_4866", + Target="TOF") + SaveGSS(InputWorkspace="PG3_4866", + Filename=os.path.join(savedir, "PG3_4866.gsa"), + 
SplitFiles=False, + Append=False, + Format="SLOG", + MultiplyByBinWidth=False, + ExtendedHeader=True) + + # load output gsas file and the golden one + LoadGSS(Filename="PG3_4866.gsa", OutputWorkspace="PG3_4866") + LoadGSS(Filename=self.ref_file, OutputWorkspace="PG3_4866_golden") + + def validateMethod(self): + self.tolerance = 1.0e-2 + return "ValidateWorkspaceToWorkspace" + + def validate(self): + self.tolerance = 1.0e-2 + return ('PG3_4866','PG3_4866_golden') + +class SeriesAndConjoinFilesTest(stresstesting.MantidStressTest): + cal_file = "PG3_FERNS_d4832_2011_08_24.cal" + char_file = "PG3_characterization_2012_02_23-HR-ILL.txt" + ref_files = ['PG3_9829_reference.gsa', 'PG3_9830_reference.gsa'] + data_files = ['PG3_9829_event.nxs', 'PG3_9830_event.nxs'] + + def cleanup(self): + do_cleanup() + return True + + def requiredMemoryMB(self): + """Requires 3Gb""" + return 3000 + + def requiredFiles(self): + files = [self.cal_file, self.char_file] + files.extend(self.ref_files) + files.extend(self.data_files) + return files + + def runTest(self): + savedir = getSaveDir() + + # reduce a sum of runs - and drop it + SNSPowderReduction(Instrument="PG3", RunNumber=[9829,9830], Extension="_event.nxs", + Sum=True, # This is the difference with the next call + PreserveEvents=True, VanadiumNumber=-1, + CalibrationFile=self.cal_file, + CharacterizationRunsFile=self.char_file, + LowResRef=15000, RemovePromptPulseWidth=50, + Binning=-0.0004, BinInDspace=True, FilterBadPulses=True, + SaveAs="gsas", OutputDirectory=savedir, + FinalDataUnits="dSpacing") + + # reduce a series of runs + SNSPowderReduction(Instrument="PG3", RunNumber=[9829,9830], Extension="_event.nxs", + PreserveEvents=True, VanadiumNumber=-1, + CalibrationFile=self.cal_file, + CharacterizationRunsFile=self.char_file, + LowResRef=15000, RemovePromptPulseWidth=50, + Binning=-0.0004, BinInDspace=True, FilterBadPulses=True, + SaveAs="gsas", OutputDirectory=savedir, + FinalDataUnits="dSpacing") + + # needs to be set for 
ConjoinFiles to work + config['default.facility'] = 'SNS' + config['default.instrument'] = 'POWGEN' + + # load back in the resulting gsas files + ConjoinFiles(RunNumbers=[9829,9830], OutputWorkspace='ConjoinFilesTest', Directory=savedir) + # convert units makes sure the geometry was picked up + ConvertUnits(InputWorkspace='ConjoinFilesTest', OutputWorkspace='ConjoinFilesTest', + Target="dSpacing") + + # prepare for validation + LoadGSS(Filename="PG3_9829.gsa", OutputWorkspace="PG3_9829") + LoadGSS(Filename=self.ref_files[0], OutputWorkspace="PG3_9829_golden") + #LoadGSS("PG3_9830.gsa", "PG3_9830") # can only validate one workspace + #LoadGSS(self.ref_files[1], "PG3_9830_golden") + + def validateMethod(self): + return None # it running is all that we need + + def validate(self): + self.tolerance = 1.0e-2 + return ('PG3_9829','PG3_9829_golden') + #return ('PG3_9830','PG3_9830_golden') # can only validate one workspace diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SXDAnalysis.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SXDAnalysis.py new file mode 100644 index 0000000000000000000000000000000000000000..c6f6f6e4bee20d6e54bd0cac2869f9de920e7b50 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SXDAnalysis.py @@ -0,0 +1,54 @@ +import stresstesting +from mantid.simpleapi import * + +class SXDAnalysis(stresstesting.MantidStressTest): + """ + Start of a system test for SXD data analysis + """ + + def runTest(self): + + ws = Load(Filename='SXD23767.raw', LoadMonitors='Exclude') + #AddSampleLog(Workspace=ws,LogName='NUM_THREADS',LogText='0',LogType='Number') + from time import clock + + # A lower SplitThreshold, with a reasonable bound on the recursion depth, helps find weaker peaks at higher Q. 
+ start = clock(); + QLab = ConvertToDiffractionMDWorkspace(InputWorkspace=ws, OutputDimensions='Q (lab frame)', SplitThreshold=50, LorentzCorrection='1',MaxRecursionDepth='13',Extents='-15,15,-15,15,-15,15',OneEventPerBin='0') + print " ConvertToMD runs for: ",clock()-start,' sec' + + # NaCl has a relatively small unit cell, so the distance between peaks is relatively large. Setting the PeakDistanceThreshold + # higher avoids finding high count regions on the sides of strong peaks as separate peaks. + peaks_qLab = FindPeaksMD(InputWorkspace='QLab', MaxPeaks=300, DensityThresholdFactor=10, PeakDistanceThreshold=1.0) + + FindUBUsingFFT(PeaksWorkspace=peaks_qLab, MinD='3', MaxD='5',Tolerance=0.08) + + out_params = IndexPeaks(PeaksWorkspace=peaks_qLab,Tolerance=0.12,RoundHKLs=1) + number_peaks_indexed = out_params[0] + ratio_indexed = float(number_peaks_indexed)/peaks_qLab.getNumberPeaks() + self.assertTrue(ratio_indexed >= 0.8, "Not enough peaks indexed. Ratio indexed : " + str(ratio_indexed)) + + ShowPossibleCells(PeaksWorkspace=peaks_qLab,MaxScalarError='0.5') + SelectCellOfType(PeaksWorkspace=peaks_qLab, CellType='Cubic', Centering='F', Apply=True) + + unitcell_length = 5.64 # Angstroms + unitcell_angle = 90 + length_tolerance = 0.1 + # + angle_tolelerance = 0.25 # Actual tolernce seems is 0.17 + # + # Check results. 
+ latt = peaks_qLab.sample().getOrientedLattice() + self.assertDelta( latt.a(), unitcell_length, length_tolerance, "a length is different from expected") + self.assertDelta( latt.b(), unitcell_length, length_tolerance, "b length is different from expected") + self.assertDelta( latt.c(), unitcell_length, length_tolerance, "c length is different from expected") + self.assertDelta( latt.alpha(), unitcell_angle, angle_tolelerance, "alpha angle is different from expected") + self.assertDelta( latt.beta(), unitcell_angle, angle_tolelerance, "beta angle is different from expected") + self.assertDelta( latt.gamma(), unitcell_angle, angle_tolelerance, "gamma angle length is different from expected") + + def doValidation(self): + # If we reach here, no validation failed + return True + def requiredMemoryMB(self): + """Far too slow for managed workspaces. They're tested in other places. Requires 2Gb""" + return 1000 diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SpaceGroupFactoryTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SpaceGroupFactoryTest.py new file mode 100644 index 0000000000000000000000000000000000000000..b3a653e56f1edf8b38fc3921fa9ecc542fab56df --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SpaceGroupFactoryTest.py @@ -0,0 +1,52 @@ +import stresstesting +import os +import re +from mantid.simpleapi import * +from mantid.geometry import * + +'''Check that the space groups generated by Mantid are correct.''' +class SpaceGroupFactoryTest(stresstesting.MantidStressTest): + def runTest(self): + self.spaceGroupData = self.loadReferenceData() + + availableSpaceGroups = SpaceGroupFactoryImpl.Instance().allSubscribedSpaceGroupSymbols() + + for symbol in availableSpaceGroups: + self.checkSpaceGroup(symbol) + + def checkSpaceGroup(self, symbol): + group = SpaceGroupFactoryImpl.Instance().createSpaceGroup(symbol) + + groupOperations = set(group.getSymmetryOperationStrings()) + referenceOperations = self.spaceGroupData[group.number()] 
+ + differenceOne = groupOperations - referenceOperations + differenceTwo = referenceOperations - groupOperations + + self.assertTrue(len(differenceOne) == 0, "Problem in space group " + str(group.number()) + " (" + symbol + ")") + self.assertTrue(len(differenceTwo) == 0, "Problem in space group " + str(group.number()) + " (" + symbol + ")") + self.assertTrue(groupOperations == referenceOperations, "Problem in space group " + str(group.number()) + " (" + symbol + ")") + + def loadReferenceData(self): + # Reference data. + # Dictionary has a string set for each space group number. + separatorMatcher = re.compile("(\d+)") + + fileName = os.path.join(os.path.dirname(__file__), 'ReferenceResults','SpaceGroupSymmetryOperations.txt') + + print fileName + + fileHandle = open(fileName, 'r') + spaceGroups = {} + currentGroup = 0 + for currentLine in fileHandle: + matchedSeparator = separatorMatcher.match(currentLine) + + if matchedSeparator is not None: + currentGroup = int(matchedSeparator.group(1)) + spaceGroups[currentGroup] = set() + else: + spaceGroups[currentGroup].add(SymmetryOperationFactoryImpl.Instance().createSymOp(currentLine.strip().replace(" ", "")).identifier()) + + return spaceGroups + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py new file mode 100644 index 0000000000000000000000000000000000000000..42030bcf7a64dbf2f59d9b0b54fba2281e3aacee --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py @@ -0,0 +1,107 @@ +""" +Some of the sphinx warnings come from the C++ code, from the properties of the algorithms or from the summary string +This test tries to detect the most common such errors. +It also detects if a new category is created (i.e. 
someone uses Utilities instead of Utility) +""" +import stresstesting +import mantid +import re + +class SphinxWarnings(stresstesting.MantidStressTest): + def __init__(self): + stresstesting.MantidStressTest.__init__(self) + self.allowedCategories=['Arithmetic', + 'CorrectionFunctions', + 'Crystal', + 'DataHandling', + 'Diagnostics', + 'Diffraction', + 'Events', + 'Examples', + 'ISIS', + 'Inelastic', + 'MDAlgorithms', + 'MPI', + 'Muon', + 'Optimization', + 'PythonAlgorithms', + 'Quantification', + 'Reflectometry', + 'Remote', + 'SANS', + 'SINQ', + 'Sample', + 'Transforms', + 'Utility', + 'Workflow'] + self.errorMessage="" + + def checkString(self,s): + tocheck=s + outputString='' + #replace strong emphasis: Space**NotSpaceText** + sub=re.compile(r' \*\*[^ ].+?\*\*') + for i in sub.findall(tocheck): + tocheck=tocheck.replace(i," ") + #replace emphasis: Space*NotSpaceText* + sub=re.compile(r' \*[^ ].+?\*') + for i in sub.findall(tocheck): + tocheck=tocheck.replace(i," ") + #replace correctly named hyperlinks: Space`Name link>`__ + sub=re.compile(r' \`.+? <.+?.\`__') + for i in sub.findall(tocheck): + tocheck=tocheck.replace(i," ") + + #find strong emphasis errors + sub=re.compile(r' \*\*[^ ]+') + result=sub.findall(tocheck) + if len(result)>0: + outputString+="Strong emphasis error: "+str(result)+"\n" + #find emphasis errors + sub=re.compile(r' \*[^ ]+') + result=sub.findall(tocheck) + if len(result)>0: + outputString+="Emphasis error: "+str(result)+"\n" + #find potentially duplicate named hyperlinks + sub=re.compile(r' \`.+? 
<.+?.\`_') + result=sub.findall(tocheck) + if len(result)>0: + outputString+="Potentially unsafe named hyperlink: "+str(result)+"\n" + #find potentially wrong substitutions + sub=re.compile(r'\|.+?\|') + result=sub.findall(tocheck) + if len(result)>0: + outputString+="Potentially unsafe substitution: "+str(result)+"\n" + return outputString + + def runTest(self): + algs = mantid.AlgorithmFactory.getRegisteredAlgorithms(True) + for (name, versions) in algs.iteritems(): + for version in versions: + if mantid.api.DeprecatedAlgorithmChecker(name,version).isDeprecated()=='': + # get an instance + alg = mantid.AlgorithmManager.create(name, version) + #check categories + for cat in alg.categories(): + if cat.split("\\")[0] not in self.allowedCategories: + self.errorMessage+=name+" "+str(version)+" Category: "+cat.split("\\")[0]+" is not in the allowed list. If you need this category, please add it to the systemtest.\n" + #check summary + summary=alg.summary() + result=self.checkString(summary) + if len(result)>0: + self.errorMessage+=name+" "+str(version)+" Summary: "+result+"\n" + #check properties + properties=alg.getProperties() + for prop in properties: + propName=prop.name + propDoc=prop.documentation + result=self.checkString(propDoc) + if len(result)>0: + self.errorMessage+=name+" "+str(version)+" Property: "+propName+" Documentation: "+result +"\n" + + def validate(self): + if self.errorMessage!="": + print "Found the following errors:\n",self.errorMessage + return False + + return True diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/StepScan.py b/Code/Mantid/Testing/SystemTests/tests/analysis/StepScan.py new file mode 100644 index 0000000000000000000000000000000000000000..3e892652ecdc2793a21c1e74433065b74f8651e6 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/StepScan.py @@ -0,0 +1,13 @@ +import stresstesting +from mantid.simpleapi import * + +'''Tests the StepScan workflow algorithm''' +class 
StepScanWorkflowAlgorithm(stresstesting.MantidStressTest): + + def runTest(self): + LoadMask(Instrument='HYS',InputFile=r'HYSA_mask.xml',OutputWorkspace='HYSA_mask') + Load(Filename='HYSA_2934.nxs.h5',OutputWorkspace='HYSA_2934',LoadMonitors='1') + StepScan(InputWorkspace='HYSA_2934',OutputWorkspace='StepScan',MaskWorkspace='HYSA_mask',XMin='3.25',XMax='3.75',RangeUnit='dSpacing') + + def validate(self): + return 'StepScan','StepScan.nxs' diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SurfLoadingTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SurfLoadingTest.py new file mode 100644 index 0000000000000000000000000000000000000000..65b217dc4c5361772fc47e31e3bdb048b0f432b0 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SurfLoadingTest.py @@ -0,0 +1,23 @@ +from LoadAndCheckBase import * + +''' +Test File loading and basic data integrity checks of SURF data in Mantid. +''' +class SurfLoadingTest(LoadAndCheckBase): + def get_raw_workspace_filename(self): + return "SRF92132.raw" + + def get_nexus_workspace_filename(self): + return "SRF92132.nxs" + + def get_expected_number_of_periods(self): + return 22 + + def get_integrated_reference_workspace_filename(self): + return "SRF92132_1Integrated.nxs" + + def get_expected_instrument_name(self): + return "SURF" + + def enable_instrument_checking(self): + return True # No IDF in Mantid \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/TOPAZPeakFinding.py b/Code/Mantid/Testing/SystemTests/tests/analysis/TOPAZPeakFinding.py new file mode 100644 index 0000000000000000000000000000000000000000..1275e600db0dece17f95d99f34ac764b46936ab5 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/TOPAZPeakFinding.py @@ -0,0 +1,95 @@ +""" +System test that loads TOPAZ single-crystal data, +converts to Q space, finds peaks and indexes +them. 
+""" +import stresstesting +import numpy +from mantid.simpleapi import * + +class TOPAZPeakFinding(stresstesting.MantidStressTest): + + def requiredMemoryMB(self): + """ Require about 2GB free """ + return 2000 + + def runTest(self): + # Load then convert to Q in the lab frame + LoadEventNexus(Filename=r'TOPAZ_3132_event.nxs',OutputWorkspace='topaz_3132') + ConvertToDiffractionMDWorkspace(InputWorkspace='topaz_3132',OutputWorkspace='topaz_3132_MD',LorentzCorrection='1',SplitInto='2',SplitThreshold='150',OneEventPerBin='0') + + # Find peaks and UB matrix + FindPeaksMD(InputWorkspace='topaz_3132_MD',PeakDistanceThreshold='0.12',MaxPeaks='200',OutputWorkspace='peaks') + FindUBUsingFFT(PeaksWorkspace='peaks',MinD='2',MaxD='16') + + # Index the peaks and check + results = IndexPeaks(PeaksWorkspace='peaks') + indexed = results[0] + if indexed < 199: + raise Exception("Expected at least 199 of 200 peaks to be indexed. Only indexed %d!" % indexed) + + # Check the oriented lattice + CopySample(InputWorkspace='peaks',OutputWorkspace='topaz_3132',CopyName='0',CopyMaterial='0',CopyEnvironment='0',CopyShape='0') + originalUB = numpy.array(mtd["topaz_3132"].sample().getOrientedLattice().getUB()) + w = mtd["topaz_3132"] + s = w.sample() + ol = s.getOrientedLattice() + self.assertDelta( ol.a(), 4.712, 0.01, "Correct lattice a value not found.") + self.assertDelta( ol.b(), 6.06, 0.01, "Correct lattice b value not found.") + self.assertDelta( ol.c(), 10.41, 0.01, "Correct lattice c value not found.") + self.assertDelta( ol.alpha(), 90, 0.4, "Correct lattice angle alpha value not found.") + self.assertDelta( ol.beta(), 90, 0.4, "Correct lattice angle beta value not found.") + self.assertDelta( ol.gamma(), 90, 0.4, "Correct lattice angle gamma value not found.") + + # Go to HKL + ConvertToDiffractionMDWorkspace(InputWorkspace='topaz_3132',OutputWorkspace='topaz_3132_HKL',OutputDimensions='HKL',LorentzCorrection='1',SplitInto='2',SplitThreshold='150') + + # Bin to a line (H=0 to 6, 
L=3, K=3) + BinMD(InputWorkspace='topaz_3132_HKL',AxisAligned='0', + BasisVector0='X,units,1,0,0',BasisVector1='Y,units,6.12323e-17,1,0',BasisVector2='2,units,-0,0,1', + Translation='-0,3,6',OutputExtents='0,6, -0.1,0.1, -0.1,0.1',OutputBins='60,1,1', + OutputWorkspace='topaz_3132_HKL_line') + + # Now check the integrated bin and the peaks + w = mtd["topaz_3132_HKL_line"] + self.assertLessThan( w.signalAt(1), 1e4, "Limited background signal" ) + # The following tests are unstable for flips in HKL: + #self.assertDelta( w.signalAt(10), 1043651, 10e3, "Peak 1") + #self.assertDelta( w.signalAt(20), 354159, 10e3, "Peak 2") + #self.assertDelta( w.signalAt(30), 231615, 10e3, "Peak 3") + + # Now do the same peak finding with Q in the sample frame + ConvertToDiffractionMDWorkspace(InputWorkspace='topaz_3132',OutputWorkspace='topaz_3132_QSample',OutputDimensions='Q (sample frame)',LorentzCorrection='1',SplitInto='2',SplitThreshold='150') + FindPeaksMD(InputWorkspace='topaz_3132_QSample',PeakDistanceThreshold='0.12',MaxPeaks='200',OutputWorkspace='peaks_QSample') + FindUBUsingFFT(PeaksWorkspace='peaks_QSample',MinD='2',MaxD='16') + CopySample(InputWorkspace='peaks_QSample',OutputWorkspace='topaz_3132',CopyName='0',CopyMaterial='0',CopyEnvironment='0',CopyShape='0') + + # Index the peaks and check + results = IndexPeaks(PeaksWorkspace='peaks_QSample') + indexed = results[0] + if indexed < 199: + raise Exception("Expected at least 199 of 200 peaks to be indexed. Only indexed %d!" 
% indexed) + + # Check the UB matrix + w = mtd["topaz_3132"] + s = w.sample() + ol = s.getOrientedLattice() + self.assertDelta( ol.a(), 4.714, 0.01, "Correct lattice a value not found.") + self.assertDelta( ol.b(), 6.06, 0.01, "Correct lattice b value not found.") + self.assertDelta( ol.c(), 10.42, 0.01, "Correct lattice c value not found.") + self.assertDelta( ol.alpha(), 90, 0.4, "Correct lattice angle alpha value not found.") + self.assertDelta( ol.beta(), 90, 0.4, "Correct lattice angle beta value not found.") + self.assertDelta( ol.gamma(), 90, 0.4, "Correct lattice angle gamma value not found.") + + # Compare new and old UBs + newUB = numpy.array(mtd["topaz_3132"].sample().getOrientedLattice().getUB()) + # UB Matrices are not necessarily the same, some of the H,K and/or L sign can be reversed + diff = abs(newUB) - abs(originalUB) < 0.001 + for c in xrange(3): + # This compares each column, allowing old == new OR old == -new + if not (numpy.all(diff[:,c]) ): + raise Exception("More than 0.001 difference between UB matrices: Q (lab frame):\n%s\nQ (sample frame):\n%s" % (originalUB, newUB) ) + + def doValidation(self): + # If we reach here, no validation failed + return True diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/TobyFitResolutionSimulationTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/TobyFitResolutionSimulationTest.py new file mode 100644 index 0000000000000000000000000000000000000000..8d40be9d876c2948a60636bcf008e3d220baeb91 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/TobyFitResolutionSimulationTest.py @@ -0,0 +1,127 @@ +"""Testing of the VATES quantification using +the TobyFitResolutionModel +""" +from stresstesting import MantidStressTest +from mantid.simpleapi import * + +def create_cuboid_xml(xlength,ylength,zlength): + xml = """<cuboid id="sample0"> +<left-front-bottom-point x="%(xpt)f" y="-%(ypt)f" z="-%(zpt)f" /> +<left-front-top-point x="%(xpt)f" y="-%(ypt)f" z="%(zpt)f" /> +<left-back-bottom-point 
x="-%(xpt)f" y="-%(ypt)f" z="-%(zpt)f" /> +<right-front-bottom-point x="%(xpt)f" y="%(ypt)f" z="-%(zpt)f" /> +</cuboid> +<algebra val="sample0" /> +""" + return xml % {"xpt": xlength/2.0,"ypt":ylength/2.0,"zpt":zlength/2.0} + +class TobyFitResolutionSimulationTest(MantidStressTest): + + _success = False + + def skipTests(self): + return False + + def requiredMemoryMB(self): + return 16000 + + def runTest(self): + ei = 300. + bins = [-30,3,279] + temperature = 6. + chopper_speed = 600. + + # Oriented lattice & goniometer. + alatt = 5.57 + blatt = 5.51 + clatt = 12.298 + uvec = [9.700000e-03,9.800000e-03,9.996000e-01] + vvec = [9.992000e-01,-3.460000e-02,-4.580000e-02] + + omega = 0.0 + alpha = 0.0 + beta = 0.0 + gamma = 0.0 + + # sample dimensions + sx = 0.05 # Perp + sy = 0.025 # Up direction + sz = 0.04 # Beam direction + + # Crystal mosaic + eta_sig = 4.0 + + fake_data = CreateSimulationWorkspace(Instrument='MERLIN', + BinParams=bins,UnitX='DeltaE', + DetectorTableFilename='MER06398.raw') + + ## + ## Required log entries, can be taken from real ones by placing an instrument parameter of the same + ## name pointing to the log name + ## + AddSampleLog(Workspace=fake_data, LogName='Ei',LogText=str(ei), LogType="Number") + AddSampleLog(Workspace=fake_data, LogName='temperature_log',LogText=str(temperature), LogType="Number") + AddSampleLog(Workspace=fake_data, LogName='chopper_speed_log',LogText=str(chopper_speed), LogType="Number") + AddSampleLog(Workspace=fake_data, LogName='eta_sigma',LogText=str(eta_sig), LogType="Number") + + ## + ## Sample shape + ## + CreateSampleShape(InputWorkspace=fake_data, ShapeXML=create_cuboid_xml(sx,sy,sz)) + + ## + ## Chopper & Moderator models. 
+ ## + CreateModeratorModel(Workspace=fake_data,ModelType='IkedaCarpenterModerator', + Parameters="TiltAngle=32,TauF=2.7,TauS=0,R=0") + CreateChopperModel(Workspace=fake_data,ModelType='FermiChopperModel', + Parameters="AngularVelocity=chopper_speed_log,ChopperRadius=0.049,SlitThickness=0.0023,SlitRadius=1.3,Ei=Ei,JitterSigma=0.0") + + ## + ## UB matrix + ## + SetUB(Workspace=fake_data,a=alatt,b=blatt,c=clatt,u=uvec,v=vvec) + + ## + ## Sample rotation. Simulate 1 run at zero degrees psi + ## + + psi = 0.0 + AddSampleLog(Workspace=fake_data,LogName='psi',LogText=str(psi),LogType='Number') + SetGoniometer(Workspace=fake_data,Axis0="psi,0,1,0,1") + + # Create the MD workspace + qscale = 'Q in A^-1' + fake_md = ConvertToMD(InputWorkspace=fake_data, QDimensions="Q3D", QConversionScales=qscale, + SplitInto=[3], SplitThreshold=100, + MinValues="-15,-15,-15,-30", MaxValues="25,25,25,279",OverwriteExisting=True) + + # Run the simulation. + resol_model = "TobyFitResolutionModel" + xsec_model = "Strontium122" + parameters = "Seff=0.7,J1a=38.7,J1b=-5.0,J2=27.3,SJc=10.0,GammaSlope=0.08,MultEps=0,TwinType=0,MCLoopMin=10,MCLoopMax=10,MCType=1" # Use sobol & restart each pixel to ensure reproducible result + simulated = SimulateResolutionConvolvedModel(InputWorkspace=fake_md, + ResolutionFunction=resol_model, + ForegroundModel=xsec_model, + Parameters=parameters) + # Take a slice + slice_ws = BinMD(InputWorkspace=simulated, + AlignedDim0='[H,0,0], -12.000000, 9.000000, 100', + AlignedDim1='[0,K,0], -6.000000, 7.000000, 100', + AlignedDim2='[0,0,L], 0.000000, 6.000000, 1', + AlignedDim3='DeltaE, 100.000000, 150.000000, 1') + + # Check + ref_file = LoadMD(Filename='TobyFitResolutionSimulationTest.nxs') + result = CheckWorkspacesMatch(Workspace1=slice_ws, + Workspace2=ref_file, + Tolerance=1e-08) + self._success = ('success' in result.lower()) + + if not self._success: + SaveMD(InputWorkspace=slice_ws, + Filename='TobyFitResolutionSimulationTest-mismatch.nxs') + + def validate(self): 
+ return self._success + diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/UserAlgotithmsBuild.py b/Code/Mantid/Testing/SystemTests/tests/analysis/UserAlgotithmsBuild.py new file mode 100644 index 0000000000000000000000000000000000000000..4cec914af17601765d29c07b1332ee47818788dc --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/UserAlgotithmsBuild.py @@ -0,0 +1,41 @@ +import stresstesting +import sys +import os + +class UserAlgorithmsBuild(stresstesting.MantidStressTest): + + build_success = False + + def skipTests(self): + " We skip this test if the system is not Windows." + if sys.platform.startswith('win'): + return False + else: + return True + + def runTest(self): + """ + System test for testing that the UserAlgorithm build script works + """ + # Run the build + import subprocess + retcode = subprocess.call(["C:\\MantidInstall\\UserAlgorithms\\build.bat","--quiet"]) + if retcode == 0: + self.build_success = True + else: + self.build_success = False + + def cleanup(self): + # Remove build files as they will be loaded by the next + # process that runs this test and it then can't remove them! 
+ install_dir = r'C:\MantidInstall\plugins' + lib_name = 'UserAlgorithms' + exts = ['.dll', '.exp', '.lib'] + for ext in exts: + try: + os.remove(os.path.join(install_dir, lib_name + ext)) + except OSError: + pass + + def validate(self): + return self.build_success diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateFacilitiesFile.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateFacilitiesFile.py new file mode 100644 index 0000000000000000000000000000000000000000..7ef8988def68309348cfbf6572b2b8687295960b --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateFacilitiesFile.py @@ -0,0 +1,46 @@ +from mantid import config +import os +import re +import stresstesting +import glob + + +EXPECTED_EXT = '.expected' + +class ValidateFacilitiesFile(stresstesting.MantidStressTest): + + def skipTests(self): + try: + import genxmlif + import minixsv + except ImportError: + return True + return False + + + def runTest(self): + """Main entry point for the test suite""" + from genxmlif import GenXmlIfError + from minixsv import pyxsval + direc = config['instrumentDefinition.directory'] + filename = os.path.join(direc,'Facilities.xml') + xsdFile = os.path.join(direc,'Schema/Facilities/1.0/','FacilitiesSchema.xsd') + + # run the tests + failed = [] + try: + print "----------------------------------------" + print "Validating Facilities.xml" + pyxsval.parseAndValidateXmlInput(filename, xsdFile=xsdFile, validateSchema=0) + except Exception, e: + print "VALIDATION OF Facilities.xml FAILED WITH ERROR:" + print e + failed.append(filename) + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + raise RuntimeError("Failed Validation of Facilities.xml") + else: + print "Succesfully Validated Facilities.xml" \ No newline at end of file diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateGroupingFiles.py 
b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateGroupingFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..802e2fc1cfe3b4e267f5543c5ee40422cb1e66f8 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateGroupingFiles.py @@ -0,0 +1,62 @@ +from mantid import config +import os +import re +import stresstesting +import glob + +EXPECTED_EXT = '.expected' + +class ValidateGroupingFiles(stresstesting.MantidStressTest): + + def skipTests(self): + try: + import genxmlif + import minixsv + except ImportError: + return True + return False + + def __getDataFileList__(self): + # get a list of directories to look in + direc = config['instrumentDefinition.directory'] + direc = os.path.join(direc,'Grouping') + print "Looking for Grouping files in: %s" % direc + cwd = os.getcwd() + os.chdir(direc) + myFiles = glob.glob("*Grouping*.xml") + os.chdir(cwd) + files = [] + for filename in myFiles: + files.append(os.path.join(direc, filename)) + return files + + def runTest(self): + """Main entry point for the test suite""" + from genxmlif import GenXmlIfError + from minixsv import pyxsval + direc = config['instrumentDefinition.directory'] + self.xsdFile = os.path.join(direc,'Schema/Grouping/1.0/','GroupingSchema.xsd') + files = self.__getDataFileList__() + + # run the tests + failed = [] + for filename in files: + try: + print "----------------------------------------" + print "Validating '%s'" % filename + pyxsval.parseAndValidateXmlInput(filename, xsdFile=self.xsdFile, validateSchema=0) + except Exception, e: + print "VALIDATION OF '%s' FAILED WITH ERROR:" % filename + print e + failed.append(filename) + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + for filename in failed: + print filename + raise RuntimeError("Failed Validation for %d of %d files" \ + % (len(failed), len(files))) + else: + print "Succesfully Validated %d 
files" % len(files) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateInstrumentDefinitionFiles.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateInstrumentDefinitionFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..4c65a1306f5e552dfbc11a265689521a0ce13e40 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateInstrumentDefinitionFiles.py @@ -0,0 +1,92 @@ +from mantid import config +import os +import re +import stresstesting +import glob + + +EXPECTED_EXT = '.expected' + +class ValidateInstrumentDefinitionFiles(stresstesting.MantidStressTest): + + def skipTests(self): + try: + import genxmlif + import minixsv + except ImportError: + return True + return False + + def __getDataFileList__(self): + # get a list of directories to look in + direc = config['instrumentDefinition.directory'] + print "Looking for instrument definition files in: %s" % direc + cwd = os.getcwd() + os.chdir(direc) + myFiles = glob.glob("*Definition*.xml") + os.chdir(cwd) + files = [] + for filename in myFiles: + files.append(os.path.join(direc, filename)) + return files + + def runTest(self): + """Main entry point for the test suite""" + from genxmlif import GenXmlIfError + from minixsv import pyxsval + + # need to extend minixsv library to add method for that forces it to + # validate against local schema when the xml file itself has + # reference to schema online. The preference is to systemtest against + # a local schema file to avoid this systemtest failing is + # external url temporariliy not available. Secondary it also avoid + # having to worry about proxies. 
+ + class MyXsValidator(pyxsval.XsValidator): + ######################################## + # force validation of XML input against local file + # + def validateXmlInputForceReadFile (self, xmlInputFile, inputTreeWrapper, xsdFile): + xsdTreeWrapper = self.parse (xsdFile) + xsdTreeWrapperList = [] + xsdTreeWrapperList.append(xsdTreeWrapper) + self._validateXmlInput (xmlInputFile, inputTreeWrapper, xsdTreeWrapperList) + for xsdTreeWrapper in xsdTreeWrapperList: + xsdTreeWrapper.unlink() + return inputTreeWrapper + + def parseAndValidateXmlInputForceReadFile (inputFile, xsdFile=None, **kw): + myXsValidator = MyXsValidator(**kw) + # parse XML input file + inputTreeWrapper = myXsValidator.parse (inputFile) + # validate XML input file + return myXsValidator.validateXmlInputForceReadFile (inputFile, inputTreeWrapper, xsdFile) + + + + direc = config['instrumentDefinition.directory'] + self.xsdFile = os.path.join(direc,'Schema/IDF/1.0/','IDFSchema.xsd') + files = self.__getDataFileList__() + + # run the tests + failed = [] + for filename in files: + try: + print "----------------------------------------" + print "Validating '%s'" % filename + parseAndValidateXmlInputForceReadFile(filename, xsdFile=self.xsdFile) + except Exception, e: + print "VALIDATION OF '%s' FAILED WITH ERROR:" % filename + print e + failed.append(filename) + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + for filename in failed: + print filename + raise RuntimeError("Failed Validation for %d of %d files" \ + % (len(failed), len(files))) + else: + print "Succesfully Validated %d files" % len(files) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateParameterFiles.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateParameterFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..49aec6296346c763d9b1e7d201c68bc6f50f7352 --- /dev/null +++ 
b/Code/Mantid/Testing/SystemTests/tests/analysis/ValidateParameterFiles.py @@ -0,0 +1,62 @@ +from mantid import config +import os +import re +import stresstesting +import glob +import time + +EXPECTED_EXT = '.expected' + +class ValidateParameterFiles(stresstesting.MantidStressTest): + + def skipTests(self): + try: + import genxmlif + import minixsv + except ImportError: + return True + return False + + def __getDataFileList__(self): + # get a list of directories to look in + direc = config['instrumentDefinition.directory'] + print "Looking for instrument definition files in: %s" % direc + cwd = os.getcwd() + os.chdir(direc) + myFiles = glob.glob("*Parameters*.xml") + os.chdir(cwd) + files = [] + for filename in myFiles: + files.append(os.path.join(direc, filename)) + return files + + def runTest(self): + """Main entry point for the test suite""" + from genxmlif import GenXmlIfError + from minixsv import pyxsval + direc = config['instrumentDefinition.directory'] + self.xsdFile = os.path.join(direc,'Schema/ParameterFile/1.0/','ParameterFileSchema.xsd') + files = self.__getDataFileList__() + + # run the tests + failed = [] + for filename in files: + try: + print "----------------------------------------" + print "Validating '%s'" % filename + pyxsval.parseAndValidateXmlInput(filename, xsdFile=self.xsdFile, validateSchema=0) + except Exception, e: + print "VALIDATION OF '%s' FAILED WITH ERROR:" % filename + print e + failed.append(filename) + + # final say on whether or not it 'worked' + print "----------------------------------------" + if len(failed) != 0: + print "SUMMARY OF FAILED FILES" + for filename in failed: + print filename + raise RuntimeError("Failed Validation for %d of %d files" \ + % (len(failed), len(files))) + else: + print "Succesfully Validated %d files" % len(files) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/VesuvioFittingTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/VesuvioFittingTest.py new file mode 100644 index 
# ----- Code/Mantid/Testing/SystemTests/tests/analysis/VesuvioFittingTest.py -----
import stresstesting
from mantid.simpleapi import *

import platform

#------------------------------------------------------------------------------------------------------------------
WS_PREFIX="fit"

def do_fit_no_background(k_is_free):
    """
    Run the Vesuvio fit without background. If k_is_free is False then it is fixed to f0.Width*sqrt(2)/12
    """
    function_str = \
        "composite=ComptonScatteringCountRate,NumDeriv=1,IntensityConstraints=\"Matrix(1|3)0|-1|3\";"\
        "name=GramCharlierComptonProfile,Mass=1.007940,HermiteCoeffs=1 0 1;"\
        "name=GaussianComptonProfile,Mass=27.000000;"\
        "name=GaussianComptonProfile,Mass=91.000000"
    # Run fit
    _do_fit(function_str, k_is_free)

def do_fit_with_quadratic_background():
    """
    Run the Vesuvio fit with a quadratic (Polynomial n=2) background.
    The FSE coefficient k is always fixed to f0.Width*sqrt(2)/12 here.
    (BUGFIX: previous docstring was a copy-paste from the no-background case.)
    """
    function_str = \
        "composite=ComptonScatteringCountRate,NumDeriv=1,IntensityConstraints=\"Matrix(1|3)0|-1|3\";"\
        "name=GramCharlierComptonProfile,Mass=1.007940,HermiteCoeffs=1 0 1;"\
        "name=GaussianComptonProfile,Mass=27.000000;"\
        "name=GaussianComptonProfile,Mass=91.000000;name=Polynomial,n=2,A0=0,A1=0,A2=0"
    # Run fit
    _do_fit(function_str, k_is_free=False)

def _do_fit(function_str, k_is_free):
    """
    Load the Vesuvio data and run Fit with the given function string.
    If k_is_free is False then f0.FSECoeff is tied to f0.Width*sqrt(2)/12.
    """
    LoadVesuvio(Filename='14188-14190',OutputWorkspace='raw_ws',SpectrumList='135',Mode='SingleDifference',
                InstrumentParFile=r'IP0005.dat')
    CropWorkspace(InputWorkspace='raw_ws',OutputWorkspace='raw_ws',XMin=50,XMax=562)
    # Convert to seconds
    ScaleX(InputWorkspace='raw_ws',OutputWorkspace='raw_ws',Operation='Multiply',Factor=1e-06)

    if k_is_free:
        ties_str = "f1.Width=10.000000,f2.Width=25.000000"
    else:
        ties_str = "f1.Width=10.000000,f2.Width=25.000000,f0.FSECoeff=f0.Width*sqrt(2)/12"

    constraints_str = "2.000000 < f0.Width < 7.000000"

    Fit(InputWorkspace='raw_ws',Function=function_str,Ties=ties_str,Constraints=constraints_str,
        Output=WS_PREFIX, CreateOutput=True,OutputCompositeMembers=True,MaxIterations=5000,
        Minimizer="Levenberg-Marquardt,AbsError=1e-08,RelError=1e-08")
    # Convert back to microseconds
    ScaleX(InputWorkspace=WS_PREFIX + '_Workspace',OutputWorkspace=WS_PREFIX + '_Workspace',Operation='Multiply',Factor=1e06)

def tolerance():
    # Not too happy about this but the gsl seems to behave slightly differently on Windows/Mac but the reference result is from Linux
    # The results however are still acceptable
    system = platform.system()
    if system == "Windows":
        if platform.architecture()[0] == "64bit":
            return 1e-2 # Other fitting tests seem to require this level too.
        else:
            return 1e-1
    elif system == "Darwin":
        return 1e-1 # Other fitting tests seem to require this level too.
    else:
        return 1e-6

#------------------------------------------------------------------------------------------------------------------

class VesuvioFittingTest(stresstesting.MantidStressTest):
    """Fit with no background and k tied to f0.Width*sqrt(2)/12."""

    def runTest(self):
        do_fit_no_background(k_is_free=False)

        self.assertTrue(WS_PREFIX + "_Workspace" in mtd, "Expected function workspace in ADS")
        self.assertTrue(WS_PREFIX + "_Parameters" in mtd, "Expected parameters workspace in ADS")
        self.assertTrue(WS_PREFIX + "_NormalisedCovarianceMatrix" in mtd, "Expected covariance workspace in ADS")

    def validate(self):
        self.tolerance = tolerance()
        return "fit_Workspace","VesuvioFittingTest.nxs"

#------------------------------------------------------------------------------------------------------------------

class VesuvioFittingWithKFreeTest(stresstesting.MantidStressTest):
    """Fit with no background and k left free."""

    def runTest(self):
        do_fit_no_background(k_is_free=True)

        self.assertTrue(WS_PREFIX + "_Workspace" in mtd, "Expected function workspace in ADS")
        self.assertTrue(WS_PREFIX + "_Parameters" in mtd, "Expected parameters workspace in ADS")
        self.assertTrue(WS_PREFIX + "_NormalisedCovarianceMatrix" in mtd, "Expected covariance workspace in ADS")

    def validate(self):
        self.tolerance = tolerance()
        return "fit_Workspace","VesuvioFittingWithKFreeTest.nxs"

#------------------------------------------------------------------------------------------------------------------

class VesuvioFittingWithQuadraticBackgroundTest(stresstesting.MantidStressTest):
    """Fit with a quadratic background and k tied."""

    def runTest(self):
        do_fit_with_quadratic_background()

        self.assertTrue(WS_PREFIX + "_Workspace" in mtd, "Expected function workspace in ADS")
        self.assertTrue(WS_PREFIX + "_Parameters" in mtd, "Expected parameters workspace in ADS")
        self.assertTrue(WS_PREFIX + "_NormalisedCovarianceMatrix" in mtd, "Expected covariance workspace in ADS")

    def validate(self):
        self.tolerance = tolerance()
        return "fit_Workspace","VesuvioFittingWithQuadraticBackgroundTest.nxs"
# ----- Code/Mantid/Testing/SystemTests/tests/analysis/WishAnalysis.py -----
import stresstesting
from mantid.simpleapi import *

class WishAnalysis(stresstesting.MantidStressTest):
    """
    Runs the WISH analysis chain on one bank of data
    """

    def runTest(self):
        # MG: 5/5/2010: The test machine only has 1 Gb of RAM and can't handle a whole bank of WISH
        # load Data (spectra 6-5000 only, see note above)
        LoadRaw(Filename="WISH00016748.raw",OutputWorkspace="w16748-1",LoadLogFiles="0",SpectrumMin="6",SpectrumMax="5000")
        ConvertUnits(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",Target="Wavelength")
        # load monitors (spectrum 4 is the monitor used for normalisation)
        LoadRaw(Filename="WISH00016748.raw",OutputWorkspace="monitor16748",LoadLogFiles="0",SpectrumMin="4",SpectrumMax="4")
        ConvertUnits(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",Target="Wavelength")
        # extract integral section of monitor
        CropWorkspace(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",XMin="0.6",XMax="9.8")
        ConvertToDistribution(Workspace="monitor16748")
        # mask out vanadium peaks (four wavelength windows)
        MaskBins(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",XMin="4.57",XMax="4.76")
        MaskBins(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",XMin="3.87",XMax="4.12")
        MaskBins(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",XMin="2.75",XMax="2.91")
        MaskBins(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",XMin="2.24",XMax="2.5")
        # generate spline and smooth
        SplineBackground(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",NCoeff="30")
        SmoothData(InputWorkspace="monitor16748",OutputWorkspace="monitor16748",NPoints="50")
        ConvertFromDistribution(Workspace="monitor16748")
        # normalise data to the monitor in wavelength
        NormaliseToMonitor(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",MonitorWorkspace="monitor16748")
        NormaliseToMonitor(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",MonitorWorkspace="monitor16748",IntegrationRangeMin="0.6",IntegrationRangeMax="9.8")
        # align detectors (back to TOF first; clean up NaN/inf from the division)
        ConvertUnits(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",Target="TOF")
        ReplaceSpecialValues(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",NaNValue="0",InfinityValue="0")
        AlignDetectors(InputWorkspace="w16748-1",OutputWorkspace="w16748-1",CalibrationFile="wish_grouping_noends2_no_offsets_nov2009.cal")
        # focus data using the same grouping/cal file
        DiffractionFocussing(InputWorkspace="w16748-1",OutputWorkspace="w16748-1foc",GroupingFileName="wish_grouping_noends2_no_offsets_nov2009.cal")
        DeleteWorkspace(Workspace="w16748-1")
        CropWorkspace(InputWorkspace="w16748-1foc",OutputWorkspace="w16748-1foc",XMin="0.83",XMax="45")
        # load pre-processed empty and subtract
        LoadNexusProcessed(Filename="emptycryo3307-1foc.nx5",OutputWorkspace="empty")
        RebinToWorkspace(WorkspaceToRebin="empty",WorkspaceToMatch="w16748-1foc",OutputWorkspace="empty")
        Minus(LHSWorkspace="w16748-1foc",RHSWorkspace="empty",OutputWorkspace="w16748-1foc")
        DeleteWorkspace(Workspace="empty")
        # load preprocessed Vanadium and divide
        LoadNexusProcessed(Filename="vana3123-1foc-SS.nx5",OutputWorkspace="vana")
        RebinToWorkspace(WorkspaceToRebin="vana",WorkspaceToMatch="w16748-1foc",OutputWorkspace="vana")
        Divide(LHSWorkspace="w16748-1foc",RHSWorkspace="vana",OutputWorkspace="w16748-1foc")
        DeleteWorkspace(Workspace="vana")
        # convert back to TOF for output to GSAS/Fullprof
        ConvertUnits(InputWorkspace="w16748-1foc",OutputWorkspace="w16748-1foc",Target="TOF")

    def validate(self):
        # Compare the focused workspace against the stored reference result.
        return 'w16748-1foc','WishAnalysis.nxs'
# ----- Code/Mantid/Testing/SystemTests/tests/analysis/WishDiffuseScattering.py -----
"""
Tests diffuse scattering reduction as used on WISH
If this breaks for whatever reason, there is a good chance that unregistered scripts will also be broken.
- Email Pascal Manuel @ ISIS if things break here and let him know how his scripts may need to be modified.
"""

import stresstesting
from mantid.simpleapi import *

class WishDiffuseScattering(stresstesting.MantidStressTest):

    def requiredMemoryMB(self):
        return 2000

    def runTest(self):

        # Each of the three runs (C, B, A) gets the same treatment:
        # normalise to proton charge, crop, rebin, then smooth neighbours.
        Load(Filename= 'Wish_Diffuse_Scattering_C.nxs',OutputWorkspace='C',LoadLogFiles='0',LoadMonitors='Exclude')
        NormaliseByCurrent(InputWorkspace='C',OutputWorkspace='C')
        CropWorkspace(InputWorkspace='C',OutputWorkspace='C',XMin='6000',XMax='99000')
        Rebin(InputWorkspace='C',OutputWorkspace='C',Params='6000,-0.004,99900')
        SmoothNeighbours(InputWorkspace='C',OutputWorkspace='Csn',RadiusUnits='NumberOfPixels',Radius='3',NumberOfNeighbours='25',PreserveEvents='0')

        Load(Filename= 'Wish_Diffuse_Scattering_B.nxs',OutputWorkspace='B',LoadLogFiles='0',LoadMonitors='Exclude')
        NormaliseByCurrent(InputWorkspace='B',OutputWorkspace='B')
        CropWorkspace(InputWorkspace='B',OutputWorkspace='B',XMin='6000',XMax='99000')
        Rebin(InputWorkspace='B',OutputWorkspace='B',Params='6000,-0.004,99900')
        SmoothNeighbours(InputWorkspace='B',OutputWorkspace='Bsn',RadiusUnits='NumberOfPixels',Radius='3',NumberOfNeighbours='25',PreserveEvents='0')

        Load(Filename= 'Wish_Diffuse_Scattering_A.nxs',OutputWorkspace='A',LoadLogFiles='0',LoadMonitors='Exclude')
        NormaliseByCurrent(InputWorkspace='A',OutputWorkspace='A')
        CropWorkspace(InputWorkspace='A',OutputWorkspace='A',XMin='6000',XMax='99000')
        Rebin(InputWorkspace='A',OutputWorkspace='A',Params='6000,-0.004,99900')
        SmoothNeighbours(InputWorkspace='A',OutputWorkspace='Asn',RadiusUnits='NumberOfPixels',Radius='3',NumberOfNeighbours='25',PreserveEvents='0')
        # Run A is additionally smoothed and used as the divisor below.
        SmoothData(InputWorkspace='Asn',OutputWorkspace='Asn-smooth',NPoints='50')

        Divide(LHSWorkspace='Csn',RHSWorkspace='Asn-smooth',OutputWorkspace='C_div_A_sn_smooth')
        ReplaceSpecialValues(InputWorkspace='C_div_A_sn_smooth',OutputWorkspace='C_div_A_sn_smooth',NaNValue='0',InfinityValue='100000',BigNumberThreshold='99000')

        Divide(LHSWorkspace='Bsn',RHSWorkspace='Asn-smooth',OutputWorkspace='B_div_A_sn_smooth')
        ReplaceSpecialValues(InputWorkspace='B_div_A_sn_smooth',OutputWorkspace='B_div_A_sn_smooth',NaNValue='0',InfinityValue='100000',BigNumberThreshold='99000')

        Minus(LHSWorkspace='C_div_A_sn_smooth',RHSWorkspace='B_div_A_sn_smooth',OutputWorkspace='CminusB_smooth')

        # Attach the UB matrix and goniometer, then convert to an MD workspace in HKL.
        LoadIsawUB(InputWorkspace='CminusB_smooth',Filename='Wish_Diffuse_Scattering_ISAW_UB.mat')

        AddSampleLog(Workspace='CminusB_smooth',LogName='psi',LogText='0.0',LogType='Number Series')
        SetGoniometer(Workspace='CminusB_smooth',Axis0='psi,0,1,0,1')
        ConvertToDiffractionMDWorkspace(InputWorkspace='CminusB_smooth',OutputWorkspace='CminusB_smooth_MD_HKL',OutputDimensions='HKL',Version=2)


        BinMD(InputWorkspace='CminusB_smooth_MD_HKL',AlignedDim0='[H,0,0],-1.0,8.0,200',AlignedDim1='[0,K,0],-1.0,8.0,200',AlignedDim2='[0,0,L],0,1.5,200',OutputWorkspace='test_rebin')

        #Quick sanity checks. No comparison with a saved workspace because SliceMD is too expensive compared to BinMD.
        result = mtd['test_rebin']
        self.assertTrue(result.getNumDims() == 3)
        # 200*200*200 bins from the BinMD call above
        self.assertTrue(result.getNPoints() == 8000000)

        return True;

    def doValidate(self):
        # NOTE(review): MantidStressTest normally overrides validate(); confirm
        # that the framework actually calls doValidate, otherwise this hook is dead.
        return True;

# ----- Code/Mantid/Testing/SystemTests/tests/analysis/WishMasking.py -----
"""
Tests masking functionality specific to WISH. Working masking behaviour is critical in general, but is heavily used on WISH.
- Email Pascal Manuel @ ISIS if things break here and let him know how his scripts may need to be modified.
"""

import stresstesting
import os
from mantid.simpleapi import *

class WishMasking(stresstesting.MantidStressTest):

    # Utility function will return the masking corresponding to a workspace index from a cal file.
    # Raises LookupError when the requested index is not present in the file.
    def get_masking_for_index(self, cal_file, requested_index):
        while True:
            line = cal_file.readline()
            if line == "":
                raise LookupError
            line_contents = line.split()
            try:
                index = int(line_contents[0].strip())
                select = int(line_contents[3].strip())
                group = int(line_contents[4].strip())
                if(index == requested_index):
                    return select
            except ValueError:
                # Header/comment lines do not parse as ints; skip them.
                continue

    # Tests that the cal file is being created in the expected way.
    # 1) Uses the masks to create a cal file
    # 2) Read the cal file
    # 3) Use the known masking boundaries to determine whether the cal file has been created properly according to the function inputs.
    def do_test_cal_file(self, masked_workspace, should_invert, expected_masking_identifier, expected_not_masking_identifier, masking_edge):
        # Export the mask to a temporary cal file and check the select flag on
        # both sides of the known masking boundary.
        cal_filename = 'wish_masking_system_test_temp.cal'
        cal_file_full_path = os.path.join(config['defaultsave.directory'],cal_filename)
        MaskWorkspaceToCalFile(InputWorkspace=masked_workspace, OutputFile=cal_file_full_path, Invert=should_invert)
        file = open(cal_file_full_path, 'r')
        try:
            mask_boundary_inside = self.get_masking_for_index(file, masking_edge)
            mask_boundary_outside = self.get_masking_for_index(file, masking_edge+1)
            self.assertTrue(mask_boundary_inside == expected_masking_identifier)
            self.assertTrue(mask_boundary_outside == expected_not_masking_identifier)
        except LookupError:
            print "Could not find the requested index"
            self.assertTrue(False)
        finally:
            # Always close and delete the temporary cal file.
            file.close()
            os.remove(cal_file_full_path)

    def requiredMemoryMB(self):
        return 2000

    def runTest(self):
        Load(Filename='WISH00016748.raw',OutputWorkspace='wish_ws')
        ws = mtd['wish_ws']
        MaskDetectors(Workspace=ws, WorkspaceIndexList='0,1,2,3,4,5,6,7,8,9')

        # We just masked all detectors up to index == 9
        masking_edge = 9

        # Test the 'isMasked' property on the detectors of the original workspace
        self.assertTrue( ws.getDetector(masking_edge).isMasked() )
        self.assertTrue( not ws.getDetector(masking_edge + 1).isMasked() )

        # Extract a masking workspace
        ExtractMask( InputWorkspace=ws, OutputWorkspace='masking_wish_workspace' )
        mask_ws = mtd['masking_wish_workspace']

        ## COMPLETE TESTS: These following are the tests that should pass when everything works. See below for reasons why.

        # Test the 'isMasked' property on the detectors of the masked workspace
        # The following tests have been added even though they are broken because extracted workspaces currently do not preserve the Masking flags (but they SHOULD!). Hopefully the broken functionality will be fixed and I can enable them.
        #self.assertTrue( mask_ws.getDetector(masking_edge).isMasked() )
        #self.assertTrue( not mask_ws.getDetector(masking_edge + 1).isMasked() )

        # Save masking
        mask_file = 'wish_masking_system_test_mask_file_temp.xml'
        SaveMask(InputWorkspace=mask_ws,OutputFile=mask_file)
        mask_file_path = os.path.join(config['defaultsave.directory'], mask_file)
        # Check the mask file was created.
        self.assertTrue(os.path.isfile(mask_file_path))
        # ... and has the correct contents
        masking_xml = open(mask_file_path, 'r')
        found_correct_ids = False
        for line in masking_xml:
            if "<detids>1-5,1100000-1100019</detids>" in line:
                found_correct_ids = True
        masking_xml.close()
        self.assertTrue(found_correct_ids)
        os.remove(mask_file_path)

        ## END COMPLETE TESTS

        ## CHARACTERISATION TESTS: These tests characterise the current breakage of the masking code.
        ## I've included these false-positives as a testing strategy because it will flag up that the functionality has been fixed when these tests start failing (we can then test the right thing, see above)

        # Testing that the isMasking is the same on both sides of the masking boundary. If things were working properly the following would not pass!
        self.assertTrue( mask_ws.getDetector(masking_edge).isMasked() == mask_ws.getDetector(masking_edge + 1).isMasked() )
        ## END CHARACTERISATION TESTS

        #Test creation with normal masking
        invert_masking = False;
        self.do_test_cal_file(ws, invert_masking, 0, 1, masking_edge)

        #Test with masking inversed, because that is a real scenario too.
        invert_masking = True;
        self.do_test_cal_file(ws, invert_masking, 1, 0, masking_edge)

        #Test merge cal files
        master_cal_file_name = 'master.cal'
        update_cal_file_name = 'update.cal'
        merged_cal_file_name = 'merged.cal'
        save_path = config['defaultsave.directory']
        master_cal_file_path = os.path.join(save_path,master_cal_file_name)
        update_cal_file_path = os.path.join(save_path,update_cal_file_name)
        merged_cal_file_path = os.path.join(save_path,merged_cal_file_name)

        try:
            MaskWorkspaceToCalFile(InputWorkspace=ws, OutputFile=master_cal_file_name, Invert=False)
            MaskWorkspaceToCalFile(InputWorkspace=ws, OutputFile=update_cal_file_name, Invert=True)

            MergeCalFiles(UpdateFile=update_cal_file_path, MasterFile=master_cal_file_path,
                          OutputFile=merged_cal_file_name, MergeSelections=True)

            update_cal_file = open(update_cal_file_path, 'r')
            merged_cal_file = open(merged_cal_file_path, 'r')

            merged_mask_boundary_inside = self.get_masking_for_index(merged_cal_file, masking_edge)
            merged_mask_boundary_outside = self.get_masking_for_index(merged_cal_file, masking_edge+1)
            update_mask_boundary_inside = self.get_masking_for_index(update_cal_file, masking_edge)
            update_mask_boundary_outside = self.get_masking_for_index(update_cal_file, masking_edge+1)

            #Test that the merged output cal file has actually taken the masking from the update file.
            self.assertTrue(merged_mask_boundary_inside != merged_mask_boundary_outside)
            self.assertTrue(merged_mask_boundary_inside == update_mask_boundary_inside)
            self.assertTrue(merged_mask_boundary_outside == update_mask_boundary_outside)

        finally:
            #clean up no matter what.
+ merged_cal_file.close() + update_cal_file.close() + os.remove(master_cal_file_path) + os.remove(update_cal_file_path) + os.remove(merged_cal_file_path) + + def doValidate(self): + return True; diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/utils.py b/Code/Mantid/Testing/SystemTests/tests/analysis/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d009a6f64974fde1b182835f8405192f489b36d8 --- /dev/null +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/utils.py @@ -0,0 +1,296 @@ +''' SVN Info: The variables below will only get subsituted at svn checkout if + the repository is configured for variable subsitution. + + $Id$ + $HeadURL$ +|=============================================================================|=======| +1 80 <tab> +''' +import sys, os +import dis, inspect, opcode +def ls(): + print os.getcwd() + files=os.listdir(os.getcwd()) + for i in range(0,len(files)): + + print files[i] +def pwd(): + print os.getcwd() +def cd(dir_str): + os.chdir(dir_str) +def lineno(): + """ + call signature(s):: + lineno() + + Returns the current line number in our program. + + No Arguments. + + + Working example + >>> print "This is the line number ",lineno(),"\n" + + """ + return inspect.currentframe().f_back.f_lineno + +def decompile(code_object): + ''' taken from http://thermalnoise.wordpress.com/2007/12/30/exploring-python-bytecode/ + + decompile extracts dissasembly information from the byte code and stores it in a + list for further use. + + call signature(s):: + instructions=decompile(f.f_code) + + Required arguments: + ========= ===================================================================== + f.f_code A bytecode object ectracted with inspect.currentframe() + or anyother mechanism that returns byte code. 
+ + Optional keyword arguments: NONE + + Outputs: + ========= ===================================================================== + instructions a list of offsets, op_codes, names, arguments, argument_type, + argument_value which can be deconstructed to find out various things + about a function call. + + Examples: + + f = inspect.currentframe().f_back.f_back + i = f.f_lasti # index of the last attempted instruction in byte code + ins=decompile(f.f_code) + pretty_print(ins) + + + ''' + code = code_object.co_code + variables = code_object.co_cellvars + code_object.co_freevars + instructions = [] + n = len(code) + i = 0 + e = 0 + while i < n: + i_offset = i + i_opcode = ord(code[i]) + i = i + 1 + if i_opcode >= opcode.HAVE_ARGUMENT: + i_argument = ord(code[i]) + (ord(code[i+1]) << (4*2)) + e + i = i +2 + if i_opcode == opcode.EXTENDED_ARG: + e = iarg << 16 + else: + e = 0 + if i_opcode in opcode.hasconst: + i_arg_value = repr(code_object.co_consts[i_argument]) + i_arg_type = 'CONSTANT' + elif i_opcode in opcode.hasname: + i_arg_value = code_object.co_names[i_argument] + i_arg_type = 'GLOBAL VARIABLE' + elif i_opcode in opcode.hasjrel: + i_arg_value = repr(i + i_argument) + i_arg_type = 'RELATIVE JUMP' + elif i_opcode in opcode.haslocal: + i_arg_value = code_object.co_varnames[i_argument] + i_arg_type = 'LOCAL VARIABLE' + elif i_opcode in opcode.hascompare: + i_arg_value = opcode.cmp_op[i_argument] + i_arg_type = 'COMPARE OPERATOR' + elif i_opcode in opcode.hasfree: + i_arg_value = variables[i_argument] + i_arg_type = 'FREE VARIABLE' + else: + i_arg_value = i_argument + i_arg_type = 'OTHER' + else: + i_argument = None + i_arg_value = None + i_arg_type = None + instructions.append( (i_offset, i_opcode, opcode.opname[i_opcode], i_argument, i_arg_type, i_arg_value) ) + return instructions + +# Print the byte code in a human readable format +def pretty_print(instructions): + print '%5s %-20s %3s %5s %-20s %s' % ('OFFSET', 'INSTRUCTION', 'OPCODE', 'ARG', 'TYPE', 'VALUE') 
+ for (offset, op, name, argument, argtype, argvalue) in instructions: + print '%5d %-20s (%3d) ' % (offset, name, op), + if argument != None: + print '%5d %-20s (%s)' % (argument, argtype, argvalue), + print + +def expecting(): + #{{{ + ''' + call signature(s):: + + + Return how many values the caller is expecting + + Required arguments: NONE + + Optional keyword arguments: NONE + + + Outputs: + ========= ===================================================================== + numReturns Number of return values on expected on the left of the equal sign. + + Examples: + + This function is not designed for cammand line use. Using in a function can + follow the form below. + + + def test1(): + def f(): + r = expecting() + print r + if r == 0: + return None + if r == 1: + return 0 + return range(r) + + f() + print "---" + a = f() + print "---", a + a, b = f() + print "---", a,b + a, b = c = f() + print "---", a,b,c + a, b = c = d = f() + print "---", a,b,c + a = b = f() + print "---", a,b + a = b, c = f() + print "---", a,b,c + a = b = c, d = f() + print "---", a,b,c,d + a = b, c = d = f() + print "---", a,b,c,d + a, b = c, d = f() + print "---", a,b,c,d + ''' + #}}} + + """ Developers Notes: + + Now works with an multiple assigments correctly. 
This is verified by + test() and test1() below + """ + f = inspect.currentframe().f_back.f_back + i = f.f_lasti # index of the last attempted instruction in byte code + ins=decompile(f.f_code) + #pretty_print(ins) + for (offset, op, name, argument, argtype, argvalue) in ins: + if offset > i: + if name == 'POP_TOP': + return 0 + if name == 'UNPACK_SEQUENCE': + return argument + if name == 'CALL_FUNCTION': + return 1 + +def lhs(output='names'): + ''' + call signature(s):: + + Return how many values the caller is expecting + + Required arguments: NONE + + Optional keyword arguments: NONE + + + Outputs: + ========= ===================================================================== + numReturns Number of return values on expected on the left of the equal sign. + + Examples: + + This function is not designed for cammand line use. Using in a function can + follow the form below. + + ''' + """ Developers Notes: + """ + f = inspect.currentframe().f_back.f_back + i = f.f_lasti # index of the last attempted instruction in byte code + ins=decompile(f.f_code) + #pretty_print(ins) + + CallFunctionLocation={} + first=False; StartIndex=0; StartOffset=0 + # we must list all of the operators that behave like a function call in byte-code + OperatorNames=set(['CALL_FUNCTION','UNARY_POSITIVE','UNARY_NEGATIVE','UNARY_NOT','UNARY_CONVERT','UNARY_INVERT','GET_ITER', 'BINARY_POWER','BINARY_MULTIPLY','BINARY_DIVIDE', 'BINARY_FLOOR_DIVIDE', 'BINARY_TRUE_DIVIDE', 'BINARY_MODULO','BINARY_ADD','BINARY_SUBTRACT','BINARY_SUBSCR','BINARY_LSHIFT','BINARY_RSHIFT','BINARY_AND','BINARY_XOR','BINARY_OR']) + + for index in range(len(ins)): + (offset, op, name, argument, argtype, argvalue) = ins[index] + if name in OperatorNames: + if not first: + CallFunctionLocation[StartOffset] = (StartIndex,index) + StartIndex=index + StartOffset = offset + + (offset, op, name, argument, argtype, argvalue) = ins[-1] + CallFunctionLocation[StartOffset]=(StartIndex,len(ins)-1) # append the index of the last entry to 
form the last boundary + + #print CallFunctionLocation + #pretty_print( ins[CallFunctionLocation[i][0]:CallFunctionLocation[i][1]] ) + # In our case i should always be the offset of a Call_Function instruction. We can use this to baracket + # the bit which we are interested in + + OutputVariableNames=[] + (offset, op, name, argument, argtype, argvalue) = ins[CallFunctionLocation[i][0] + 1] + if name == 'POP_TOP': # no Return Values + pass + #return OutputVariableNames + if name == 'STORE_FAST' or name == 'STORE_NAME': # One Return Value + OutputVariableNames.append(argvalue) + if name == 'UNPACK_SEQUENCE': # Many Return Values, One equal sign + for index in range(argvalue): + (offset_, op_, name_, argument_, argtype_, argvalue_) = ins[CallFunctionLocation[i][0] + 1 + 1 +index] + OutputVariableNames.append(argvalue_) + maxReturns = len(OutputVariableNames) + if name == 'DUP_TOP': # Many Return Values, Many equal signs + # The output here should be a multi-dim list which mimics the variable unpacking sequence. + # For instance a,b=c,d=f() => [ ['a','b'] , ['c','d'] ] + # a,b=c=d=f() => [ ['a','b'] , 'c','d' ] So on and so forth. + + # put this in a loop and stack the results in an array. 
+ count = 0; maxReturns = 0 # Must count the maxReturns ourselves in this case + while count < len(ins[CallFunctionLocation[i][0] :CallFunctionLocation[i][1]]): + (offset_, op_, name_, argument_, argtype_, argvalue_) = ins[CallFunctionLocation[i][0]+count] + #print 'i= ',i,'count = ', count, 'maxReturns = ',maxReturns + if name_ == 'UNPACK_SEQUENCE': # Many Return Values, One equal sign + hold=[] + #print 'argvalue_ = ', argvalue_, 'count = ',count + if argvalue_ > maxReturns: + maxReturns=argvalue_ + for index in range(argvalue_): + (_offset_, _op_, _name_, _argument_, _argtype_, _argvalue_) = ins[CallFunctionLocation[i][0] + count+1+index] + hold.append(_argvalue_) + count = count + argvalue_ + OutputVariableNames.append(hold) + # Need to now skip the entries we just appended with the for loop. + if name_ == 'STORE_FAST' or name_ == 'STORE_NAME': # One Return Value + if 1 > maxReturns: + maxReturns = 1 + OutputVariableNames.append(argvalue_) + count = count + 1 + + + # Now that OutputVariableNames is filled with the right stuff we need to output the correct thing. Either the maximum number of + # variables to unpack in the case of multiple ='s or just the length of the array or just the naames of the variables. + + if output== 'names': + return OutputVariableNames + elif output == 'number': + return maxReturns + elif output == 'both': + return (maxReturns,OutputVariableNames) + + return 0 # Should never get to here +