diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py index b8b09e7549e5c7dd2d377601a1cad53ce09e26b9..557fec9a92f6aeae24edfa9b6bb1e21bd11a19a2 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/ExportSampleLogsToCSVFile.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init,invalid-name +#pylint: disable=no-init,invalid-name,too-many-instance-attributes from mantid.api import * from mantid.kernel import * import os @@ -182,7 +182,7 @@ class ExportSampleLogsToCSVFile(PythonAlgorithm): ofile = open(self._outputfilename, "w") ofile.write(wbuf) ofile.close() - except IOError as err: + except IOError: raise NotImplementedError("Unable to write file %s. Check permission." % (self._outputfilename)) return @@ -232,7 +232,7 @@ class ExportSampleLogsToCSVFile(PythonAlgorithm): wbuf = "" currtimeindexes = [] - for i in xrange(len(logtimeslist)): + for dummy_i in xrange(len(logtimeslist)): currtimeindexes.append(0) nextlogindexes = [] @@ -271,7 +271,7 @@ class ExportSampleLogsToCSVFile(PythonAlgorithm): ofile = open(self._outputfilename, "w") ofile.write(wbuf) ofile.close() - except IOError as err: + except IOError: raise NotImplementedError("Unable to write file %s. Check permission." % (self._outputfilename)) return @@ -334,7 +334,6 @@ class ExportSampleLogsToCSVFile(PythonAlgorithm): wbuf = "%.6f\t%.6f\t" % (abstime, reltime) # Log valuess - tmplogvalues = [] for i in xrange(len(logvaluelist)): timeindex = currtimeindexes[i] if not i in nexttimelogindexes: diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GenerateGroupingSNSInelastic.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GenerateGroupingSNSInelastic.py index c01f61cbfe01f62396ba7682f469ae45ba0c6b96..f165df35ac187caa8d313e0bc285d91f3fd30a83 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GenerateGroupingSNSInelastic.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GenerateGroupingSNSInelastic.py @@ -3,7 +3,7 @@ import mantid import mantid.api import mantid.simpleapi import mantid.kernel -from numpy import arange +import numpy class GenerateGroupingSNSInelastic(mantid.api.PythonAlgorithm): @@ -35,7 +35,8 @@ class GenerateGroupingSNSInelastic(mantid.api.PythonAlgorithm): self.declareProperty("AlongTubes", "1",mantid.kernel.StringListValidator(py), "Number of pixels across tubes to be grouped") self.declareProperty("AcrossTubes", "1", mantid.kernel.StringListValidator(px), "Number of pixels across tubes to be grouped") - self.declareProperty("Instrument", instrument[0], mantid.kernel.StringListValidator(instrument), "The instrument for wich to create grouping") + self.declareProperty("Instrument", instrument[0], mantid.kernel.StringListValidator(instrument), + "The instrument for which to create grouping") f=mantid.api.FileProperty("Filename","",mantid.api.FileAction.Save,".xml") self.declareProperty(f,"Output filename.") @@ -65,7 +66,7 @@ class GenerateGroupingSNSInelastic(mantid.api.PythonAlgorithm): y=__w.extractY() numdet=(y[y==1]).size - spectra = arange(numdet).reshape(-1,8,128) + spectra = numpy.arange(numdet).reshape(-1,8,128) banks = numdet/8/128 @@ -76,9 +77,9 @@ class GenerateGroupingSNSInelastic(mantid.api.PythonAlgorithm): groupnum = 0 - for i in arange(banks): - for j in arange(8/pixelsx)*pixelsx: - for k in
arange(128/pixelsy)*pixelsy: + for i in numpy.arange(banks): + for j in numpy.arange(8/pixelsx)*pixelsx: + for k in numpy.arange(128/pixelsy)*pixelsy: groupname = str(groupnum) ids = spectra[i, j:j+pixelsx, k:k+pixelsy].reshape(-1) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GetEiT0atSNS.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GetEiT0atSNS.py index 582beaf012a82ded5ae58c5decf28dc40bc6055b..5f9552241b415c145c0bebc5cacde24ed7426059 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GetEiT0atSNS.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/GetEiT0atSNS.py @@ -22,7 +22,8 @@ class GetEiT0atSNS(mantid.api.PythonAlgorithm): def PyInit(self): """ Declare properties """ - self.declareProperty(mantid.api.WorkspaceProperty("MonitorWorkspace", "",direction=mantid.kernel.Direction.InOut), "Monitor workspace") + self.declareProperty(mantid.api.WorkspaceProperty("MonitorWorkspace", "",direction=mantid.kernel.Direction.InOut), + "Monitor workspace") self.declareProperty("IncidentEnergyGuess",-1.,doc="Incident energy guess") self.declareProperty("Ei",0.0,mantid.kernel.Direction.Output) self.declareProperty("T0",0.0,mantid.kernel.Direction.Output) @@ -72,8 +73,8 @@ class GetEiT0atSNS(mantid.api.PythonAlgorithm): wtemp=mantid.simpleapi.ChangeBinOffset(wm,t1f*16667,sp1,sp1) wtemp=mantid.simpleapi.ChangeBinOffset(wtemp,t2f*16667,sp2,sp2) wtemp=mantid.simpleapi.Rebin(InputWorkspace=wtemp,Params="1",PreserveEvents=True) - - alg=mantid.simpleapi.GetEi(InputWorkspace=wtemp,Monitor1Spec=sp1+1,Monitor2Spec=sp2+1,EnergyEstimate=EGuess) #Run GetEi algorithm + #Run GetEi algorithm + alg=mantid.simpleapi.GetEi(InputWorkspace=wtemp,Monitor1Spec=sp1+1,Monitor2Spec=sp2+1,EnergyEstimate=EGuess) Ei=alg[0] Tzero=alg[3] #Extract incident energy and T0 mantid.simpleapi.DeleteWorkspace(wtemp) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py index 88c8a5919965f9855030b4b6e001d1bc6e5f7541..d70dac37767eb635f380211488569c1dcbe1ba81 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LRSubtractAverageBackground.py @@ -48,7 +48,7 @@ class LRSubtractAverageBackground(PythonAlgorithm): x_range = self.getProperty("LowResolutionRange").value x_min = int(x_range[0]) x_max = int(x_range[1]) - + sum_peak = self.getProperty("SumPeak").value # Number of pixels in each direction @@ -81,7 +81,7 @@ class LRSubtractAverageBackground(PythonAlgorithm): YPixelMax=bck_max, ErrorWeighting = True, SumPixels=True, NormalizeSum=True) - + if right_bck is not None and left_bck is not None: average = (left_bck + right_bck) / 2.0 elif right_bck is not None: @@ -118,7 +118,7 @@ class LRSubtractAverageBackground(PythonAlgorithm): AnalysisDataService.remove(str(right_bck)) if AnalysisDataService.doesExist(average_name): AnalysisDataService.remove(average_name) - + self.setProperty('OutputWorkspace', workspace) AlgorithmFactory.subscribe(LRSubtractAverageBackground) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py index 7cc6c80d4e26a54fcf60cd361fe3cd3f6e26f913..697a69ed09ffe16c58caad6de1017210a5715ff4 100644 --- 
a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/LiquidsReflectometryReduction.py @@ -111,7 +111,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): file_list.append(data_file) runs = reduce((lambda x, y: '%s+%s' % (x, y)), file_list) ws_event_data = Load(Filename=runs, OutputWorkspace="REF_L_%s" % dataRunNumbers[0]) - + # Compute the primary fraction using the unprocessed workspace apply_primary_fraction = self.getProperty("ApplyPrimaryFraction").value primary_fraction = [1.0, 0.0] @@ -168,7 +168,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): normPeakRange, bck_request, normBackRange) # Avoid leaving trash behind AnalysisDataService.remove(str(ws_event_norm)) - + # Sum up the normalization peak norm_summed = SumSpectra(InputWorkspace = norm_cropped) norm_summed = RebinToWorkspace(WorkspaceToRebin=norm_summed, @@ -191,7 +191,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): normalized_data = ConvertToPointData(InputWorkspace=normalized_data, OutputWorkspace=str(normalized_data)) normalized_data.setDistribution(True) - + # Apply scaling factors normalized_data = self.apply_scaling_factor(normalized_data) @@ -231,7 +231,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): q_rebin = Rebin(InputWorkspace=q_workspace, Params=q_range, OutputWorkspace=name_output_ws) - # Apply the primary fraction + # Apply the primary fraction if apply_primary_fraction: ws_fraction = CreateSingleValuedWorkspace(DataValue=primary_fraction[0], ErrorValue=primary_fraction[1]) @@ -270,7 +270,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): # Clean up the workspace for backward compatibility data_y = q_rebin.dataY(0) data_e = q_rebin.dataE(0) - # Again for backward compatibility, the first and last points of the + # Again for backward compatibility, the first and last points of the # raw output when not cropping was simply set to 0 += 1. 
if crop is False: data_y[0] = 0 @@ -334,9 +334,9 @@ class LiquidsReflectometryReduction(PythonAlgorithm): error_msg += "[%g, %g] found [%g, %g]" % (tof_range[0], tof_range[1], tof_min, tof_max) raise RuntimeError, error_msg - + tof_step = self.getProperty("TOFSteps").value - workspace = Rebin(InputWorkspace=workspace, Params=[0, tof_step, tof_max], + workspace = Rebin(InputWorkspace=workspace, Params=[0, tof_step, tof_max], PreserveEvents=False, OutputWorkspace="%s_histo" % str(workspace)) # Crop TOF range @@ -404,7 +404,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): # Get the incident medium incident_medium = self.getProperty("IncidentMediumSelected").value - + # Get the wavelength lr = workspace.getRun().getProperty('LambdaRequest').value[0] lr_value = float("{0:.2f}".format(lr)) @@ -459,7 +459,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): scaling_data = open(scaling_factor_file, 'r') file_content = scaling_data.read() scaling_data.close() - + data_found = None for line in file_content.split('\n'): if line.startswith('#'): @@ -468,7 +468,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): # Parse the line of data and produce a dict toks = line.split() data_dict = reduce(_reduce, toks, {}) - + # Get ordered list of keys keys = [] for token in toks: @@ -483,7 +483,7 @@ class LiquidsReflectometryReduction(PythonAlgorithm): elif len(keys)<10: logger.error("Bad scaling factor entry\n %s" % line) continue - + # Sanity check if keys[0] != 'IncidentMedium' and keys[1] != 'LambdaRequested' \ and keys[2] != 'S1H': diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/MaskAngle.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/MaskAngle.py index 7cd1366266bd4243b24f958673bd8ad0f1a75fdd..91f2ae62b5b98b87d04b371d925c6cc27b065701 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/MaskAngle.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/MaskAngle.py @@ -24,12 +24,16 @@ class MaskAngle(mantid.api.PythonAlgorithm): return "Algorithm to mask detectors with scattering angles in a given interval (in degrees)." def PyInit(self): - self.declareProperty(mantid.api.WorkspaceProperty("Workspace", "",direction=mantid.kernel.Direction.Input,validator=mantid.api.InstrumentValidator()), "Input workspace") + self.declareProperty(mantid.api.WorkspaceProperty("Workspace", "",direction=mantid.kernel.Direction.Input, + validator=mantid.api.InstrumentValidator()), "Input workspace") angleValidator=mantid.kernel.FloatBoundedValidator() angleValidator.setBounds(0.,180.) 
- self.declareProperty(name="MinAngle", defaultValue=0.0, validator=angleValidator, direction=mantid.kernel.Direction.Input, doc="Angles above StartAngle are going to be masked") - self.declareProperty(name="MaxAngle", defaultValue=0.0, validator=angleValidator, direction=mantid.kernel.Direction.Input, doc="Angles above StartAngle are going to be masked") - self.declareProperty(mantid.kernel.IntArrayProperty(name="MaskedDetectors", direction=mantid.kernel.Direction.Output), doc="List of detector masked, with scatterin angles between MinAngle and MaxAngle") + self.declareProperty(name="MinAngle", defaultValue=0.0, validator=angleValidator, + direction=mantid.kernel.Direction.Input, doc="Angles above StartAngle are going to be masked") + self.declareProperty(name="MaxAngle", defaultValue=0.0, validator=angleValidator, + direction=mantid.kernel.Direction.Input, doc="Angles above StartAngle are going to be masked") + self.declareProperty(mantid.kernel.IntArrayProperty(name="MaskedDetectors", direction=mantid.kernel.Direction.Output), + doc="List of detector masked, with scatterin angles between MinAngle and MaxAngle") def PyExec(self): ws = self.getProperty("Workspace").value diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ApplyPaalmanPingsCorrection.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ApplyPaalmanPingsCorrection.py index d37d518831a02d8268a2b68529381316ec9b503d..992b5f8ce74cbf8e684dc6b9a7a54f52f0cc7d60 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ApplyPaalmanPingsCorrection.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ApplyPaalmanPingsCorrection.py @@ -1,3 +1,4 @@ +#pylint: disable=no-init,too-many-instance-attributes from mantid.simpleapi import * from mantid.api import PythonAlgorithm, AlgorithmFactory, MatrixWorkspaceProperty, WorkspaceGroupProperty, \ PropertyMode, MatrixWorkspace @@ -27,23 +28,21 @@ class ApplyPaalmanPingsCorrection(PythonAlgorithm): def PyInit(self): - self.declareProperty(MatrixWorkspaceProperty('SampleWorkspace', '', - direction=Direction.Input), + self.declareProperty(MatrixWorkspaceProperty('SampleWorkspace', '', direction=Direction.Input), doc='Name for the input Sample workspace.') self.declareProperty(WorkspaceGroupProperty('CorrectionsWorkspace', '', - optional=PropertyMode.Optional, direction=Direction.Input), + optional=PropertyMode.Optional, direction=Direction.Input), doc='Name for the input Corrections workspace.') self.declareProperty(MatrixWorkspaceProperty('CanWorkspace', '', - optional=PropertyMode.Optional, direction=Direction.Input), + optional=PropertyMode.Optional, direction=Direction.Input), doc='Name for the input Can workspace.') self.declareProperty(name='CanScaleFactor', defaultValue=1.0, doc='Factor to scale the can data') - self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', - direction=Direction.Output), + self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', direction=Direction.Output), doc='The output corrections workspace.') diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/CreateMD.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/CreateMD.py index dcc85255c6ee9758beeb27baecb5e916e3a67e39..33a68dfd8f5fd9d597606706c2560d8732257289 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/CreateMD.py +++ 
b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/CreateMD.py @@ -84,7 +84,7 @@ class CreateMD(DataProcessorAlgorithm): if input_workspace.sample().hasOrientedLattice(): logger.warning("Sample already has a UB. This will not be overwritten by %s. Use ClearUB and re-run."%self.name()) else: - self._set_ub(workspace=input_workspace, a=alatt[0], b=alatt[1], c=alatt[2], + self._set_ub(workspace=input_workspace, a=alatt[0], b=alatt[1], c=alatt[2], alpha=angdeg[0], beta=angdeg[1], gamma=angdeg[2], u=u, v=v) if any(goniometer_params): @@ -109,19 +109,27 @@ class CreateMD(DataProcessorAlgorithm): self.declareProperty('Emode', defaultValue='Direct', validator=StringListValidator(self._possible_emodes()), direction=Direction.Input, doc='Analysis mode ' + str(self._possible_emodes()) ) - self.declareProperty(FloatArrayProperty('Alatt', values=[], validator=FloatArrayMandatoryValidator(), direction=Direction.Input ), doc='Lattice parameters' ) + self.declareProperty(FloatArrayProperty('Alatt', values=[], validator=FloatArrayMandatoryValidator(), + direction=Direction.Input ), doc='Lattice parameters' ) - self.declareProperty(FloatArrayProperty('Angdeg', values=[], validator=FloatArrayMandatoryValidator(), direction=Direction.Input ), doc='Lattice angles' ) + self.declareProperty(FloatArrayProperty('Angdeg', values=[], validator=FloatArrayMandatoryValidator(), + direction=Direction.Input ), doc='Lattice angles' ) - self.declareProperty(FloatArrayProperty('u', values=[], validator=FloatArrayMandatoryValidator(), direction=Direction.Input ), doc='Lattice vector parallel to neutron beam' ) + self.declareProperty(FloatArrayProperty('u', values=[], validator=FloatArrayMandatoryValidator(), + direction=Direction.Input ), doc='Lattice vector parallel to neutron beam' ) - self.declareProperty(FloatArrayProperty('v', values=[], validator=FloatArrayMandatoryValidator(), direction=Direction.Input ), doc='Lattice vector perpendicular to neutron beam in the horizontal plane' ) + self.declareProperty(FloatArrayProperty('v', values=[], validator=FloatArrayMandatoryValidator(), + direction=Direction.Input ), + doc='Lattice vector perpendicular to neutron beam in the horizontal plane' ) - self.declareProperty(FloatArrayProperty('Psi', values=[], direction=Direction.Input), doc='Psi rotation in degrees. Optional or one entry per run.' ) + self.declareProperty(FloatArrayProperty('Psi', values=[], direction=Direction.Input), + doc='Psi rotation in degrees. Optional or one entry per run.' ) - self.declareProperty(FloatArrayProperty('Gl', values=[], direction=Direction.Input), doc='gl rotation in degrees. Optional or one entry per run.' ) + self.declareProperty(FloatArrayProperty('Gl', values=[], direction=Direction.Input), + doc='gl rotation in degrees. Optional or one entry per run.' ) - self.declareProperty(FloatArrayProperty('Gs', values=[], direction=Direction.Input), doc='gs rotation in degrees. Optional or one entry per run.' ) + self.declareProperty(FloatArrayProperty('Gs', values=[], direction=Direction.Input), + doc='gs rotation in degrees. Optional or one entry per run.' 
) self.declareProperty(IMDWorkspaceProperty('OutputWorkspace', '', direction=Direction.Output ), doc='Output MDWorkspace') @@ -178,33 +186,34 @@ class CreateMD(DataProcessorAlgorithm): v = self.getProperty('v').value psi = self.getProperty('Psi').value gl = self.getProperty('Gl').value - gs = self.getProperty('Gs').value - + gs = self.getProperty('Gs').value + input_workspaces = self.getProperty("InputWorkspaces").value - + ws_entries = len(input_workspaces) - + self._validate_inputs() - + if len(psi) == 0: psi = [0.0] * ws_entries - + if len(gl) == 0: gl = [0.0] * ws_entries - + if len(gs) == 0: gs = [0.0] * ws_entries - + output_workspace = None run_md = None to_merge_names = list() - + run_data = zip(input_workspaces, psi, gl, gs) for run_entry in run_data: ws_name, psi_entry, gl_entry, gs_entry = run_entry ws = AnalysisDataService.retrieve(ws_name) - run_md = self._single_run(input_workspace=ws, emode=emode, alatt=alatt, angdeg=angdeg, u=u, v=v, psi=psi_entry, gl=gl_entry, gs=gs_entry) + run_md = self._single_run(input_workspace=ws, emode=emode, alatt=alatt, angdeg=angdeg, u=u, v=v, + psi=psi_entry, gl=gl_entry, gs=gs_entry) to_merge_name = ws_name + "_md" AnalysisDataService.addOrReplace(to_merge_name, run_md) to_merge_names.append(to_merge_name) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DensityOfStates.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DensityOfStates.py index 3722e26c75cdf9525ff876ccc9e2261c239d5b89..ad1ce52e9f9756d412860593395737826f8ba27b 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DensityOfStates.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DensityOfStates.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init,invalid-name +#pylint: disable=no-init,invalid-name,anomalous-backslash-in-string from mantid.kernel import * from mantid.api import * from mantid.simpleapi import * @@ -149,9 +149,7 @@ class DensityOfStates(PythonAlgorithm): if k in self._ions: partial_ions[k] = v - partial_workspaces, sum_workspace = self._compute_partial_ion_workflow( - partial_ions, frequencies, - eigenvectors, weights) + partial_workspaces, sum_workspace = self._compute_partial_ion_workflow(partial_ions, frequencies, eigenvectors, weights) if self._sum_contributions: # Discard the partial workspaces @@ -174,9 +172,7 @@ class DensityOfStates(PythonAlgorithm): eigenvectors = file_data[4] - partial_workspaces, sum_workspace = self._compute_partial_ion_workflow( - self._ion_dict, frequencies, - eigenvectors, weights) + partial_workspaces, sum_workspace = self._compute_partial_ion_workflow(self._ion_dict, frequencies, eigenvectors, weights) # Discard the partial workspaces for partial_ws in partial_workspaces: @@ -459,7 +455,7 @@ class DensityOfStates(PythonAlgorithm): weights = weights[:self._num_branches] # Speed of light in vaccum in m/s - c = scipy.constants.c + #c = scipy.constants.c #unused for now # Wavelength of the laser laser_wavelength = 514.5e-9 # Planck's constant diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ElasticWindowMultiple.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ElasticWindowMultiple.py index 537d76c3e484a58abd12dd3fa5486188bb8f664c..d98f568175e3981e77b9b1f52b611ee35f9420a1 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ElasticWindowMultiple.py +++ 
b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ElasticWindowMultiple.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init +#pylint: disable=no-init,too-many-instance-attributes,too-many-branches from mantid.simpleapi import * from mantid.kernel import * from mantid.api import * diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/HFIRSANSReduction.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/HFIRSANSReduction.py index 901625ad06ca4cc010c47f3fccb5112c477c09a5..196b2efd97a91313849582f0a5245a2d92a6adeb 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/HFIRSANSReduction.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/HFIRSANSReduction.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init,invalid-name +#pylint: disable=no-init,invalid-name,too-many-branches import mantid.simpleapi as api from mantid.api import * from mantid.kernel import * @@ -59,7 +59,7 @@ class HFIRSANSReduction(PythonAlgorithm): if i==0: output_str += _load_data(data_file[i], workspace) # Use the first file location as the default output directory - head, tail = os.path.split(data_file[0]) + head, dummy_tail = os.path.split(data_file[0]) if os.path.isdir(head): self.default_output_dir = head else: diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectEnergyTransfer.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectEnergyTransfer.py index 3979814f91c1314cdcffa186061997875fb3c8ac..419418d24e831de9b6ea2c0003b24f35b5d48cda 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectEnergyTransfer.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ISISIndirectEnergyTransfer.py @@ -1,3 +1,4 @@ +#pylint: disable=invalid-name,attribute-defined-outside-init,too-many-instance-attributes,too-many-branches from mantid.kernel import * from mantid.api import * from mantid.simpleapi import * @@ -29,8 +30,8 @@ class ISISIndirectEnergyTransfer(DataProcessorAlgorithm): self.declareProperty(name='SumFiles', defaultValue=False, doc='Toggle input file summing or sequential processing') - self.declareProperty(WorkspaceProperty('CalibrationWorkspace', '', - direction=Direction.Input, optional=PropertyMode.Optional), doc='Workspace contining calibration data') + self.declareProperty(WorkspaceProperty('CalibrationWorkspace', '', direction=Direction.Input, optional=PropertyMode.Optional), + doc='Workspace containing calibration data') # Instrument configuration properties self.declareProperty(name='Instrument', defaultValue='', doc='Instrument used during run.', @@ -40,8 +41,7 @@ class ISISIndirectEnergyTransfer(DataProcessorAlgorithm): self.declareProperty(name='Reflection', defaultValue='', doc='Reflection number for instrument setup during run.', validator=StringListValidator(['002', '004', '006'])) - self.declareProperty(IntArrayProperty(name='SpectraRange', values=[0, 1], - validator=IntArrayMandatoryValidator()), + self.declareProperty(IntArrayProperty(name='SpectraRange', values=[0, 1],validator=IntArrayMandatoryValidator()), doc='Comma separated range of spectra number to use.') self.declareProperty(FloatArrayProperty(name='BackgroundRange'), doc='Range of background to subtact from raw data in time of flight.') @@ -55,11 +55,9 @@
self.declareProperty(name='GroupingMethod', defaultValue='IPF', validator=StringListValidator(['Individual', 'All', 'File', 'Workspace', 'IPF']), doc='Method used to group spectra.') - self.declareProperty(WorkspaceProperty('GroupingWorkspace', '', - direction=Direction.Input, optional=PropertyMode.Optional), + self.declareProperty(WorkspaceProperty('GroupingWorkspace', '', direction=Direction.Input, optional=PropertyMode.Optional), doc='Workspace containing spectra grouping.') - self.declareProperty(FileProperty('MapFile', '', - action=FileAction.OptionalLoad, extensions=['.map']), + self.declareProperty(FileProperty('MapFile', '', action=FileAction.OptionalLoad, extensions=['.map']), doc='Workspace containing spectra grouping.') # Output properties @@ -69,8 +67,7 @@ class ISISIndirectEnergyTransfer(DataProcessorAlgorithm): self.declareProperty(name='Plot', defaultValue='None', doc='Type of plot to output after reduction.', validator=StringListValidator(['None', 'Spectra', 'Contour', 'Both'])) - self.declareProperty(WorkspaceGroupProperty('OutputWorkspace', '', - direction=Direction.Output), + self.declareProperty(WorkspaceGroupProperty('OutputWorkspace', '', direction=Direction.Output), doc='Workspace group for the resulting workspaces.') diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectFlatPlateAbsorption.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectFlatPlateAbsorption.py index 4651084b326a647d8b49c97c2de61c46865b40cb..3034d13f565467cdd277e285d1f12f913ef6e8ea 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectFlatPlateAbsorption.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/IndirectFlatPlateAbsorption.py @@ -1,3 +1,4 @@ +#pylint: disable=no-init,too-many-instance-attributes,too-many-branches from mantid.simpleapi import * from mantid.api import DataProcessorAlgorithm, AlgorithmFactory, MatrixWorkspaceProperty, PropertyMode, Progress, WorkspaceGroupProperty from mantid.kernel import StringMandatoryValidator, Direction, logger, FloatBoundedValidator @@ -139,14 +140,14 @@ class IndirectFlatPlateAbsorption(DataProcessorAlgorithm): SetSampleMaterial(can_wave_ws, ChemicalFormula=self._can_chemical_formula, SampleNumberDensity=self._can_number_density) FlatPlateAbsorption(InputWorkspace=can_wave_ws, - OutputWorkspace=self._acc_ws, - SampleHeight=self._sample_height, - SampleWidth=self._sample_width, - SampleThickness=self._can_front_thickness + self._can_back_thickness, - ElementSize=self._element_size, - EMode='Indirect', - EFixed=efixed, - NumberOfWavelengthPoints=10) + OutputWorkspace=self._acc_ws, + SampleHeight=self._sample_height, + SampleWidth=self._sample_width, + SampleThickness=self._can_front_thickness + self._can_back_thickness, + ElementSize=self._element_size, + EMode='Indirect', + EFixed=efixed, + NumberOfWavelengthPoints=10) Divide(LHSWorkspace=can_wave_ws, RHSWorkspace=self._acc_ws, OutputWorkspace=can_wave_ws) Minus(LHSWorkspace=sample_wave_ws, RHSWorkspace=can_wave_ws, OutputWorkspace=sample_wave_ws) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py index af5b458c3459c8c4719eef3db62fb007eeedeeb4..de614e7f285ad987f816809deabf051835ea37b8 100644 --- 
a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/OSIRISDiffractionReduction.py @@ -121,8 +121,8 @@ def getIntersectionsOfRanges(rangeList): at the same point. Also, all ranges should obey range[0] <= range[1]. """ # Sanity check. - for range in rangeList: - assert len(range) == 2, "Unable to find the intersection of a malformed range." + for myrange in rangeList: + assert len(myrange) == 2, "Unable to find the intersection of a malformed range." # Find all combinations of ranges, and see where they intersect. rangeCombos = list(itertools.combinations(rangeList, 2)) @@ -137,8 +137,8 @@ def getIntersectionsOfRanges(rangeList): return intersections def isInRanges(rangeList, n): - for range in rangeList: - if range[0] < n < range[1]: + for myrange in rangeList: + if myrange[0] < n < myrange[1]: return True return False @@ -157,8 +157,9 @@ class OSIRISDiffractionReduction(PythonAlgorithm): return 'Diffraction;PythonAlgorithms' def summary(self): - return "This Python algorithm performs the operations necessary for the reduction of diffraction data from the Osiris instrument at ISIS \ - into dSpacing, by correcting for the monitor and linking the various d-ranges together." + return "This Python algorithm performs the operations necessary for the reduction of diffraction data \ + from the Osiris instrument at ISIS \ + into dSpacing, by correcting for the monitor and linking the various d-ranges together." def PyInit(self): runs_desc='The list of run numbers that are part of the sample run. \ @@ -166,9 +167,11 @@ class OSIRISDiffractionReduction(PythonAlgorithm): self.declareProperty('Sample', '', doc=runs_desc) self.declareProperty('Vanadium', '', doc=runs_desc) self.declareProperty(FileProperty('CalFile', '', action=FileAction.Load), - doc='Filename of the .cal file to use in the [[AlignDetectors]] and [[DiffractionFocussing]] child algorithms.') + doc='Filename of the .cal file to use in the [[AlignDetectors]] and \ + [[DiffractionFocussing]] child algorithms.') self.declareProperty(MatrixWorkspaceProperty('OutputWorkspace', '', Direction.Output), - doc="Name to give the output workspace. If no name is provided, one will be generated based on the run numbers.") + doc="Name to give the output workspace. If no name is provided, \ + one will be generated based on the run numbers.") self._cal = None self._outputWsName = None @@ -196,8 +199,8 @@ class OSIRISDiffractionReduction(PythonAlgorithm): self._vans = self.findRuns(self.getPropertyValue("Vanadium")) # Load all sample and vanadium files, and add the resulting workspaces to the DRangeToWsMaps. 
- for file in self._sams + self._vans: - Load(Filename=file, OutputWorkspace=file, SpectrumMin=3, SpectrumMax=962) + for fileName in self._sams + self._vans: + Load(Filename=fileName, OutputWorkspace=fileName, SpectrumMin=3, SpectrumMax=962) for sam in self._sams: self._samMap.addWs(sam) for van in self._vans: @@ -255,13 +258,16 @@ class OSIRISDiffractionReduction(PythonAlgorithm): intersections = getIntersectionsOfRanges(self._samMap.getMap().keys()) dataX = result.dataX(0) - dataY = []; dataE = [] + dataY = [] + dataE = [] for i in range(0, len(dataX)-1): x = ( dataX[i] + dataX[i+1] ) / 2.0 if isInRanges(intersections, x): - dataY.append(2); dataE.append(2) + dataY.append(2) + dataE.append(2) else: - dataY.append(1); dataE.append(1) + dataY.append(1) + dataE.append(1) # apply scalar data to result workspace for i in range(0, result.getNumberHistograms()): diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/QLines.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/QLines.py index 0d8c34b1def5f952f361ddbc07af8c30def559c7..d7c02fa003d9e27f00f4bccf2dff930c69befb2d 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/QLines.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/QLines.py @@ -14,18 +14,25 @@ class QLines(PythonAlgorithm): return "The program estimates the quasielastic components of each of the groups of spectra and requires the resolution file (.RES file) and optionally the normalisation file created by ResNorm." def PyInit(self): - self.declareProperty(name='InputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), doc='Origin of data input - File (.nxs) or Workspace') - self.declareProperty(name='Instrument',defaultValue='iris',validator=StringListValidator(['irs','iris','osi','osiris']), doc='Instrument') - self.declareProperty(name='Analyser',defaultValue='graphite002',validator=StringListValidator(['graphite002','graphite004']), doc='Analyser & reflection') + self.declareProperty(name='InputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), + doc='Origin of data input - File (.nxs) or Workspace') + self.declareProperty(name='Instrument',defaultValue='iris',validator=StringListValidator(['irs','iris','osi','osiris']), + doc='Instrument') + self.declareProperty(name='Analyser',defaultValue='graphite002',validator=StringListValidator(['graphite002','graphite004']), + doc='Analyser & reflection') self.declareProperty(name='Program',defaultValue='QL',validator=StringListValidator(['QL','QSe']), doc='Name of program to run') self.declareProperty(name='SamNumber',defaultValue='',validator=StringMandatoryValidator(), doc='Sample run number') - self.declareProperty(name='ResInputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), doc='Origin of res input - File (_res.nxs) or Workspace') - self.declareProperty(name='ResType',defaultValue='Res',validator=StringListValidator(['Res','Data']), doc='Format of Resolution file') + self.declareProperty(name='ResInputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), + doc='Origin of res input - File (_res.nxs) or Workspace') + self.declareProperty(name='ResType',defaultValue='Res',validator=StringListValidator(['Res','Data']), + doc='Format of Resolution file') self.declareProperty(name='ResNumber',defaultValue='',validator=StringMandatoryValidator(), doc='Resolution run number') 
self.declareProperty(name='ResNorm',defaultValue=False, doc='Use ResNorm output file') - self.declareProperty(name='ResNormInputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), doc='Origin of ResNorm input - File (_red.nxs) or Workspace') + self.declareProperty(name='ResNormInputType',defaultValue='File',validator=StringListValidator(['File','Workspace']), + doc='Origin of ResNorm input - File (_red.nxs) or Workspace') self.declareProperty(name='ResNormNumber',defaultValue='', doc='ResNorm run number') - self.declareProperty(name='BackgroundOption',defaultValue='Sloping',validator=StringListValidator(['Sloping','Flat','Zero']), doc='Form of background to fit') + self.declareProperty(name='BackgroundOption',defaultValue='Sloping',validator=StringListValidator(['Sloping','Flat','Zero']), + doc='Form of background to fit') self.declareProperty(name='ElasticOption',defaultValue=True, doc='Include elastic peak in fit') self.declareProperty(name='FixWidth',defaultValue=False, doc='Fix one of the widths') self.declareProperty(name='WidthFile', defaultValue='', doc='Name of file containing fixed width values') @@ -34,10 +41,12 @@ class QLines(PythonAlgorithm): self.declareProperty(name='SamBinning', defaultValue=1, doc='Binning value (integer) for sample. Default=1') self.declareProperty(name='ResBinning', defaultValue=1, doc='Binning value (integer) for resolution - QLd only. Default=1') self.declareProperty(name='Sequence',defaultValue=True, doc='Switch Sequence Off/On') - self.declareProperty(name='Plot',defaultValue='None',validator=StringListValidator(['None','ProbBeta','Intensity','FwHm','Fit','All']), doc='Plot options') + self.declareProperty(name='Plot',defaultValue='None',validator=StringListValidator(['None','ProbBeta','Intensity','FwHm','Fit','All']), + doc='Plot options') self.declareProperty(name='Verbose',defaultValue=True, doc='Switch Verbose Off/On') self.declareProperty(name='Save',defaultValue=False, doc='Switch Save result to nxs file Off/On') + #pylint: disable=too-many-locals def PyExec(self): from IndirectImport import run_f2py_compatibility_test, is_supported_f2py_platform diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ReactorSANSResolution.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ReactorSANSResolution.py index 2ba70394d509ab67ac7739067a6a03a85960e6cd..2f61d9affc97e83beec7f32990fdea31cba5fd50 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ReactorSANSResolution.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/ReactorSANSResolution.py @@ -65,7 +65,8 @@ class ReactorSANSResolution(PythonAlgorithm): and source_sample_distance is not None and sample_detector_distance is not None: k = 2.0*math.pi/wvl res_factor = math.pow(k*source_apert_radius/source_sample_distance, 2) - res_factor += (math.pow(k*sample_apert_radius*(source_sample_distance+sample_detector_distance)/(source_sample_distance*sample_detector_distance), 2)/4.0) + res_factor += (math.pow(k*sample_apert_radius*(source_sample_distance+sample_detector_distance)/ + (source_sample_distance*sample_detector_distance), 2)/4.0) res_factor += math.pow(k*pixel_size_x/sample_detector_distance, 2)/12.0 for i in range(len(input_ws.readX(0))): diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANSAzimuthalAverage1D.py 
b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANSAzimuthalAverage1D.py index 88d7baf2b75ea81986419404c4b15584de63933e..b7963e100f1f0cf5e4061b66e318d824697f7613 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANSAzimuthalAverage1D.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/SANSAzimuthalAverage1D.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init,invalid-name +#pylint: disable=no-init,invalid-name,too-many-locals,too-many-branches from mantid.api import * from mantid.kernel import * import math diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/TransformToIqt.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/TransformToIqt.py index d8ec5def96a9334a6c2330ae2ed9e83556a85ee0..125a598495d33f64476f15a5df451bb92348000b 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/TransformToIqt.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/TransformToIqt.py @@ -1,4 +1,4 @@ -#pylint: disable=no-init +#pylint: disable=no-init,too-many-instance-attributes from mantid.simpleapi import * from mantid.api import PythonAlgorithm, AlgorithmFactory, MatrixWorkspaceProperty, PropertyMode from mantid.kernel import Direction, logger @@ -162,8 +162,7 @@ class TransformToIqt(PythonAlgorithm): except (AttributeError, IndexError): resolution = 0.0175 - logger.warning('Could not get resolution from IPF, using default value: %f' % ( - resolution)) + logger.warning('Could not get resolution from IPF, using default value: %f' % (resolution)) resolution_bins = int(round((2 * resolution) / self._e_width)) diff --git a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/dnsdata.py b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/dnsdata.py index 852d1205ceb1b945315a90eef52bb6b77af1474c..347425c13bfc40292eed0bc7007a3cde9f8bc981 100644 --- a/Code/Mantid/Framework/PythonInterface/plugins/algorithms/dnsdata.py +++ b/Code/Mantid/Framework/PythonInterface/plugins/algorithms/dnsdata.py @@ -1,7 +1,8 @@ +#pylint: disable=invalid-name,too-many-instance-attributes,too-few-public-methods,anomalous-backslash-in-string import sys, re import datetime -class DNSdata: +class DNSdata(object): """ class which describes the DNS data structure will be used for data read-in and write-out routines diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py index 89d572b3894d289b78255ec4fd196e7bd9796379..a62dd1c28ee351050279198cf4b87e7c3bd937a3 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/ISIS_LETReduction.py @@ -31,7 +31,7 @@ def find_binning_range(energy,ebin): mult=2.8868 dt_DAE = 1 else: - raise RuntimeError("Find_binning_range: unsupported/unknown instrument found") + raise RuntimeError("Find_binning_range: unsupported/unknown instrument found") energy=float(energy) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py index 53011cea6922691f85c60eeeb555b9522a97f73f..43807ffb1be3e6d715a162863133756cfbe4d496 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/POLDIAnalyseResidualsTest.py @@ -33,9 +33,8 @@ 
class POLDIAnalyseResidualsTest(stresstesting.MantidStressTest): referenceData = mtd["%s_fortran_residuals" % (dataFile)].dataY(0) calculatedData = mtd["%sResiduals" % (dataFile)].dataY(0) - self.assertEqual(calculatedData.shape[0], referenceData.shape[0], - "Number of d-values does not match for %s (is: %i, should: %i)" % ( - dataFile, calculatedData.shape[0], referenceData.shape[0])) + self.assertEqual(calculatedData.shape[0], referenceData.shape[0],"Number of d-values does not match for %s \ + (is: %i, should: %i)" % (dataFile, calculatedData.shape[0], referenceData.shape[0])) CreateWorkspace(referenceData, calculatedData, OutputWorkspace=workspaceNameTemplate) @@ -49,13 +48,13 @@ class POLDIAnalyseResidualsTest(stresstesting.MantidStressTest): self.assertDelta(slope, 1.0, 1e-2, "Slope is larger than 1.0 for %s (is: %d)" % (dataFile, slope)) relativeSlopeError = fitResult.cell(1, 2) / slope - self.assertLessThan(relativeSlopeError, 5e-3, "Relative error of slope is too large for %s (is: %d)" % ( - dataFile, relativeSlopeError)) + self.assertLessThan(relativeSlopeError, 5e-3, + "Relative error of slope is too large for %s (is: %d)" % (dataFile, relativeSlopeError)) intercept = fitResult.cell(0, 1) self.assertDelta(intercept, 0.0, 1e-3, "Intercept deviates too far from 0 %s (is: %d)" % (dataFile, intercept)) residuals = mtd[fitNameTemplate + "_Workspace"].dataY(2) maxAbsoluteResidual = np.max(np.abs(residuals)) - self.assertLessThan(maxAbsoluteResidual, 1.0, "Maximum absolute residual is too large for %s (is: %d)" % ( - dataFile, maxAbsoluteResidual)) + self.assertLessThan(maxAbsoluteResidual, 1.0, + "Maximum absolute residual is too large for %s (is: %d)" % (dataFile, maxAbsoluteResidual)) diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py b/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py index 9028c273df3661460d918baed6886105f692e40f..80bd80cc4b45366cf6a7d201ab94b00a7ca21655 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/REFMReduction.py @@ -7,22 +7,22 @@ from mantid.simpleapi import * class REFMReduction(stresstesting.MantidStressTest): def runTest(self): RefReduction(DataRun=str(9709), - NormalizationRun=str(9684), - SignalPeakPixelRange=[216, 224], - SubtractSignalBackground=True, - SignalBackgroundPixelRange=[172, 197], - PerformNormalization=True, - NormPeakPixelRange=[226, 238], - NormBackgroundPixelRange=[130, 183], - SubtractNormBackground=False, - CropLowResDataAxis=True, - CropLowResNormAxis=False, - LowResDataAxisPixelRange = [86, 159], - NBins=40, - Theta=0.086, - PolarizedData=True, - Instrument="REF_M", - OutputWorkspacePrefix='reflectivity') + NormalizationRun=str(9684), + SignalPeakPixelRange=[216, 224], + SubtractSignalBackground=True, + SignalBackgroundPixelRange=[172, 197], + PerformNormalization=True, + NormPeakPixelRange=[226, 238], + NormBackgroundPixelRange=[130, 183], + SubtractNormBackground=False, + CropLowResDataAxis=True, + CropLowResNormAxis=False, + LowResDataAxisPixelRange = [86, 159], + NBins=40, + Theta=0.086, + PolarizedData=True, + Instrument="REF_M", + OutputWorkspacePrefix='reflectivity') def validate(self): # Be more tolerant with the output, mainly because of the errors. 
diff --git a/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py b/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py index 4d4b02f15b9f61b9a9d3aa1d28a2c9a7808dc809..179af12b8d021fa757c80a8b1c7667e56448ac27 100644 --- a/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py +++ b/Code/Mantid/Testing/SystemTests/tests/analysis/SphinxWarnings.py @@ -12,30 +12,30 @@ class SphinxWarnings(stresstesting.MantidStressTest): def __init__(self): stresstesting.MantidStressTest.__init__(self) self.allowedCategories=['Arithmetic', - 'CorrectionFunctions', - 'Crystal', - 'DataHandling', - 'Diagnostics', - 'Diffraction', - 'Events', - 'Examples', - 'ISIS', - 'Inelastic', - 'MDAlgorithms', - 'MPI', - 'Muon', - 'Optimization', - 'PythonAlgorithms', - 'Quantification', - 'Reflectometry', - 'Remote', - 'SANS', - 'Simulation', - 'SINQ', - 'Sample', - 'Transforms', - 'Utility', - 'Workflow'] + 'CorrectionFunctions', + 'Crystal', + 'DataHandling', + 'Diagnostics', + 'Diffraction', + 'Events', + 'Examples', + 'ISIS', + 'Inelastic', + 'MDAlgorithms', + 'MPI', + 'Muon', + 'Optimization', + 'PythonAlgorithms', + 'Quantification', + 'Reflectometry', + 'Remote', + 'SANS', + 'Simulation', + 'SINQ', + 'Sample', + 'Transforms', + 'Utility', + 'Workflow'] self.errorMessage="" def checkString(self,s): diff --git a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py index 7e932d99679652b0df16fb9843757a9ebe281560..4ba963d811dab6b594b77dec50781d4c930337e3 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py +++ b/Code/Mantid/scripts/Inelastic/Direct/DirectEnergyConversion.py @@ -635,7 +635,7 @@ class DirectEnergyConversion(object): data_ws = data_run.get_workspace() monitor_ws = data_run.get_monitors_ws() if monitor_ws is None: - raise RuntimeError("Can not find monitors workspace for workspace {0}, run N{1}".\ + raise RuntimeError("Can not find monitors workspace for workspace {0}, run N{1}".\ format(data_ws.name(),data_ws.getRunNumber())) separate_monitors = data_run.is_monws_separate() data_run.set_action_suffix('_shifted') @@ -653,8 +653,8 @@ class DirectEnergyConversion(object): # instrument is shifted in case it is shifted to this monitor (usual # case) #Find TOF range, correspondent to incident energy monitor peak - energy_rage = self.mon2_norm_energy_range - self._mon2_norm_time_range = self.get_TOF_for_energies(monitor_ws,energy_rage,\ + energy_rage = self.mon2_norm_energy_range + self._mon2_norm_time_range = self.get_TOF_for_energies(monitor_ws,energy_rage,\ [self.mon2_norm_spec],None,self._debug_mode) #end if separate_monitors: @@ -715,16 +715,16 @@ class DirectEnergyConversion(object): method = method.lower() for case in common.switch(method): if case('monitor-1'): - method,old_ws_name = self._normalize_to_monitor1(run,old_ws_name, range_offset,external_monitors_ws) - break + method,old_ws_name = self._normalize_to_monitor1(run,old_ws_name, range_offset,external_monitors_ws) + break if case('monitor-2'): - method,old_ws_name = self._normalize_to_monitor2(run,old_ws_name, range_offset,external_monitors_ws) - break + method,old_ws_name = self._normalize_to_monitor2(run,old_ws_name, range_offset,external_monitors_ws) + break if case('current'): NormaliseByCurrent(InputWorkspace=old_ws_name,OutputWorkspace=old_ws_name) break if case(): # default - raise RuntimeError('Normalization method {0} not found. 
It must be one of monitor-1, monitor-2, current, or None'.format(method)) + raise RuntimeError('Normalization method {0} not found. It must be one of monitor-1, monitor-2, current, or None'.format(method)) #endCase @@ -741,29 +741,29 @@ class DirectEnergyConversion(object): # get monitor's workspace separate_monitors = run.is_monws_separate() if external_monitor_ws: - separate_monitors = True - mon_ws = external_monitor_ws + separate_monitors = True + mon_ws = external_monitor_ws else: - mon_ws = run.get_monitors_ws() + mon_ws = run.get_monitors_ws() if not mon_ws: # no monitors - if self.__in_white_normalization: # we can normalize wb integrals by current separately as they often do not + if self.__in_white_normalization: # we can normalize wb integrals by current separately as they often do not # have monitors - self.normalise(run,'current',range_offset) - ws = run.get_workspace() - new_name = ws.name() - return ('current',new_name) - else: - ws = run.get_workspace() - raise RuntimeError('Normalise by monitor-1:: Workspace {0} for run {1} does not have monitors in it'\ + self.normalise(run,'current',range_offset) + ws = run.get_workspace() + new_name = ws.name() + return ('current',new_name) + else: + ws = run.get_workspace() + raise RuntimeError('Normalise by monitor-1:: Workspace {0} for run {1} does not have monitors in it'\ .format(ws.name(),run.run_number())) range = self.norm_mon_integration_range if self._debug_mode: - kwargs = {'NormFactorWS':'NormMon1_WS' + data_ws.getName()} + kwargs = {'NormFactorWS':'NormMon1_WS' + data_ws.getName()} else: - kwargs = {} + kwargs = {} mon_spect = self.prop_man.mon1_norm_spec if separate_monitors: @@ -787,28 +787,28 @@ class DirectEnergyConversion(object): # get monitor's workspace separate_monitors = run.is_monws_separate() if external_monitor_ws: - separate_monitors = True - mon_ws = external_monitor_ws + separate_monitors = True + mon_ws = external_monitor_ws else: - mon_ws = run.get_monitors_ws() + mon_ws = run.get_monitors_ws() if not mon_ws: # no monitors - if self.__in_white_normalization: # we can normalize wb integrals by current separately as they often do not + if self.__in_white_normalization: # we can normalize wb integrals by current separately as they often do not # have monitors - self.normalise(run,'current',range_offset) - ws = run.get_workspace() - new_name = ws.name() - return ('current',new_name) - else: - ws = run.get_workspace() - raise RuntimeError('Normalize by monitor-2:: Workspace {0} for run {1} does not have monitors in it'\ + self.normalise(run,'current',range_offset) + ws = run.get_workspace() + new_name = ws.name() + return ('current',new_name) + else: + ws = run.get_workspace() + raise RuntimeError('Normalize by monitor-2:: Workspace {0} for run {1} does not have monitors in it'\ .format(ws.name(),run.run_number())) # if self._debug_mode: - kwargs = {'NormFactorWS':'NormMon2_WS' + mon_ws.getName()} + kwargs = {'NormFactorWS':'NormMon2_WS' + mon_ws.getName()} else: - kwargs = {} + kwargs = {} mon_spect = self.prop_man.mon2_norm_spec mon_index = int(mon_ws.getIndexFromSpectrumNumber(mon_spect)) @@ -820,23 +820,23 @@ class DirectEnergyConversion(object): #Find TOF range, correspondent to incident energy monitor peak if self._mon2_norm_time_range: # range has been found during ei-calculations - range = self._mon2_norm_time_range - range_min = range[0] + range_offset - range_max = range[1] + range_offset - self._mon2_norm_time_range = None + range = self._mon2_norm_time_range + range_min = range[0] + range_offset + 
range_max = range[1] + range_offset + self._mon2_norm_time_range = None else: - mon_ws_name = mon_ws.name() #monitor's workspace and detector's workspace are e - if mon_ws_name.find('_shifted') != -1: + mon_ws_name = mon_ws.name() #monitor's workspace and detector's workspace are e + if mon_ws_name.find('_shifted') != -1: # monitor-2 normalization ranges have to be identified before the # instrument is shifted raise RuntimeError("Instrument have been shifted but no time range has been identified. Monitor-2 normalization can not be performed ") - else: + else: # instrument and workspace shifted, so TOF will be calculated wrt # shifted instrument - energy_rage = self.mon2_norm_energy_range - TOF_range = self.get_TOF_for_energies(mon_ws,energy_rage,[mon_spect],None,self._debug_mode) - range_min = TOF_range[0] - range_max = TOF_range[1] + energy_rage = self.mon2_norm_energy_range + TOF_range = self.get_TOF_for_energies(mon_ws,energy_rage,[mon_spect],None,self._debug_mode) + range_min = TOF_range[0] + range_max = TOF_range[1] # Normalize to monitor 2 NormaliseToMonitor(InputWorkspace=old_name,OutputWorkspace=old_name,IntegrationRangeMin=range_min, IntegrationRangeMax=range_max,IncludePartialBins=True,**kwargs) @@ -848,7 +848,7 @@ class DirectEnergyConversion(object): energy range requested """ if not workspace: - workspace = PropertyManager.sample_run.get_workspace() + workspace = PropertyManager.sample_run.get_workspace() spectra_id = self.prop_man.multirep_tof_specta_list if not spectra_id or len(spectra_id) == 0: @@ -875,14 +875,14 @@ class DirectEnergyConversion(object): nBlocks = len(spectra_id) if nBlocks > 1: - tof_min,t_step,tof_max = process_block(TOF_range[0]) - for ind in xrange(1,nBlocks): - tof_min1,t_step1,tof_max1 = process_block(TOF_range[ind]) - tof_min = min(tof_min,tof_min1) - tof_max = max(tof_max,tof_max1) - t_step = min(t_step,t_step1) + tof_min,t_step,tof_max = process_block(TOF_range[0]) + for ind in xrange(1,nBlocks): + tof_min1,t_step1,tof_max1 = process_block(TOF_range[ind]) + tof_min = min(tof_min,tof_min1) + tof_max = max(tof_max,tof_max1) + t_step = min(t_step,t_step1) else: - tof_min,t_step,tof_max = process_block(TOF_range) + tof_min,t_step,tof_max = process_block(TOF_range) #end # add 5% for detectors specified in Par file are shifted a bit and not min-max det any more return (0.95*tof_min,t_step,1.05*tof_max) @@ -968,7 +968,7 @@ class DirectEnergyConversion(object): """ if formats: # clear up existing save formats as one is defined in parameters - self.prop_man.save_format = None + self.prop_man.save_format = None # set up internal format variable from method parameters self.prop_man.set_input_parameters_ignore_nan(save_file_name=save_file,save_format=formats) formats = self.prop_man.save_format @@ -976,7 +976,7 @@ class DirectEnergyConversion(object): if save_file: save_file,ext = os.path.splitext(save_file) if len(ext) > 1: - formats.add(ext[1:]) + formats.add(ext[1:]) else: save_file = self.prop_man.save_file_name @@ -998,27 +998,27 @@ class DirectEnergyConversion(object): for file_format in formats: for case in common.switch(file_format): if case('nxspe'): - filename = save_file + '.nxspe' + filename = save_file + '.nxspe' # nxspe can not write workspace with / in the name # (something to do with folder names inside nxspe) - name_supported = name_orig.replace('/','of') - if name_supported != name_orig: - RenameWorkspace(InputWorkspace=name_orig,OutputWorkspace=name_supported) - SaveNXSPE(InputWorkspace=name_supported,Filename= filename,\ + name_supported = 
name_orig.replace('/','of') + if name_supported != name_orig: + RenameWorkspace(InputWorkspace=name_orig,OutputWorkspace=name_supported) + SaveNXSPE(InputWorkspace=name_supported,Filename= filename,\ KiOverKfScaling=prop_man.apply_kikf_correction,psi=prop_man.psi) - if name_supported != name_orig: - RenameWorkspace(InputWorkspace=name_supported,OutputWorkspace=name_orig) - break + if name_supported != name_orig: + RenameWorkspace(InputWorkspace=name_supported,OutputWorkspace=name_orig) + break if case('spe'): - filename = save_file + '.spe' - SaveSPE(InputWorkspace=workspace,Filename= filename) - break + filename = save_file + '.spe' + SaveSPE(InputWorkspace=workspace,Filename= filename) + break if case('nxs'): - filename = save_file + '.nxs' - SaveNexus(InputWorkspace=workspace,Filename= filename) - break + filename = save_file + '.nxs' + SaveNexus(InputWorkspace=workspace,Filename= filename) + break if case(): # default, could also just omit condition or 'if True' - prop_man.log("Unknown file format {0} requested to save results. No saving performed this format".\ + prop_man.log("Unknown file format {0} requested to save results. No saving performed this format".\ format(file_format)) ######### @property @@ -1159,21 +1159,21 @@ class DirectEnergyConversion(object): error = [] izerc = 0 for i in range(nhist): - try: - det = data_ws.getDetector(i) - except Exception: - continue - if det.isMasked(): - continue - sig = data_ws.readY(i)[0] - err = data_ws.readE(i)[0] - if sig != sig: #ignore NaN (hopefully it will mean mask some day) - continue - if (err <= 0) or (sig <= 0): # count Inf and negative||zero readings. - izerc+=1 # Presence of this indicates that - continue # something went wrong - signal.append(sig) - error.append(err) + try: + det = data_ws.getDetector(i) + except Exception: + continue + if det.isMasked(): + continue + sig = data_ws.readY(i)[0] + err = data_ws.readE(i)[0] + if sig != sig: #ignore NaN (hopefully it will mean mask some day) + continue + if (err <= 0) or (sig <= 0): # count Inf and negative||zero readings. 
+ izerc+=1 # Presence of this indicates that + continue # something went wrong + signal.append(sig) + error.append(err) #---------------- Loop finished norm_factor = {} @@ -1232,11 +1232,11 @@ class DirectEnergyConversion(object): # check for NaN if (norm_factor['LibISIS'] != norm_factor['LibISIS']) | (izerc != 0): # It is an error, print diagnostics: - if norm_factor['LibISIS'] != norm_factor['LibISIS']: - log_value = '\n--------> Absolute normalization factor is NaN <----------------------------------------------\n' - else: - log_value = '\n--------> Warning, Monovanadium has zero spectra <--------------------------------------------\n' - log1_value = \ + if norm_factor['LibISIS'] != norm_factor['LibISIS']: + log_value = '\n--------> Absolute normalization factor is NaN <----------------------------------------------\n' + else: + log_value = '\n--------> Warning, Monovanadium has zero spectra <--------------------------------------------\n' + log1_value = \ "--------> Processing workspace: {0}\n"\ "--------> Monovan Integration range : min={1}, max={2} (meV)\n"\ "--------> Summed: {3} spectra with total signal: {4} and error: {5}\n"\ @@ -1248,8 +1248,8 @@ class DirectEnergyConversion(object): "--------> Abs norm factors: TGP : {11}\n"\ .format(ws_name,minmax[0],minmax[1],nhist,sum(signal),sum(error),izerc,scale_factor, norm_factor['LibISIS'],norm_factor['SigSq'],norm_factor['Poisson'],norm_factor['TGP']) - log_value = log_value + log1_value - propman.log(log_value,'error') + log_value = log_value + log1_value + propman.log(log_value,'error') else: if not self._debug_mode: monovan_run.clear_resulting_ws() @@ -1294,27 +1294,27 @@ class DirectEnergyConversion(object): #end def __getattr__(self,attr_name): - """ overloaded to return values of properties non-existing in the class dictionary + """ overloaded to return values of properties non-existing in the class dictionary from the property manager class except this property already have descriptor in self class """ - if attr_name in self._descriptors: - return object.__getattr__(self,attr_name) - else: - return getattr(self._propMan,attr_name) + if attr_name in self._descriptors: + return object.__getattr__(self,attr_name) + else: + return getattr(self._propMan,attr_name) def __setattr__(self,attr_name,attr_value): - """ overloaded to prohibit adding non-starting with _properties to the class instance + """ overloaded to prohibit adding non-starting with _properties to the class instance and add all other properties to property manager except this property already have a descriptor """ - if attr_name[0] == '_': + if attr_name[0] == '_': object.__setattr__(self,attr_name,attr_value) - else: - if attr_name in self._descriptors: - object.__setattr__(self,attr_name,attr_value) - else: - setattr(self._propMan,attr_name,attr_value) + else: + if attr_name in self._descriptors: + object.__setattr__(self,attr_name,attr_value) + else: + setattr(self._propMan,attr_name,attr_value) def initialise(self, instr,reload_instrument=False): """ @@ -1358,20 +1358,20 @@ class DirectEnergyConversion(object): instrument = workspace.getInstrument() name = instrument.getName() if name != self.prop_man.instr_name: - self.prop_man = PropertyManager(name,workspace) + self.prop_man = PropertyManager(name,workspace) def get_run_descriptor(self,run): - """ Spawn temporary run descriptor for input data given in format, + """ Spawn temporary run descriptor for input data given in format, different from run descriptor. Return existing run descriptor, if it is what provided. 
""" - if not isinstance(run,RunDescriptor): - tRun = copy.copy(PropertyManager._tmp_run) - tRun.__set__(None,run) - return tRun - else: - return run + if not isinstance(run,RunDescriptor): + tRun = copy.copy(PropertyManager._tmp_run) + tRun.__set__(None,run) + return tRun + else: + return run # # ------------------------------------------------------------------------------------------- # This actually does the conversion for the mono-sample and @@ -1432,10 +1432,10 @@ class DirectEnergyConversion(object): energy_bins = PropertyManager.energy_bins.get_abs_range(self.prop_man) if energy_bins: - Rebin(InputWorkspace=result_name,OutputWorkspace=result_name,Params= energy_bins,PreserveEvents=False) - if bkgr_ws: # remove background after converting units and rebinning - RemoveBackground(InputWorkspace=result_name,OutputWorkspace=result_name,BkgWorkspace=bkgr_ws,EMode='Direct') - DeleteWorkspace(bkgr_ws) + Rebin(InputWorkspace=result_name,OutputWorkspace=result_name,Params= energy_bins,PreserveEvents=False) + if bkgr_ws: # remove background after converting units and rebinning + RemoveBackground(InputWorkspace=result_name,OutputWorkspace=result_name,BkgWorkspace=bkgr_ws,EMode='Direct') + DeleteWorkspace(bkgr_ws) else: pass # TODO: investigate way of removing background from event workspace if we want # result to be an event workspace @@ -1444,8 +1444,8 @@ class DirectEnergyConversion(object): if self.apply_detector_eff and energy_bins: #should detector efficiency work on event workspace too? At the moment it is #not (01/02/2015) - DetectorEfficiencyCor(InputWorkspace=result_name,OutputWorkspace=result_name) - self.prop_man.log("_do_mono: finished DetectorEfficiencyCor for : " + result_name,'information') + DetectorEfficiencyCor(InputWorkspace=result_name,OutputWorkspace=result_name) + self.prop_man.log("_do_mono: finished DetectorEfficiencyCor for : " + result_name,'information') ############# data_run.synchronize_ws(mtd[result_name]) @@ -1468,13 +1468,13 @@ class DirectEnergyConversion(object): if time_shift != 0: # Workspace has probably been shifted, so to have correct units conversion # one needs to do appropriate shift here as # well - CopyInstrumentParameters(result_ws,bkgr_ws) + CopyInstrumentParameters(result_ws,bkgr_ws) # Adjust the TOF such that the first monitor peak is at t=0 - ScaleX(InputWorkspace=bkgr_ws,OutputWorkspace='bkgr_ws',Operation="Add",Factor=time_shift,\ + ScaleX(InputWorkspace=bkgr_ws,OutputWorkspace='bkgr_ws',Operation="Add",Factor=time_shift,\ InstrumentParameter="DelayTime",Combine=True) else: - bkgr_ws = Rebin(result_ws,Params=[bkg_range_min,(bkg_range_max - bkg_range_min) * 1.001,bkg_range_max],PreserveEvents=False) - bkgr_ws = self.normalise(bkgr_ws, self.normalise_method, time_shift) + bkgr_ws = Rebin(result_ws,Params=[bkg_range_min,(bkg_range_max - bkg_range_min) * 1.001,bkg_range_max],PreserveEvents=False) + bkgr_ws = self.normalise(bkgr_ws, self.normalise_method, time_shift) return bkgr_ws @@ -1490,10 +1490,10 @@ class DirectEnergyConversion(object): self._do_mono_ISIS(run,ei_guess,\ white_run, map_file, spectra_masks, Tzero) else: - result_name = run.set_action_suffix('_spe') - self._do_mono_SNS(run,result_name,ei_guess,\ + result_name = run.set_action_suffix('_spe') + self._do_mono_SNS(run,result_name,ei_guess,\ white_run, map_file, spectra_masks, Tzero) - run.synchronize_ws() + run.synchronize_ws() prop_man = self.prop_man ws = run.get_workspace() @@ -1507,8 +1507,8 @@ class DirectEnergyConversion(object): # Make sure that our binning is consistent if 
prop_man.energy_bins: - bins = PropertyManager.energy_bins.get_abs_range(prop_man) - Rebin(InputWorkspace=result_name,OutputWorkspace= result_name,Params=bins) + bins = PropertyManager.energy_bins.get_abs_range(prop_man) + Rebin(InputWorkspace=result_name,OutputWorkspace= result_name,Params=bins) # Masking and grouping result_ws = mtd[result_name] @@ -1546,12 +1546,12 @@ class DirectEnergyConversion(object): old_log_val = targ_ws.getRun().getLogData(done_Log).value done_log_VAL = self._build_white_tag() if old_log_val == done_log_VAL: - run.synchronize_ws(targ_ws) - if self._keep_wb_workspace: + run.synchronize_ws(targ_ws) + if self._keep_wb_workspace: result = run.get_ws_clone() - else: + else: result = run.get_workspace() - return result + return result else: DeleteWorkspace(Workspace=new_ws_name) else: @@ -1610,7 +1610,7 @@ def get_failed_spectra_list_from_masks(masked_wksp,prop_man): #TODO: get rid of this and use data, obtained form diagnostics failed_spectra = [] if masked_wksp is None: - return (failed_spectra,0) + return (failed_spectra,0) try: name = masked_wksp.name() except Exeption as ex: diff --git a/Code/Mantid/scripts/Inelastic/Direct/NonIDF_Properties.py b/Code/Mantid/scripts/Inelastic/Direct/NonIDF_Properties.py index 08e068dd155ab5a2158ec0bf85f95ce9d97da622..9d07a6261f37ab785d0a5297fb7d9663c09d172e 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/NonIDF_Properties.py +++ b/Code/Mantid/scripts/Inelastic/Direct/NonIDF_Properties.py @@ -96,7 +96,7 @@ class NonIDF_Properties(object): If not explicitly set, white beam for sample run is used.""") # TODO: do something about it. Second white is explicitly used in # diagnostics but not accessed at all - second_white = RunDescriptor("""Second white beam run resutlts currently unused in the workflow + second_white = RunDescriptor("""Second white beam run resutlts currently unused in the workflow despite being referred to in Diagnostics. In a future it should be enabled.""") # diff --git a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py index 3b68c9bad1d45dd12734804f2f1334f4f0656e4c..065faeb8835bec6f2be1f770e5b42f85983ef4c3 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py +++ b/Code/Mantid/scripts/Inelastic/Direct/ReductionWrapper.py @@ -68,7 +68,7 @@ class ReductionWrapper(object): self._wvs = ReductionWrapper.var_holder(web_var) # Initialize reduced for given instrument self.reducer = DirectEnergyConversion(instrumentName) - # + # web_vars = self._wvs.get_all_vars() if web_vars : self.reducer.prop_man.set_input_parameters(**web_vars) @@ -159,7 +159,7 @@ class ReductionWrapper(object): @property def validate_run_number(self): """The property defines the run number to validate. 
If defined, switches reduction wrapper from - reduction to validation mode, where reduction tries to load result, previously calculated, + reduction to validation mode, where reduction tries to load result, previously calculated, for this run and then compare this result with the result, defined earlier""" return self._run_number_to_validate @@ -211,13 +211,13 @@ class ReductionWrapper(object): # def validate_result(self,Error=1.e-6,ToleranceRelErr=True): - """Method to validate result against existing validation file + """Method to validate result against existing validation file or workspace Change this method to verify different results or validate results differently""" - rez,message = ReductionWrapper.build_or_validate_result(self, + rez,message = ReductionWrapper.build_or_validate_result(self, Error,ToleranceRelErr) - return rez,message + return rez,message # def set_custom_output_filename(self): @@ -256,7 +256,7 @@ class ReductionWrapper(object): else: if len(path)>0: config.appendDataSearchDir(path) - # it there bug in getFullPath? It returns the same string if given full path + # it there bug in getFullPath? It returns the same string if given full path # but file has not been found name,fext=os.path.splitext(name) fileName = FileFinder.getFullPath(name+'.nxs') @@ -269,7 +269,7 @@ class ReductionWrapper(object): else: build_validation = True elif isinstance(validation_file,api.Workspace): - # its workspace: + # its workspace: reference_ws = validation_file build_validation = False fileName = "workspace:"+reference_ws.name() diff --git a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py index 48677e349e84048fb8cdbbe66d740c5b1587dcc9..5a783b7331270faa98367368981605a59d97c607 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py +++ b/Code/Mantid/scripts/Inelastic/Direct/RunDescriptor.py @@ -844,7 +844,7 @@ class RunDescriptor(PropDescriptor): except: try: monws_name = mon_ws.name() - except: + except: monws_name = 'None' RunDescriptor._logger('*** Monitor workspace {0} does not have monitor with ID {1}. 
Monitor workspace set to None'.\ format(monws_name,monID),'warning') @@ -1077,7 +1077,7 @@ class RunDescriptor(PropDescriptor): except: try: ws_index = data_ws.getIndexFromSpectrumNumber(spectraID) - except: + except: raise RuntimeError('*** Error: Can not retrieve spectra with ID {0} from source workspace: {1}'.\ format(spectraID,data_ws.name())) diff --git a/Code/Mantid/scripts/Inelastic/Direct/dgreduce.py b/Code/Mantid/scripts/Inelastic/Direct/dgreduce.py index 53fa5df5ef9769f75c3f943ccd9a1d6665f5f1eb..eac4022db4dd6ce5794d85e88d09c5019d6b9096 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/dgreduce.py +++ b/Code/Mantid/scripts/Inelastic/Direct/dgreduce.py @@ -151,7 +151,7 @@ def arb_units(wb_run,sample_run,ei_guess,rebin,map_file='default',monovan_run=No return res def runs_are_equal(ws1,ws2): - """Compare two run numbers, provided either as run numbers, + """Compare two run numbers, provided either as run numbers, or as workspaces or as ws names""" if ws1 == ws2: return True diff --git a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py index 80751e0d2816eeb1783940b536014a04d82c21ac..363ca3c9bbc5c2d9de8e5e782461145d120e151b 100644 --- a/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py +++ b/Code/Mantid/scripts/Inelastic/Direct/diagnostics.py @@ -374,10 +374,10 @@ def do_bleed_test(sample_run, max_framerate, ignored_pixels): try: nFrames= data_ws.run().getLogData('goodfrm').value except RuntimeError: - try: + try: nFrames = len(data_ws.run().getLogData('good_frame_log').value) - AddSampleLog(Workspace=data_ws, LogName='goodfrm', LogText=str(nFrames), LogType='Number') - except RuntimeError: + AddSampleLog(Workspace=data_ws, LogName='goodfrm', LogText=str(nFrames), LogType='Number') + except RuntimeError: raise RuntimeError("""Can not run bleed test as no appropriate good frame log is found in the workspace: {0}\n Disable bleed test by setting diag_bleed_test=False or add 'goodfrm' log value to the workspace\n"""\ .format(data_ws.name())) diff --git a/Code/Mantid/scripts/Interface/reduction_gui/reduction/diffraction/diffraction_reduction_script.py b/Code/Mantid/scripts/Interface/reduction_gui/reduction/diffraction/diffraction_reduction_script.py index 7be77dcb093e64600ce6339b38067c740563beb2..d555dc0f53d88e4bdaba66ba999ef7582ac4fc13 100644 --- a/Code/Mantid/scripts/Interface/reduction_gui/reduction/diffraction/diffraction_reduction_script.py +++ b/Code/Mantid/scripts/Interface/reduction_gui/reduction/diffraction/diffraction_reduction_script.py @@ -28,7 +28,7 @@ class DiffractionReductionScripter(BaseReductionScripter): def __init__(self, name="VULCAN", facility="SNS"): """ Initialization """ - # Call base class + # Call base class super(DiffractionReductionScripter, self).__init__(name=name, facility=facility) # Find whether there is stored setup XMLs @@ -36,7 +36,7 @@ class DiffractionReductionScripter(BaseReductionScripter): mantidconfigdir = os.path.join(homedir, ".mantid") self.configDir = mantidconfigdir - # create configuratin dir if it has not been + # create configuratin dir if it has not been if os.path.exists(self.configDir) is False: os.makedirs(self.configDir) @@ -48,10 +48,10 @@ class DiffractionReductionScripter(BaseReductionScripter): return def to_script(self, file_name=None): - """ Generate reduction script via observers and - (1) save the script to disk and (2) save the reduction setup to disk. 
+ """ Generate reduction script via observers and + (1) save the script to disk and (2) save the reduction setup to disk. - Arguments: + Arguments: - file_name: name of the file to write the script to """ # Collect partial scripters from observers @@ -63,7 +63,7 @@ class DiffractionReductionScripter(BaseReductionScripter): # Construct python commands script = self.constructPythonScript(paramdict) - + # Save script to disk if file_name is None: file_name = os.path.join(self.configDir, DiffractionReductionScripter.AUTOSCRIPTNAME) @@ -75,7 +75,7 @@ class DiffractionReductionScripter(BaseReductionScripter): except IOError as e: print "Unable to save script to file. Reason: %s." % (str(e)) - # Export XML file + # Export XML file autosavexmlfname = os.path.join(self.configDir, "snspowderreduction.xml") self.to_xml(autosavexmlfname) diff --git a/Code/Mantid/scripts/Powder_Diffraction_Reduction.py b/Code/Mantid/scripts/Powder_Diffraction_Reduction.py index 18a5cd66b305a3fa25d61c2567437c33f14ddc0d..73ccc25e770f468180ed1c528dfeb8b5600fc91c 100644 --- a/Code/Mantid/scripts/Powder_Diffraction_Reduction.py +++ b/Code/Mantid/scripts/Powder_Diffraction_Reduction.py @@ -8,15 +8,15 @@ from reduction_application import ReductionGUI from PyQt4 import QtCore, uic reducer = ReductionGUI(instrument_list=["PG3", "NOM", "VULCAN"]) -if reducer.setup_layout(load_last=True): - +if reducer.setup_layout(load_last=True): + # Set up reduction configuration from previous usage try: # Find home dir - homedir = os.path.expanduser("~") + homedir = os.path.expanduser("~") mantidconfigdir = os.path.join(homedir, ".mantid") autopath = os.path.join(mantidconfigdir, 'snspowderreduction.xml') - # Load configuration + # Load configuration reducer.open_file(autopath) except IOError as e: print "[Error] Unable to load previously reduction setup from file %s.\nReason: %s." % ( diff --git a/Code/Mantid/scripts/SANS/ISISCommandInterface.py b/Code/Mantid/scripts/SANS/ISISCommandInterface.py index 127cc5410f9793efc32eb66af44bf6a30f0c2379..d0e229a05a9c6b6b06c915baad1a87a728648aac 100644 --- a/Code/Mantid/scripts/SANS/ISISCommandInterface.py +++ b/Code/Mantid/scripts/SANS/ISISCommandInterface.py @@ -864,7 +864,7 @@ def SetDetectorOffsets(bank, x, y, z, rot, radius, side, xtilt=0.0, ytilt=0.0 ): detector.y_tilt = ytilt def SetCorrectionFile(bank, filename): - # 10/03/15 RKH, create a new routine that allows change of "direct beam file" = correction file, for a given + # 10/03/15 RKH, create a new routine that allows change of "direct beam file" = correction file, for a given # detector, this simplify the iterative process used to adjust it. Will still have to keep changing the name of the file # for each iteratiom to avoid Mantid using a cached version, but can then use only a single user (=mask) file for each set of iterations. # Modelled this on SetDetectorOffsets above ... @@ -876,7 +876,7 @@ def SetCorrectionFile(bank, filename): detector = ReductionSingleton().instrument.getDetector(bank) detector.correction_file = filename - + def LimitsR(rmin, rmax, quiet=False, reducer=None): if reducer == None: reducer = ReductionSingleton().reference() diff --git a/Code/Mantid/scripts/SANS/isis_instrument.py b/Code/Mantid/scripts/SANS/isis_instrument.py index 088d87dd5f3481a27bb9cc82a476b8dc892dd9c0..2f1b19ea1c0d3bfd07595e0409f754c08f5294ac 100644 --- a/Code/Mantid/scripts/SANS/isis_instrument.py +++ b/Code/Mantid/scripts/SANS/isis_instrument.py @@ -435,24 +435,24 @@ class ISISInstrument(BaseInstrument): # see if a second step size is defined. 
If not set the second value to the first for compatibility #logger.warning("Trying to find centre-finder-step-size2") try: - self.cen_find_step2 = float(self.definition.getNumberParameter('centre-finder-step-size2')[0]) + self.cen_find_step2 = float(self.definition.getNumberParameter('centre-finder-step-size2')[0]) except: #logger.warning("Failed to find centre-finder-step-size2") - self.cen_find_step2 = self.cen_find_step + self.cen_find_step2 = self.cen_find_step logger.warning("Trying to find beam-centre-scale-factor1") try: - self.beam_centre_scale_factor1 = float(self.definition.getNumberParameter('beam-centre-scale-factor1')[0]) + self.beam_centre_scale_factor1 = float(self.definition.getNumberParameter('beam-centre-scale-factor1')[0]) except: - logger.warning("Failed to find beam-centre-scale-factor1") - self.beam_centre_scale_factor1 = 1000.0 + logger.warning("Failed to find beam-centre-scale-factor1") + self.beam_centre_scale_factor1 = 1000.0 logger.warning("Trying to find beam-centre-scale-factor2") try: - self.beam_centre_scale_factor2 = float(self.definition.getNumberParameter('beam-centre-scale-factor2')[0]) + self.beam_centre_scale_factor2 = float(self.definition.getNumberParameter('beam-centre-scale-factor2')[0]) except: - logger.warning("Failed to find beam-centre-scale-factor2") - self.beam_centre_scale_factor2 = 1000.0 + logger.warning("Failed to find beam-centre-scale-factor2") + self.beam_centre_scale_factor2 = 1000.0 firstDetect = DetectorBank(self.definition, 'low-angle') #firstDetect.disable_y_and_rot_corrs() @@ -924,14 +924,14 @@ class SANS2D(ISISInstrument): FRONT_DET_Z, FRONT_DET_X, FRONT_DET_ROT, REAR_DET_Z, REAR_DET_X = self.getDetValues(ws) # Deal with front detector - # 10/03/15 RKH need to add tilt of detector, in degrees, with respect to the horizontal or vertical of the detector plane + # 10/03/15 RKH need to add tilt of detector, in degrees, with respect to the horizontal or vertical of the detector plane # this time we can rotate about the detector's own axis so can use RotateInstrumentComponent, ytilt rotates about x axis, xtilt rotates about z axis # if frontDet.y_tilt != 0.0: RotateInstrumentComponent(Workspace=ws,ComponentName= self.getDetector('front').name(), X = "1.", Y = "0.", Z = "0.", Angle = frontDet.y_tilt) if frontDet.x_tilt != 0.0: RotateInstrumentComponent(Workspace=ws,ComponentName= self.getDetector('front').name(), X = "0.", Y = "0.", Z = "1.", Angle = frontDet.x_tilt) - # + # # 9/1/12 this all dates to Richard Heenan & Russell Taylor's original python development for SANS2d # the rotation axis on the SANS2d front detector is actually set front_det_radius = 306mm behind the detector. # Since RotateInstrumentComponent will only rotate about the centre of the detector, we have to to the rest here. 
@@ -961,7 +961,7 @@ class SANS2D(ISISInstrument): # deal with rear detector - # 10/03/15 RKH need to add tilt of detector, in degrees, with respect to the horizontal or vertical of the detector plane + # 10/03/15 RKH need to add tilt of detector, in degrees, with respect to the horizontal or vertical of the detector plane # Best to do the tilts first, while the detector is still centred on the z axis, ytilt rotates about x axis, xtilt rotates about z axis # NOTE the beam centre coordinates may change if rearDet.y_tilt != 0.0: diff --git a/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_OneRun.py b/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_OneRun.py index 8ae81127207be15b6d7bff3d4f079f5d9f55bd07..eb86834cb9491a090effaee34c42229d3c58a5c6 100644 --- a/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_OneRun.py +++ b/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_OneRun.py @@ -224,9 +224,9 @@ IndexPeaks( PeaksWorkspace=peaks_ws, Tolerance=tolerance) # SaveIsawUB( InputWorkspace=peaks_ws,Filename=run_niggli_matrix_file ) if output_nexus: - SaveNexus( InputWorkspace=peaks_ws, Filename=run_niggli_integrate_file ) + SaveNexus( InputWorkspace=peaks_ws, Filename=run_niggli_integrate_file ) else: - SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, + SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, Filename=run_niggli_integrate_file ) # @@ -335,9 +335,9 @@ elif use_cylindrical_integration: # result. # if output_nexus: - SaveNexus( InputWorkspace=peaks_ws, Filename=run_niggli_integrate_file ) + SaveNexus( InputWorkspace=peaks_ws, Filename=run_niggli_integrate_file ) else: - SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, + SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False, Filename=run_niggli_integrate_file ) # Print warning if user is trying to integrate using the cylindrical method and transorm the cell @@ -362,10 +362,10 @@ else: CellType=cell_type, Centering=centering,\ AllowPermutations=allow_perm,\ Apply=True, Tolerance=tolerance ) - if output_nexus: - SaveNexus( InputWorkspace=peaks_ws, Filename=run_conventional_integrate_file ) + if output_nexus: + SaveNexus( InputWorkspace=peaks_ws, Filename=run_conventional_integrate_file ) else: - SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False,\ + SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=False,\ Filename=run_conventional_integrate_file ) SaveIsawUB( InputWorkspace=peaks_ws, Filename=run_conventional_matrix_file ) diff --git a/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_Parallel.py b/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_Parallel.py index 804358531b1d668af187c5aedeea8ff4958e5339..f64d649032e7ca20abfc691b3fdbc9c865959387 100644 --- a/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_Parallel.py +++ b/Code/Mantid/scripts/SCD_Reduction/ReduceSCD_Parallel.py @@ -174,7 +174,7 @@ if output_nexus: for item in candidates: if os.path.exists(item): full_name = str(item) - + if not full_name.endswith('nxs'): print "Exiting since the data_directory was not specified and" print "findnexus failed for event NeXus file: " + instrument_name + " " + str(run) @@ -203,17 +203,17 @@ if not use_cylindrical_integration: uc_alpha = peaks_ws.sample().getOrientedLattice().alpha() uc_beta = peaks_ws.sample().getOrientedLattice().beta() uc_gamma = peaks_ws.sample().getOrientedLattice().gamma() - if output_nexus: + if output_nexus: peaks_total = CombinePeaksWorkspaces(LHSWorkspace=peaks_total, RHSWorkspace=peaks_ws) SaveNexus( InputWorkspace=peaks_ws, Filename=niggli_integrate_file ) - else: + else: SaveIsawPeaks( InputWorkspace=peaks_ws, 
AppendFile=False, Filename=niggli_integrate_file ) first_time = False else: - if output_nexus: + if output_nexus: peaks_total = CombinePeaksWorkspaces(LHSWorkspace=peaks_total, RHSWorkspace=peaks_ws) SaveNexus( InputWorkspace=peaks_total, Filename=niggli_integrate_file ) - else: + else: SaveIsawPeaks( InputWorkspace=peaks_ws, AppendFile=True, Filename=niggli_integrate_file ) # diff --git a/Code/Mantid/scripts/reduction/instruments/reflectometer/wks_utility.py b/Code/Mantid/scripts/reduction/instruments/reflectometer/wks_utility.py index 707396dfa9f5ecb92dd68ae2531a09c04ae7cea4..b9dec721553ac63bdbeb835d613fe0b306a45bb4 100644 --- a/Code/Mantid/scripts/reduction/instruments/reflectometer/wks_utility.py +++ b/Code/Mantid/scripts/reduction/instruments/reflectometer/wks_utility.py @@ -1420,7 +1420,7 @@ def applyScalingFactor(tof_axis, file created by the sfCalculator procedure """ isSFfound = False - + #sf_file = 'NaN' if os.path.isfile(sf_file):