Skip to content
Snippets Groups Projects
Commit 87600689 authored by Savici, Andrei T.'s avatar Savici, Andrei T.
Browse files

Merge pull request #13142 from mganeva/corrections_to_LoadDNSLegacy

Corrections to LoadDNSLegacy
parents 255d9c1d 6797887f
No related branches found
No related tags found
No related merge requests found
......@@ -12,11 +12,13 @@ from dnsdata import DNSdata
sys.path.pop(0)
POLARISATIONS = ['0', 'x', 'y', 'z', '-x', '-y', '-z']
NORMALIZATIONS = ['duration', 'monitor']
class LoadDNSLegacy(PythonAlgorithm):
"""
Load the DNS Legacy data file to the mantid workspace
Load the DNS Legacy data file to the matrix workspace
Monitor/duration data are loaded to the separate workspace
"""
def category(self):
"""
......@@ -42,98 +44,134 @@ class LoadDNSLegacy(PythonAlgorithm):
doc="Name of the workspace to store the experimental data.")
self.declareProperty("Polarisation", "0",
StringListValidator(POLARISATIONS),
doc="Type of polarisation. Valid values: %s" % str(POLARISATIONS))
doc="Type of polarisation.")
self.declareProperty("Normalization", "duration",
StringListValidator(NORMALIZATIONS),
doc="Type of data for normalization.")
return
def PyExec(self):
# Input
filename = self.getPropertyValue("Filename")
outws = self.getPropertyValue("OutputWorkspace")
outws_name = self.getPropertyValue("OutputWorkspace")
monws_name = outws_name + '_NORM'
pol = self.getPropertyValue("Polarisation")
norm = self.getPropertyValue("Normalization")
# load data array from the given file
data_array = np.loadtxt(filename)
if not data_array.size:
message = "File " + filename + " does not contain any data!"
self.log().error(message)
raise RuntimeError(message)
# load run information
metadata = DNSdata()
try:
metadata.read_legacy(filename)
except RuntimeError as err:
message = "Error of loading of file " + filename + ": " + err
self.log().error(message)
raise RuntimeError(message)
ndet = 24
dataX = np.zeros(ndet)
# this needed to be able to use ConvertToMD
dataX = np.zeros(2*ndet)
dataX.fill(metadata.wavelength + 0.00001)
dataX[::2] -= 0.000002
dataY = data_array[0:ndet, 1:]
dataE = np.sqrt(dataY)
# create workspace
__temporary_workspace__ = api.CreateWorkspace(DataX=dataX,
DataY=dataY, DataE=dataE, NSpec=ndet, UnitX="Wavelength")
api.LoadInstrument(__temporary_workspace__, InstrumentName='DNS')
api.CreateWorkspace(OutputWorkspace=outws_name, DataX=dataX, DataY=dataY,
DataE=dataE, NSpec=ndet, UnitX="Wavelength")
outws = api.mtd[outws_name]
api.LoadInstrument(outws, InstrumentName='DNS')
# load run information
metadata = DNSdata()
metadata.read_legacy(filename)
run = __temporary_workspace__.mutableRun()
run = outws.mutableRun()
if metadata.start_time and metadata.end_time:
run.setStartAndEndTime(DateAndTime(metadata.start_time),
DateAndTime(metadata.end_time))
# add name of file as a run title
fname = os.path.splitext(os.path.split(filename)[1])[0]
run.addProperty('run_title', fname, True)
# run.addProperty('dur_secs', str(metadata.duration), True)
# rotate the detector bank to the proper position
api.RotateInstrumentComponent(__temporary_workspace__,
"bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
api.RotateInstrumentComponent(outws, "bank0", X=0, Y=1, Z=0, Angle=metadata.deterota)
# add sample log Ei and wavelength
api.AddSampleLog(__temporary_workspace__,
'Ei', LogText=str(metadata.incident_energy),
LogType='Number')
api.AddSampleLog(__temporary_workspace__,
'wavelength', LogText=str(metadata.wavelength),
LogType='Number')
api.AddSampleLog(outws, LogName='Ei', LogText=str(metadata.incident_energy),
LogType='Number', LogUnit='meV')
api.AddSampleLog(outws, LogName='wavelength', LogText=str(metadata.wavelength),
LogType='Number', LogUnit='Angstrom')
# add other sample logs
api.AddSampleLog(__temporary_workspace__, 'deterota',
LogText=str(metadata.deterota), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'mon_sum',
LogText=str(metadata.monitor_counts), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'duration',
LogText=str(metadata.duration), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'huber',
LogText=str(metadata.huber), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'T1',
LogText=str(metadata.t1), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'T2',
LogText=str(metadata.t2), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'Tsp',
LogText=str(metadata.tsp), LogType='Number')
api.AddSampleLog(outws, LogName='deterota', LogText=str(metadata.deterota),
LogType='Number', LogUnit='Degrees')
api.AddSampleLog(outws, 'mon_sum',
LogText=str(float(metadata.monitor_counts)), LogType='Number')
api.AddSampleLog(outws, LogName='duration', LogText=str(metadata.duration),
LogType='Number', LogUnit='Seconds')
api.AddSampleLog(outws, LogName='huber', LogText=str(metadata.huber),
LogType='Number', LogUnit='Degrees')
api.AddSampleLog(outws, LogName='omega', LogText=str(metadata.huber - metadata.deterota),
LogType='Number', LogUnit='Degrees')
api.AddSampleLog(outws, LogName='T1', LogText=str(metadata.t1),
LogType='Number', LogUnit='K')
api.AddSampleLog(outws, LogName='T2', LogText=str(metadata.t2),
LogType='Number', LogUnit='K')
api.AddSampleLog(outws, LogName='Tsp', LogText=str(metadata.tsp),
LogType='Number', LogUnit='K')
# flipper
api.AddSampleLog(__temporary_workspace__, 'flipper_precession',
LogText=str(metadata.flipper_precession_current), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'flipper_z_compensation',
LogText=str(metadata.flipper_z_compensation_current), LogType='Number')
flipper_status = 'OFF'
api.AddSampleLog(outws, LogName='flipper_precession',
LogText=str(metadata.flipper_precession_current),
LogType='Number', LogUnit='A')
api.AddSampleLog(outws, LogName='flipper_z_compensation',
LogText=str(metadata.flipper_z_compensation_current),
LogType='Number', LogUnit='A')
flipper_status = 'OFF' # flipper OFF
if abs(metadata.flipper_precession_current) > sys.float_info.epsilon:
flipper_status = 'ON'
api.AddSampleLog(__temporary_workspace__, 'flipper',
flipper_status = 'ON' # flipper ON
api.AddSampleLog(outws, LogName='flipper',
LogText=flipper_status, LogType='String')
# coil currents
api.AddSampleLog(__temporary_workspace__, 'C_a',
LogText=str(metadata.a_coil_current), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'C_b',
LogText=str(metadata.b_coil_current), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'C_c',
LogText=str(metadata.c_coil_current), LogType='Number')
api.AddSampleLog(__temporary_workspace__, 'C_z',
LogText=str(metadata.z_coil_current), LogType='Number')
api.AddSampleLog(outws, LogName='C_a', LogText=str(metadata.a_coil_current),
LogType='Number', LogUnit='A')
api.AddSampleLog(outws, LogName='C_b', LogText=str(metadata.b_coil_current),
LogType='Number', LogUnit='A')
api.AddSampleLog(outws, LogName='C_c', LogText=str(metadata.c_coil_current),
LogType='Number', LogUnit='A')
api.AddSampleLog(outws, LogName='C_z', LogText=str(metadata.z_coil_current),
LogType='Number', LogUnit='A')
# type of polarisation
api.AddSampleLog(__temporary_workspace__, 'polarisation',
api.AddSampleLog(outws, 'polarisation',
LogText=pol, LogType='String')
# slits
api.AddSampleLog(__temporary_workspace__, 'slit_i_upper_blade_position',
LogText=str(metadata.slit_i_upper_blade_position), LogType='String')
api.AddSampleLog(__temporary_workspace__, 'slit_i_lower_blade_position',
LogText=str(metadata.slit_i_lower_blade_position), LogType='String')
api.AddSampleLog(__temporary_workspace__, 'slit_i_left_blade_position',
LogText=str(metadata.slit_i_left_blade_position), LogType='String')
api.AddSampleLog(__temporary_workspace__, 'slit_i_right_blade_position',
LogText=str(metadata.slit_i_right_blade_position), LogType='String')
self.setProperty("OutputWorkspace", __temporary_workspace__)
self.log().debug('LoadDNSLegacy: data are loaded to the workspace ' + outws)
api.DeleteWorkspace(__temporary_workspace__)
api.AddSampleLog(outws, LogName='slit_i_upper_blade_position',
LogText=str(metadata.slit_i_upper_blade_position),
LogType='Number', LogUnit='mm')
api.AddSampleLog(outws, LogName='slit_i_lower_blade_position',
LogText=str(metadata.slit_i_lower_blade_position),
LogType='Number', LogUnit='mm')
api.AddSampleLog(outws, LogName='slit_i_left_blade_position',
LogText=str(metadata.slit_i_left_blade_position),
LogType='Number', LogUnit='mm')
api.AddSampleLog(outws, 'slit_i_right_blade_position',
LogText=str(metadata.slit_i_right_blade_position),
LogType='Number', LogUnit='mm')
# create workspace with normalization data (monitor or duration)
if norm == 'duration':
dataY.fill(metadata.duration)
dataE.fill(0.001)
else:
dataY.fill(metadata.monitor_counts)
dataE = np.sqrt(dataY)
api.CreateWorkspace(OutputWorkspace=monws_name, DataX=dataX, DataY=dataY,
DataE=dataE, NSpec=ndet, UnitX="Wavelength")
monws = api.mtd[monws_name]
api.LoadInstrument(monws, InstrumentName='DNS')
api.CopyLogs(InputWorkspace=outws_name, OutputWorkspace=monws_name, MergeStrategy='MergeReplaceExisting')
self.setProperty("OutputWorkspace", outws)
self.log().debug('LoadDNSLegacy: data are loaded to the workspace ' + outws_name)
return
......
......@@ -69,6 +69,10 @@ class DNSdata(object):
unparsed = fhandler.read()
blocks = unparsed.split(splitsymbol)
# check whether the file is complete
if len(blocks) < 9:
raise RuntimeError("The file %s is not complete!" % filename)
# parse each block
# parse block 0 (header)
res = parse_header(blocks[0])
......@@ -79,7 +83,7 @@ class DNSdata(object):
self.sample_name = res['sample']
self.userid = res['userid']
except:
raise ValueError("The file %s does not contain valid DNS data format." % filename)
raise RuntimeError("The file %s does not contain valid DNS data format." % filename)
# parse block 1 (general information)
b1splitted = [s.strip() for s in blocks[1].split('#')]
b1rest = [el for el in b1splitted]
......
......@@ -2,6 +2,8 @@ import unittest
from testhelpers import run_algorithm
from mantid.api import AnalysisDataService
from math import pi
import os
from mantid.simpleapi import LoadDNSLegacy
class LoadDNSLegacyTest(unittest.TestCase):
......@@ -27,10 +29,31 @@ class LoadDNSLegacyTest(unittest.TestCase):
self.assertEqual(8332872, run.getProperty('mon_sum').value)
self.assertEqual('y', run.getProperty('polarisation').value)
# check whether detector bank is rotated
det = ws.getDetector(1)
det = ws.getDetector(0)
self.assertAlmostEqual(8.54, ws.detectorSignedTwoTheta(det)*180/pi)
run_algorithm("DeleteWorkspace", Workspace=outputWorkspaceName)
return
def _createIncompleteFile(self, filename):
"""
creates an incomplete data file
"""
with open(filename, "w") as f:
f.write("# DNS Data userid=sa,exp=961,file=988,sample=run2")
f.write("#--------------------------------------------------------------------------")
f.write("# 9")
f.write("# User: Some User")
f.close()
return
def test_LoadInvalidData(self):
    """
    LoadDNSLegacy must raise RuntimeError for an incomplete data file
    instead of creating a workspace.
    """
    outputWorkspaceName = "LoadDNSLegacyTest_Test2"
    filename = "dns-incomplete.d_dat"
    self._createIncompleteFile(filename)
    try:
        # the incomplete-file check in DNSdata.read_legacy raises RuntimeError
        self.assertRaises(RuntimeError, LoadDNSLegacy, Filename=filename,
                          OutputWorkspace=outputWorkspaceName, Polarisation='y')
    finally:
        # remove the temporary file even if the assertion fails, so that
        # repeated test runs start from a clean state
        os.remove(filename)
# Run the test suite when this file is executed as a script.
if __name__ == "__main__":
    unittest.main()
......@@ -9,10 +9,17 @@
Description
-----------
Loads a DNS legacy .d_dat data file into a :ref:`Workspace2D <Workspace2D>` with
the given name.
.. warning::
The loader rotates the detector bank to the position given in the data file.
This algorithm is being developed for a specific instrument. It might be changed or even
removed without notice, should the instrument scientists decide to do so.
This algorithm loads a DNS legacy data file into a :ref:`Workspace2D <Workspace2D>`. Two workspaces will be created:
- the raw data workspace with the given name.
- a workspace with normalization data (monitor counts or experiment duration, by the user's choice). The normalization workspace has the same name as the data workspace, but with the suffix "_NORM".
The loader rotates the detector bank to the position given in the data file. No operations on the neutron counts are performed. Sample logs are duplicated for both the data and normalization workspaces.
This algorithm only supports the DNS instrument in its configuration before the major upgrade.
......@@ -27,10 +34,9 @@ Usage
datafile = 'dn134011vana.d_dat'
# Load dataset
ws = LoadDNSLegacy(datafile, Polarisation='x')
ws = LoadDNSLegacy(datafile, Polarisation='x', Normalization='monitor')
print "This workspace has", ws.getNumDims(), "dimensions and has", \
ws.getNumberHistograms(), "histograms."
print "This workspace has", ws.getNumDims(), "dimensions and has", ws.getNumberHistograms(), "histograms."
Output:
......
......@@ -22,13 +22,14 @@
</component>
<type name="moderator" is="Source"></type>
<!-- monitor -->
<component type="monitor" idlist="monitor">
<!--<component type="monitor" idlist="monitor">
<location z="-0.229" />
</component>
<type name="monitor" is="monitor"></type>
<idlist idname="monitor">
<id val="-1"/>
</idlist>
-->
<!-- Sample position -->
<component type="sample-position">
<location y="0.0" x="0.0" z="0.0" />
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment