Commit ef4bda30 authored by Zolnierczuk, Piotr's avatar Zolnierczuk, Piotr
Browse files

small fixes

removed redundant code and constants
started unit testing
parent de43c96d
Loading
Loading
Loading
Loading
+7 −3
Original line number Diff line number Diff line
@@ -43,10 +43,12 @@ PHI_LIMS = (3.0, 79.5)
# 3. The Detector
XDET = 0.300 # [m] detector width
YDET = 0.300 # [m] detector height
DETECTOR_SIZE = sqrt(XDET**2+YDET**2)/L2 # in radiana
DETECTOR_SIZE = sqrt(XDET**2+YDET**2)/L2 # in radians

NXCHAN = 32  # number of detector channels (horizontal direction)
NYCHAN = 32  # number of detector channels (vertical direction)
#
NTCHAN = 42  # default number of TOF channels

#dx   = XDET/NXCHAN # 30.0/32=0.009375 # m
#dy   = YDET/NYCHAN # 30.0/32=0.009375 # m
@@ -99,8 +101,10 @@ ATTENUATOR_TABLE_11A = {
PROTON_CHARGE_RTDL         = 1e-11  # 100 pico Coulomb (RTDL unit)
PROTON_CHARGE_UNIT         = 1e-7   # 0.1 micro Coulomb (NSE control program unit)
DEFAULT_ACCELERATOR_FREQ   =   60.0 # Hz
DEFAULT_ACCELERATOR_POWER  =    1.4 # MW
DEFAULT_PROTON_ENERGY      = 1000.0 # MeV
#DEFAULT_ACCELERATOR_POWER  =    1.4 # MW
#DEFAULT_PROTON_ENERGY      = 1000.0 # MeV
DEFAULT_ACCELERATOR_POWER  =    1.7 # MW
DEFAULT_PROTON_ENERGY      = 1300.0 # MeV
DEFAULT_PCHARGE_PER_SECOND = DEFAULT_ACCELERATOR_POWER/DEFAULT_PROTON_ENERGY/PROTON_CHARGE_UNIT

def pcharge_per_second(power=DEFAULT_ACCELERATOR_POWER, energy=DEFAULT_PROTON_ENERGY):
+3 −0
Original line number Diff line number Diff line
@@ -24,6 +24,9 @@ ANGSTROM = _const.angstrom # Angstrom in meters
GAUSS      = 1e-04 # Gauss-> Tesla
#
MICRO      = _const.micro # mu - micro prefix
NANO       = _const.nano
PICO       = _const.pico
#
NANOSECOND = _const.nano  #

# conversion constant from field integral to phase angle
+1 −1
Original line number Diff line number Diff line
@@ -5,6 +5,6 @@ from .hdf import HdfConverter, convert_to_hdf # NOQA
from .reader  import (  read_echo, read_magnetic, read_xyz,         # NOQA
                        read_detimage, read_datfile, read_diffrun,  # NOQA
                        read_datreat , read_transmission )          # NOQA
#from .wr
from .writer  import EchoScan, make_echofilename
# from .legacy_writer  import write_csv, generate_echo                       # NOQA
# from .legacy_history import extract_data, list_variables, parse_date       # NOQA

pysen/inout/nexus.py

0 → 100644
+229 −0
Original line number Diff line number Diff line
#!/usr/bin/env python
"""
Process SNS-NSE NeXus Files

----------------------------------------------------------------------------------
EVENT STRUCTURE:
/entry/bank<N>_events - detector
/entry/monitor<N>     - beam monitors

array_of_events
 events['event_id']          # pixels
 events['event_time_offset'] # event time of flight

these are accelerator pulse pointers
 events['event_time_zero']   # beginning of the pulse in seconds  0,16.7ms,33.3ms,
 events['event_index']       # pointer to index in "array_of_events"

The monitors do not have "event_id"s as they have fixed pixel id
 ----------------------------------------------------------------------------------
"""
import os.path
import logging
import datetime as dtm

import numpy as np
import h5py

from pysen import HMN, MICRO, ANGSTROM

# packed pixel-id layout: low 10 bits = x channel, next 10 bits = y channel
# (see pixel_decompose below)
TDC_SHIFT =    10
TDC_MAXCH = 0x3FF

# FIXME (hard coded detector geometry)
X0_PIX    =   516 # center pixel
Y0_PIX    =   518
XWID2     =   813//2 # 410 # half-width
YWID2     =   821//2 # 410

# scan_index encoding: value = tau_index*TAU_IDX + phase_index (see get_tau_index)
TAU_IDX   = 1000
DN_IDX    =  100  # NOTE(review): DN_IDX/UP_IDX are unused in this module -- confirm intent
UP_IDX    =  200

TOF2LAM = MICRO*HMN/ANGSTROM # conversion factor for TOF [us] to lambda [A]

def pixel_decompose(pixel):
    "split a packed pixel id into its (y, x) detector channel coordinates"
    # low TDC_SHIFT bits hold x, the next 10-bit field holds y
    x_chan = pixel & TDC_MAXCH
    y_chan = (pixel >> TDC_SHIFT) & TDC_MAXCH
    return np.asarray((y_chan, x_chan))

TimeIntValueDType   = np.dtype([('time',float),('value',int)])   # time, int_val
TimeFloatValueDType = np.dtype([('time',float),('value',float)]) # time, float_val
def timevalue_array(nexus_file, vname, prefix="/entry/DASlogs", dtype=TimeFloatValueDType):
    """Read a DASlogs time/value log into a single structured array.

    Parameters:
        nexus_file -- open h5py.File (or any mapping indexed by
                      "<prefix>/<vname>/time" and "<prefix>/<vname>/value")
        vname      -- log variable name
        prefix     -- HDF5 group holding the log (default "/entry/DASlogs")
        dtype      -- structured dtype with 'time' and 'value' fields

    Returns a structured array, so callers can index
        something[i]['time']
        something[i]['value']
    """
    v_time  = nexus_file[f"{prefix}/{vname}/time"][:]
    v_value = nexus_file[f"{prefix}/{vname}/value"][:]
    # Vectorized field assignment instead of np.array(list(zip(...))):
    # avoids materializing a Python list of tuples for large logs.
    n = min(len(v_time), len(v_value))  # zip() truncated to the shorter input
    out = np.empty(n, dtype=dtype)
    out['time']  = v_time[:n]
    out['value'] = v_value[:n]
    return out

def time_format(ctime, fmt='%c'):
    """Convert a NeXus ISO-8601 time string to 'echo' time format.

    NeXus start times look like '2014-10-24T05:26:54.123456789-04:00';
    strptime cannot parse the nanosecond field, so it is dropped while the
    trailing UTC offset (the last 6 chars, e.g. '-04:00') is kept.
    """
    parts = ctime.split('.')
    if len(parts) > 1:  # fractional seconds present: drop them, keep tz offset
        ctime = parts[0] + parts[-1][-6:]
    # (previously the rejoin ran unconditionally, producing an unparseable
    #  string for timestamps without a '.' at all)
    ptime = dtm.datetime.strptime(ctime, '%Y-%m-%dT%H:%M:%S%z')
    return ptime.strftime(fmt)  # e.g. Fri Oct 24 05:26:54 2014

def get_echo_info(nxsfile, *extra_keys):
    "collect the basic run information from an open NeXus file as a dict"
    log  = logging.getLogger('nexus')
    base = os.path.basename(nxsfile.filename)

    # static run metadata read straight from /entry
    res = {
        'base':          base,
        'time':          time_format(nxsfile['/entry/start_time'][0].decode('ascii')),
        'title':         nxsfile['/entry/title'][0].decode('ascii'),
        'notes':         nxsfile['/entry/notes'][0].decode('ascii'),
        'duration':      nxsfile['/entry/duration'][0],
        'proton_charge': nxsfile['/entry/proton_charge'][0],
    }

    log.info("%s: %s, duration=%.1f min", base, res['time'], res['duration']/60)
    log.info("%s: %s %s", base, res['title'], res['notes'])

    def get_average_value(nexus_key, default_value):
        "fetch <nexus_key>/average_value, falling back to default_value"
        key = f"{nexus_key}/average_value"
        try:
            return nxsfile[key][0]
        except KeyError:
            # a None default marks a value we cannot substitute -> error,
            # otherwise just warn and keep going with the default
            if default_value is None:
                log.error("*** %s: cannot find nexus key %s", base, key)
            else:
                log.warning("*** %s: using default value for missing %s=%s",
                    base, key, default_value)
            return default_value

    res['mo_l']  = get_average_value('/entry/DASlogs/BL15:Mot:mo_l',  21.3)
    res['mophi'] = get_average_value('/entry/DASlogs/BL15:Mot:mophi', None)
    res['lmin' ] = get_average_value('/entry/DASlogs/BL15:Chop:Skf1:WavelengthSet', None)
    res['lmax' ] = get_average_value('/entry/DASlogs/BL15:Chop:Skf4:WavelengthSet', None)
    res['qmin' ] = get_average_value('/entry/DASlogs/BL15:Mot:Qmin', 0.1)
    #
    res['phasesens'] = get_average_value('/entry/DASlogs/BL15:Legacy:PhaseAngleSensitivity',1585.0)
    res['sample_temp'] = get_average_value('/entry/DASlogs/BL15:SE:LS1:SAMPLE_TEMP', 303.0)
    res['beam_power' ] = get_average_value('/entry/DASlogs/BeamPower', 1.7)

    # legacy control values: full time/value histories, None when absent
    for pv in ('Tau', 'J1', 'J2'):
        try:
            res[pv.lower()] = timevalue_array(nxsfile, vname=f"BL15:Legacy:{pv}", prefix="/entry/DASlogs")
        except KeyError:
            res[pv.lower()] = None

    # caller-requested extra averages (no defaults available)
    for key in extra_keys:
        res[key] = get_average_value(key, None)

    return res


def get_tau_index(scan_idx):
    "decode a scan index value into its (tau, phase) index pair"
    # the index encodes both: value = tau*TAU_IDX + phase
    return divmod(scan_idx['value'], TAU_IDX)

def get_averages(values, begin, end):
    "average each time/value series over the (begin, end) window"
    averages = {}
    if values is None:
        return averages
    for name, series in values.items():
        times  = series['time']
        window = series[(begin < times) & (times < end)]
        # no samples inside the window -> fall back to the whole series
        source = window if len(window) else series
        averages[name] = np.average(source['value'])
    return averages

def process_scan_point(events, pcharge, ltot, begin=None, end=None, values=None):
    """Extract the data for one phase point of a scan.

    events  -- NeXus event group (event_time_zero/index/offset/id datasets)
    pcharge -- proton-charge time/value structured array
    ltot    -- flight path length used in the TOF -> wavelength conversion
    begin/end -- time window of the phase point
        NOTE(review): the None defaults are unusable -- the comparisons
        below need real numbers; callers always pass both (see process_scan)
    values  -- optional dict of time/value series averaged over the window
    """
    pulse_t0   = events['event_time_zero']
    pulse_eidx = events['event_index']

    res = {}

    # total proton charge delivered during this phase
    charge_sel = np.logical_and(begin < pcharge['time'], pcharge['time'] < end)
    res['pcharge'] = np.sum(pcharge[charge_sel]['value'])

    # window averages of the slow-control values
    res.update(get_averages(values, begin, end))

    # pulses whose t0 falls inside the window
    pulse_sel = np.logical_and(begin < pulse_t0, pulse_t0 < end)
    selected  = pulse_eidx[pulse_sel]
    # NOTE(review): raises IndexError if no pulse lies in the window, and the
    # slice below ends at the *start* of the last selected pulse -- confirm
    # whether the final pulse's events are meant to be excluded
    first, last = selected[0], selected[-1]

    # event_time_offset is in usec
    lam = events['event_time_offset'][first:last] * TOF2LAM / ltot
    pix = pixel_decompose(events['event_id'][first:last])
    res['lambda']  = lam
    res['pixel']   = pix
    res['neutron'] = np.vstack((lam, pix))
    return res

def dummy_scanpoint_callback(*_args, **_kwargs):
    """No-op scan-point handler used as the default (handy for testing).

    Accepts and ignores any arguments; always reports success so that
    process_scan keeps iterating. Real callers supply their own callback.
    """
    return True

# process scan
def process_scan(filename, scan_point_callback=dummy_scanpoint_callback, **kwargs):
    """Process a NeXus file scan-point by scan-point.

    Walks the scan_index log, and for every subscan (a run of non-zero
    index values) calls process_scan_point and hands the result to
    scan_point_callback(res, filename=..., begin=..., end=..., **kwargs).
    A falsy callback return stops the iteration.

    Returns the run-info dict (see get_echo_info), or None when the file
    has fewer than two scan indices.
    """
    log = logging.getLogger('nexus')
    base = os.path.basename(filename)
    # context manager closes the HDF5 file on every exit path
    # (previously the handle leaked, e.g. on the early "no subscans" return)
    with h5py.File(filename, 'r') as nxsfile:
        # get the scan indices
        scan_index     = timevalue_array(nxsfile, 'scan_index', dtype=TimeIntValueDType)
        scan_index_len = len(scan_index)
        if scan_index_len <= 1: # need at least two indices
            log.warning("%s: no subscans found", base)
            return None

        info    = get_echo_info(nxsfile)
        pcharge = timevalue_array(nxsfile, 'proton_charge')
        events  = nxsfile['/entry/bank1_events']

        # slow-control values averaged per scan point
        values = {
            'i00'  : timevalue_array(nxsfile, 'BL15:Bruker:ReadOutCurrent'),
            'phase': timevalue_array(nxsfile, 'BL15:CAENELS5:ActualCurrent'),
        }
        for motor in ('mophi', 'mopsi', 'mo_z', 'moana', 'mo_l', 'moatt'):
            values[motor] = timevalue_array(nxsfile, f"BL15:Mot:{motor}")
        for bsens in ('sample', 'encl1', 'encl2', 'ext1','ext2', 'ext3','ext4','ext5'):
            values['b_'+bsens] = timevalue_array(nxsfile, f"BL15:PLC:Mag:{bsens}")

        i, k = 0, 0       # begin and end positions of the current subscan
        in_scan = False
        log.info("%s: processing scan indices", base)
        while k < scan_index_len:
            if in_scan:
                if scan_index[k]['value'] == 0:   # subscan just ended
                    in_scan = False
                    res = process_scan_point(events, pcharge, ltot=info['mo_l'],
                            begin=scan_index[i]['time'], end=scan_index[k]['time'],
                            values=values)

                    # FIXME: get tau ....
                    for key in ('tau', 'j1', 'j2'):
                        it =  info[key]['time'] <= scan_index[i]['time'] # tau set before scan
                        res[key] = info[key][it]['value'][-1]

                    if not scan_point_callback(res, filename=base,
                                    begin=scan_index[i], end=scan_index[k], **kwargs):
                        break
            else:
                if scan_index[k]['value'] > 0:    # subscan started
                    in_scan = True
                    i = k
            k = k + 1
    return info
# EOF
+36 −0
Original line number Diff line number Diff line
#!/usr/bin/env python
"""Simple nexus to echo converter
Piotr Zolnierczuk, zolnierczukp AT ornl.gov
Oak Ridge National Lab, 2024
"""
import logging
import argparse

from pysen.inout import EchoScan, make_echofilename

def main():
    "command-line entry point: convert each given NeXus file to echo format"
    parser = argparse.ArgumentParser()
    parser.add_argument('filename', metavar='file', nargs='+', help='filename to convert')
    parser.add_argument('--npix', type=int, dest='npix', default=32,
                        help='set number of detector pixels (default %(default)s)')
    parser.add_argument('--ntof', type=int, dest='ntof', default=42,
                        help='set number of tof channels (default %(default)s)')
    parser.add_argument('--nphases', type=int, dest='nphases', default=19,
                        help='set number of phases (default %(default)s)')
    parser.add_argument('--outdir', '-o', metavar='out', dest='outdir', default='.',
                        help='set output directory (default %(default)s)')
    args = parser.parse_args()

    logging.basicConfig(format='%(levelname)s %(message)s', level=logging.INFO)

    # convert each file; a failed read skips the write step
    for file_name in args.filename:
        echo = EchoScan(make_echofilename(file_name))
        if echo.read_nexus(file_name, npix=args.npix, ntof=args.ntof, nphases=args.nphases):
            echo.to_echo(args.outdir)


if __name__ == "__main__":
    main()
Loading