Cycle 23/4 bugs (#8)
* Fix simple bugs from Cycle 23/4

Fix auto-ei for updatestored runs
Auto-ei now updates for each run if not using monovan
sample_bg now correctly NormaliseByCurrent'd
keepworkspaces reverted to False by default

* Re-enable instrument copying function

Fix issue with reading powder nxspe files in Mantid
  (missing NXClass in detector group)
Add extra field requested by Toby
Add handling for MARI data before Ei_nominal field introduced.

* Add numerical verification

* Remove 'accumulate' angle summing option #6
mducle authored Jan 25, 2024
1 parent b53a88e commit 1e0c7fe
Showing 3 changed files with 103 additions and 33 deletions.
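
The headline change is the per-run auto-Ei update described in the first commit-message bullet. Below is a minimal sketch of the intended behaviour, assuming Mantid's simpleapi and this repository's autoei helper (the import path, run numbers and trans values are placeholders, not taken from the commit):

from mantid.simpleapi import Load
from reduction_utils import autoei   # helper from reduction_files/reduction_utils.py (import path assumed)

runs = [28581, 28582]     # hypothetical run numbers
trans = [1.0]             # transmission factor per Ei; padded below
mv_fac = []               # monovan factors; stays empty when no monovan file is given
use_auto_ei = True        # Ei_list was requested as 'auto' and no monovan is in use

for irun in runs:
    ws = Load(str(irun), OutputWorkspace='ws')   # Mantid resolves the run via its file finder (instrument prefix assumed configured)
    if use_auto_ei:
        # Re-determine the incident energies for *this* run instead of reusing
        # the list found for the first run of the batch.
        Ei_list = autoei(ws)
        # Pad the per-Ei lists so indexing inside the Ei loop stays valid.
        trans += [trans[-1]] * max(0, len(Ei_list) - len(trans))
        mv_fac += [1] * max(0, len(Ei_list) - len(mv_fac))
    for ienergy, Ei in enumerate(Ei_list):
        pass   # per-Ei reduction as in DG_reduction.py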
26 changes: 18 additions & 8 deletions reduction_files/DG_reduction.py
@@ -38,7 +38,6 @@
# what to do if see run with same angle as previous run (and sumruns==False)
# - 'replace': sum and replace first nxspe file created
# - 'ignore': ignore and create an nxspe for each run
# - 'accumulate': sum all runs with this angle when creating new nxspe
same_angle_action = 'ignore'
# to enable creating multiple reduced data files from a single
# "continuous scan" set the following variables to a valid log "block"
@@ -67,7 +66,7 @@
fixei = True # True for LET since no monitor 3
powdermap = 'RINGS_MAP_XML' # rings mapping file - must be .xml format
file_wait = 30 # wait for data file to appear (seconds)
keepworkspaces = True # should be false for Horace scans
keepworkspaces = False # should be false for Horace scans
saveformat = '.nxspe' # format of output, '.nxspe', '.nxs'
QENS = False # output Q-binned data for QENS data analysis '_red.nxs'
Qbins = 20 # approximate number of Q-bins for QENS
@@ -227,6 +226,7 @@ def load_sum(run_list, block_name=None):
ws_bg = load_sum(sample_bg)
if sample_cd is not None:
ws_bg = ws_bg - ws_cd
ws_bg = NormaliseByCurrent(ws_bg)

# ==========================continuous scan stuff=============================
if cs_block and cs_bin_size > 0:
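
The hunk above is the sample_bg fix from the commit message: the summed background is now normalised by proton charge before it is later subtracted from the (also current-normalised) sample data. A short sketch of just that step, assuming Mantid's simpleapi and placeholder run numbers:

from mantid.simpleapi import Load, NormaliseByCurrent

ws_bg = Load('MAR28580')      # hypothetical empty-can / background run
ws_cd = Load('MAR28579')      # hypothetical cadmium run (optional)
ws_bg = ws_bg - ws_cd         # remove the Cd contribution, as in the script
# New in this commit: put the background on a per-proton-charge scale so it is
# consistent with the current-normalised sample it is subtracted from later.
ws_bg = NormaliseByCurrent(ws_bg)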
@@ -262,23 +262,28 @@ def load_sum(run_list, block_name=None):
# =============================auto-Ei stuff=================================
is_auto = lambda x: isinstance(x, str) and 'auto' in x.lower()
if is_auto(Ei_list) or hasattr(Ei_list, '__iter__') and is_auto(Ei_list[0]):
use_auto_ei = True
try:
Ei_list = autoei(ws)
except NameError:
fn = str(sample[0])
if not fn.startswith(inst[:3]): fn = f'{inst[:3]}{fn}'
if fn.endswith('.raw'): fn = fn[:-4]
if not fn.endswith('.nxs'): fn += '.nxs'
if fn[-4:-2] == '.s': fn = fn[:-4] + '.n0' + fn[-2:]
if not fn.endswith('.nxs') and fn[-5:-2] != '.n0': fn += '.nxs'
Ei_list = autoei(LoadNexusMonitors(fn, OutputWorkspace='ws_tmp_mons'))
print(f"Automatically determined Ei's: {Ei_list}")
if len(trans) < len(Ei_list):
print(f'{inst}: WARNING - not enough transmission values for auto-Ei. ' \
'Extending list with last (end) value')
trans += [trans[-1]]*(len(Ei_list) - len(trans))
else:
use_auto_ei = False
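
The string slicing in the fallback above (commit bullet "Fix auto-ei for updatestored runs") is easy to misread; here is a hypothetical standalone helper restating the same logic, with the instrument prefix as an assumed default:

def monitor_filename(sample_run, inst='MAR'):
    # Hedged restatement of the name handling in the auto-Ei fallback above.
    fn = str(sample_run)
    if not fn.startswith(inst[:3]):
        fn = f'{inst[:3]}{fn}'
    if fn.endswith('.raw'):
        fn = fn[:-4]
    # Update-stored runs end in '.sNN'; their monitors live in the '.n0NN' file.
    if fn[-4:-2] == '.s':
        fn = fn[:-4] + '.n0' + fn[-2:]
    if not fn.endswith('.nxs') and fn[-5:-2] != '.n0':
        fn += '.nxs'
    return fn

# e.g. monitor_filename(28581)       -> 'MAR28581.nxs'
#      monitor_filename('28581.s01') -> 'MAR28581.n001'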

# ============================load monovan file==============================
mv_fac = []
if mv_file is not None and monovan_mass is not None:
use_auto_ei = False
if mv_file not in ws_list:
print(f'{inst}: Loading monovan calibration factors - {mv_file}')
LoadAscii(mv_file,OutputWorkspace=mv_file)
@@ -336,9 +341,8 @@ def load_sum(run_list, block_name=None):
runs_with_same_angles = get_angle(irun, angles_workspace, psi_motor_name, tryload)
if len(runs_with_same_angles) > 1:
ws = load_sum(runs_with_same_angles)
if same_angle_action.lower() == 'replace':
irun = runs_with_same_angles[0]
runs_with_angles_already_seen += runs_with_same_angles
irun = runs_with_same_angles[0]
runs_with_angles_already_seen += runs_with_same_angles
else:
tryload(irun)
print(f'Loading run# {irun}')
@@ -352,6 +356,12 @@ def load_sum(run_list, block_name=None):
l1 = (sampos - ws.getInstrument().getSource().getPos()).norm()
l2 = (ws.getDetector(0).getPos() - sampos).norm()

# Update Ei_list for this run if using auto-Ei _and_ not using monovan
if use_auto_ei:
Ei_list = autoei(ws)
if len(trans) < len(Ei_list): trans += [trans[-1]]*(len(Ei_list) - len(trans))
if len(mv_fac) < len(Ei_list): mv_fac += [1]*(len(Ei_list) - len(mv_fac))

# ============================= Ei loop =====================================
for ienergy in range(len(Ei_list)):
Ei = Ei_list[ienergy]
@@ -460,8 +470,8 @@ def load_sum(run_list, block_name=None):
print(f'{inst}: Writing {ofile}{saveformat}')
if saveformat.lower() == '.nxspe':
SaveNXSPE('ws_out', ofile+saveformat, Efixed=Ei, KiOverKfScaling=True)
#if utils_loaded and not powder:
# copy_inst_info(ofile+saveformat, 'ws_out') # Copies instrument info for Horace
if utils_loaded:
copy_inst_info(ofile+saveformat, 'ws_out') # Copies instrument info for Horace
elif saveformat.lower() == '.nxs':
rmlogs = {'events_log', 'frame_log', 'good_frame_log', 'period_log', 'proton_charge', 'raw_events_log'}
RemoveLogs('ws_out', KeepLogs=','.join(set(mtd['ws_out'].run().keys()).difference(rmlogs)))
31 changes: 24 additions & 7 deletions reduction_files/reduction_utils.py
@@ -101,41 +101,55 @@ def get_raw_file_from_ws(ws):
return prp.value()
raise RuntimeError('Could not find raw NeXus file in workspace history')


def copy_inst_info(outfile, in_ws):
print(f'Copying Instrument Info to file {outfile}')
try:
raw_file_name = get_raw_file_from_ws(mtd[in_ws])
except RuntimeError:
return
print(raw_file_name)
if not os.path.exists(outfile):
outfile = os.path.join(mantid.simpleapi.config['defaultsave.directory'], os.path.basename(outfile))
print(outfile)
with h5py.File(raw_file_name, 'r') as raw:
exclude = ['dae', 'detector_1', 'name']
to_copy = [k for k in raw['/raw_data_1/instrument'] if not any([x in k for x in exclude])]
if 'aperture' not in to_copy and 'mono_chopper' not in to_copy:
return
n_spec = len(raw['/raw_data_1/instrument/detector_1/spectrum_index'])
with h5py.File(outfile, 'r+') as spe:
print(spe.keys())
spe_root = list(spe.keys())[0]
en0 = spe[f'{spe_root}/instrument/fermi/energy'][()]
if 'fermi' in to_copy:
del spe[f'{spe_root}/instrument/fermi']
for grp in to_copy:
print(grp)
src = raw[f'/raw_data_1/instrument/{grp}']
h5py.Group.copy(src, src, spe[f'{spe_root}/instrument/'])
if 'fermi' in to_copy:
spe[f'{spe_root}/instrument/fermi/energy'][()] = en0
detroot = f'{spe_root}/instrument/detector_elements_1'
spe.create_group(detroot)
spe[detroot].attrs['NX_class'] = np.array('NXdetector', dtype='S')
for df0, df1 in zip(['SPEC', 'UDET', 'DELT', 'LEN2', 'CODE', 'TTHE', 'UT01'], \
['spectrum_number', 'detector_number', 'delt', 'distance', 'detector_code', 'polar_angle', 'azimuthal_angle']):
['det2spec', 'detector_number', 'delt', 'distance', 'detector_code', 'polar_angle', 'azimuthal_angle']):
src = raw[f'/raw_data_1/isis_vms_compat/{df0}']
h5py.Group.copy(src, src, spe[detroot], df1)
for nn in range(raw['/raw_data_1/isis_vms_compat/NUSE'][0]):
src = raw[f'/raw_data_1/isis_vms_compat/UT{nn+1:02d}']
h5py.Group.copy(src, src, spe[detroot], f'user_table{nn+1:02d}')
spec2work = f'{spe_root}/instrument/detector_elements_1/spec2work'
ws = mtd[in_ws]
if n_spec == ws.getNumberHistograms():
s2w = np.arange(n_spec)
else:
nmon = np.array(raw['/raw_data_1/isis_vms_compat/NMON'])[0]
spec = np.array(raw['/raw_data_1/isis_vms_compat/SPEC'])[nmon:]
udet = np.array(raw['/raw_data_1/isis_vms_compat/UDET'])[nmon:]
_, iq = np.unique(spec, return_index=True)
s2 = np.hstack([np.array(ws.getSpectrum(ii).getDetectorIDs()) for ii in range(ws.getNumberHistograms())])
_, c1, _ = np.intersect1d(udet[iq], s2, return_indices=True)
s2w = -np.ones(iq.shape, dtype=np.int32)
s2w[c1] = np.array(ws.getIndicesFromDetectorIDs(s2[c1].tolist()))
spe[detroot].create_dataset('spec2work', s2w.shape, dtype='i4', data=s2w)

#========================================================
# MARI specific functions
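
The spec2work table written at the end of copy_inst_info above maps raw-file spectrum numbers to workspace indices by matching detector IDs. A NumPy-only sketch of the matching idea with made-up arrays (the real code also strips monitors and uses the Mantid workspace's getIndicesFromDetectorIDs for the final lookup):

import numpy as np

spec = np.array([5, 5, 6, 7, 8])             # stand-in for isis_vms_compat/SPEC
udet = np.array([101, 102, 103, 104, 105])   # stand-in for isis_vms_compat/UDET
ws_detids = np.array([103, 104, 105])        # detector IDs present in the reduced workspace

_, iq = np.unique(spec, return_index=True)   # one representative detector per spectrum
_, c1, c2 = np.intersect1d(udet[iq], ws_detids, return_indices=True)
s2w = -np.ones(iq.shape, dtype=np.int32)     # -1 marks spectra absent from the workspace
s2w[c1] = c2                                 # the real code asks the workspace for the index here
print(s2w)                                   # -> [-1  0  1  2]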
Expand Down Expand Up @@ -302,12 +316,15 @@ def inrange(tf, l):
freq = mode(getLog('Fermi_Speed'))
except ValueError:
return []
ei_nominal = mode(getLog('Ei_nominal'))
phase1, phase2 = (mode(getLog(nam)) for nam in ['Phase_Thick_1', 'Phase_Thick_2'])
delay = getfracLog('Fermi_delay')
sqrt_ei = np.sqrt(ei_nominal)
lmc = 10.04 # Moderator-Fermi distance
period = 1.e6 / freq
try:
ei_nominal = mode(getLog('Ei_nominal'))
except RuntimeError: # Old file
ei_nominal = ((2286.26 * lmc) / delay)**2
sqrt_ei = np.sqrt(ei_nominal)
delay_calc = ((2286.26 * lmc) / sqrt_ei)
t_offset_ref = {'S':2033.3/freq-5.4, 'G':1339.9/freq-7.3}
t_offset = delay - (delay_calc % period)
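
The Ei_nominal fallback added above inverts the time-of-flight relation t[us] ≈ 2286.26 · L[m] / sqrt(E[meV]) already used for delay_calc; a tiny numeric check with a made-up delay value:

import numpy as np

lmc = 10.04                                   # moderator-to-Fermi distance used in autoei (m)
delay = 2286.26 * lmc / np.sqrt(180.0)        # hypothetical Fermi delay (us) for a 180 meV setting

ei_nominal = (2286.26 * lmc / delay) ** 2     # recover Ei when the Ei_nominal log is missing
print(round(ei_nominal, 1))                   # -> 180.0

delay_calc = 2286.26 * lmc / np.sqrt(ei_nominal)   # forward direction, as used for t_offset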
79 changes: 61 additions & 18 deletions tests/run_test.py
@@ -5,6 +5,7 @@
import mantid.simpleapi as s_api
from os.path import abspath, dirname
from mantid.simpleapi import LoadNexusProcessed
from numpy.testing import assert_allclose

class DGReductionTest(unittest.TestCase):

@@ -34,6 +35,19 @@ def substitute_file(filename, out_file, subs_dict):
f.write(ftxt)


@staticmethod
def load_return_sum(filename):
if isinstance(filename, str):
if 'nxspe' in filename:
ws = s_api.LoadNXSPE(filename)
else:
ws = s_api.LoadNexusProcessed(filename)
else:
ws = filename
ei = ws.getEFixed(ws.getDetector(0).getID())
wsi = s_api.Integration(ws, -ei/5, ei/5)
return [np.nansum(ws.extractY()), np.nansum(wsi.extractY())]

#@classmethod
#def tearDownClass(cls):
# # Removes all generated files
@@ -62,11 +76,12 @@ def test_MARI(self):
outfile = os.path.join(self.outputpath, 'mari_reduction.py')
self.substitute_file(infile, outfile, subsdict)
import mari_reduction
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAR28581_180meV.nxspe')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAR28581_29.8meV.nxspe')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAR28581_11.7meV.nxspe')))
#Load automatically calls `LoadNeXus` first which chokes on the file...
#ws = s_api.Load(os.path.join(self.outputpath, 'MAR28581_180meV.nxspe'))
cksum = {}
for ff in ['MAR28581_180meV.nxspe', 'MAR28581_29.8meV.nxspe', 'MAR28581_11.7meV.nxspe']:
cksum[ff] = self.load_return_sum(os.path.join(self.outputpath, ff))
assert_allclose(cksum['MAR28581_180meV.nxspe'], [2875.206344581399, 1142.3423609297706])
assert_allclose(cksum['MAR28581_29.8meV.nxspe'], [9883.796577551771, 699.569446722105])
assert_allclose(cksum['MAR28581_11.7meV.nxspe'], [7579.20214842476, 215.5181898957152])


def test_MARI_multiple_lowE(self):
@@ -85,8 +100,11 @@ def test_MARI_multiple_lowE(self):
outfile = os.path.join(self.outputpath, 'mari_reduction_lowE.py')
self.substitute_file(infile, outfile, subsdict)
import mari_reduction_lowE
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAR28727_1.84meV.nxspe')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAR28727_1.1meV.nxspe')))
cksum = {}
for ff in ['MAR28727_1.84meV.nxspe', 'MAR28727_1.1meV.nxspe']:
cksum[ff] = self.load_return_sum(os.path.join(self.outputpath, ff))
assert_allclose(cksum['MAR28727_1.84meV.nxspe'], [163870.34277088975, 630.7098856273342])
assert_allclose(cksum['MAR28727_1.1meV.nxspe'], [21173.386235858427, 54.68185719767952])


def test_existing_workspace(self):
@@ -107,8 +125,11 @@ def test_existing_workspace(self):
outfile = os.path.join(self.outputpath, 'mari_reduction_existing.py')
self.substitute_file(infile, outfile, subsdict)
import mari_reduction_existing
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MARws_existing_1.84meV.nxspe')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MARws_existing_1.1meV.nxspe')))
cksum = {}
for ff in ['MARws_existing_1.84meV.nxspe', 'MARws_existing_1.1meV.nxspe']:
cksum[ff] = self.load_return_sum(os.path.join(self.outputpath, ff))
assert_allclose(cksum['MARws_existing_1.84meV.nxspe'], [163949.90492295238, 630.5250778015388])
assert_allclose(cksum['MARws_existing_1.1meV.nxspe'], [20984.04034614979, 54.3481109137481])


def test_LET_QENS(self):
@@ -134,10 +155,12 @@ def test_LET_QENS(self):
outfile = os.path.join(self.outputpath, 'let_reduction.py')
self.substitute_file(infile, outfile, subsdict)
import let_reduction
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'LET93338_3.7meV_red.nxs')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'LET93338_1.77meV_red.nxs')))
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'LET93338_1.03meV_red.nxs')))
ws = s_api.Load(os.path.join(self.outputpath, 'LET93338_3.7meV_red.nxs'))
cksum = {}
for ff in ['LET93338_3.7meV_red.nxs', 'LET93338_1.77meV_red.nxs', 'LET93338_1.03meV_red.nxs']:
cksum[ff] = self.load_return_sum(os.path.join(self.outputpath, ff))
assert_allclose(cksum['LET93338_3.7meV_red.nxs'], [3192.464851059197, 29.13919482652566])
assert_allclose(cksum['LET93338_1.77meV_red.nxs'], [3105.388960389411, 13.568151311273674])
assert_allclose(cksum['LET93338_1.03meV_red.nxs'], [124.64663186380675, 0.31298472468130645])


def test_LET_same_angle(self):
@@ -166,6 +189,8 @@ def test_LET_same_angle(self):
self.assertTrue('Plus' in [v.split('_')[0] for v in algs.keys()])
loaded_files = [p.value() for al, pp in algs.items() if 'Load' in al for p in pp if p.name() == 'Filename']
self.assertTrue(any(['92089' in v for v in loaded_files]) and any(['92168' in v for v in loaded_files]))
cksum = {'LET92089_3.7meV_1to1.nxs':self.load_return_sum(ws_out)}
assert_allclose(cksum['LET92089_3.7meV_1to1.nxs'], [1532857.3406259378, 12756.274264973032])


def test_MERLIN(self):
@@ -191,8 +216,10 @@ def test_MERLIN(self):
outfile = os.path.join(self.outputpath, 'merlin_reduction.py')
self.substitute_file(infile, outfile, subsdict)
import merlin_reduction
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MER59151_150meV_powder.nxspe')))
ws = s_api.Load(os.path.join(self.outputpath, 'MER59151_150meV_powder.nxspe'))
filepath = os.path.join(self.outputpath, 'MER59151_150meV_powder.nxspe')
self.assertTrue(os.path.exists(filepath))
cksum = {'MER59151_150meV_powder.nxspe':self.load_return_sum(filepath)}
assert_allclose(cksum['MER59151_150meV_powder.nxspe'], [1.3581422074777563e-06, 0.0])


def test_MAPS(self):
@@ -221,8 +248,10 @@ def test_MAPS(self):
outfile = os.path.join(self.outputpath, 'maps_reduction.py')
self.substitute_file(infile, outfile, subsdict)
import maps_reduction
self.assertTrue(os.path.exists(os.path.join(self.outputpath, 'MAP41335_80meV_1to1.nxspe')))
ws = s_api.Load(os.path.join(self.outputpath, 'MAP41335_80meV_1to1.nxspe'))
filepath = os.path.join(self.outputpath, 'MAP41335_80meV_1to1.nxspe')
self.assertTrue(os.path.exists(filepath))
cksum = {'MAP41335_80meV_1to1.nxspe':self.load_return_sum(filepath)}
assert_allclose(cksum['MAP41335_80meV_1to1.nxspe'], [23131.138680620898, 4327.7270169435615])


def test_auto_iliad(self):
@@ -241,9 +270,23 @@ def test_func_continuous(self):
run_reduction(sample=[97138, 97139], Ei_list=[1.7], sumruns=True, wv_file='WV_91329.txt',
inst='LET', mask='LET_mask_222.xml', powdermap='LET_rings_222.xml',
cs_block='T_Stick', cs_block_unit='K', cs_bin_size=10)
cksum = {}
for tt in np.arange(197.9, 288, 10):
self.assertTrue(os.path.exists(os.path.join(self.outputpath, f'LET97138_1.7meV_{tt}K_powder.nxspe')))
filepath = os.path.join(self.outputpath, f'LET97138_1.7meV_{tt}K_powder.nxspe')
self.assertTrue(os.path.exists(filepath))
cksum[f'LET97138_1.7meV_{tt}K_powder.nxspe'] = self.load_return_sum(filepath)
assert_allclose(cksum['LET97138_1.7meV_197.9K_powder.nxspe'], [44450.029085027454, 186.0716814404816])
assert_allclose(cksum['LET97138_1.7meV_207.9K_powder.nxspe'], [4494.497021939263, 18.816841893155306])
assert_allclose(cksum['LET97138_1.7meV_217.9K_powder.nxspe'], [2518.131943374195, 10.572366288273505])
assert_allclose(cksum['LET97138_1.7meV_227.9K_powder.nxspe'], [1592.3142473214912, 6.667062755720883])
assert_allclose(cksum['LET97138_1.7meV_237.9K_powder.nxspe'], [1577.2556582201576, 6.562542336896957])
assert_allclose(cksum['LET97138_1.7meV_247.9K_powder.nxspe'], [1518.5858436837416, 6.283506446675478])
assert_allclose(cksum['LET97138_1.7meV_257.9K_powder.nxspe'], [1144.3779715386936, 3.803046383223965])
assert_allclose(cksum['LET97138_1.7meV_267.9K_powder.nxspe'], [1093.830399356654, 3.394667791223837])
assert_allclose(cksum['LET97138_1.7meV_277.9K_powder.nxspe'], [1074.4433555947203, 3.1890836860885927])
assert_allclose(cksum['LET97138_1.7meV_287.9K_powder.nxspe'], [1059.5601228491867, 3.0205581732234803])


if __name__ == '__main__':
unittest.main()
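
If the reference values above ever need regenerating after an intentional change to the reduction, a helper in the same spirit as load_return_sum could print fresh ones (assuming Mantid is importable and the reduced files exist; the path in the example is a placeholder):

import numpy as np
import mantid.simpleapi as s_api

def checksum(path):
    # Total intensity plus the intensity integrated over -Ei/5..Ei/5,
    # mirroring DGReductionTest.load_return_sum.
    if path.endswith('.nxspe'):
        ws = s_api.LoadNXSPE(path)
    else:
        ws = s_api.LoadNexusProcessed(path)
    ei = ws.getEFixed(ws.getDetector(0).getID())
    wsi = s_api.Integration(ws, RangeLower=-ei/5, RangeUpper=ei/5)
    return [np.nansum(ws.extractY()), np.nansum(wsi.extractY())]

# e.g. print(checksum('MAR28581_180meV.nxspe'))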
