Skip to content

Commit

Permalink
Insert sample number, plus integration tests
Browse files Browse the repository at this point in the history
Signed-off-by: Jose Borreguero <[email protected]>
  • Loading branch information
jmborr committed Jun 20, 2024
1 parent d74efa6 commit 3deaecf
Show file tree
Hide file tree
Showing 10 changed files with 1,460 additions and 502 deletions.
108 changes: 80 additions & 28 deletions src/mr_reduction/mr_reduction.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,23 +3,36 @@
Reduction for MR
"""

from __future__ import absolute_import, division, print_function

# standard imports
import os
import sys
import time
from typing import List, Optional

# third-party imports
# from .settings import MANTID_PATH
# sys.path.insert(0, MANTID_PATH)
import mantid
from mantid.simpleapi import *

from .data_info import DataInfo
from .mr_direct_beam_finder import DirectBeamFinder
from .reflectivity_output import write_reflectivity
from .script_output import write_partial_script
from .settings import ANA_STATE, ANA_VETO, GLOBAL_AR_DIR, POL_STATE, POL_VETO, ar_out_dir
from .web_report import Report, process_collection
from mantid.simpleapi import (
FilterByLogValue,
GroupWorkspaces,
LoadEventNexus,
MagnetismReflectometryReduction,
MRFilterCrossSections,
SaveNexus,
logger,
mtd,
)

# mr_reduction imports
from mr_reduction.data_info import DataInfo
from mr_reduction.mr_direct_beam_finder import DirectBeamFinder
from mr_reduction.reflectivity_merge import combined_catalog_info, combined_curves, plot_combined
from mr_reduction.reflectivity_output import write_reflectivity
from mr_reduction.runsample import RunSampleNumber
from mr_reduction.script_output import write_partial_script
from mr_reduction.settings import ANA_STATE, ANA_VETO, GLOBAL_AR_DIR, POL_STATE, POL_VETO, ar_out_dir
from mr_reduction.web_report import Report, process_collection

DIRECT_BEAM_EVTS_MIN = 1000

Expand All @@ -41,6 +54,7 @@ def __init__(
self,
data_run,
data_ws=None,
sample_number=None,
output_dir=None,
const_q_binning=False,
const_q_cutoff=0.02,
Expand All @@ -59,21 +73,49 @@ def __init__(
debug=False,
live=False,
):
"""
r"""
The automated reduction is initializable such that most of what we need can be
changed at initialization time. That way the post-processing framework only
needs to create the object and execute the reduction.
@param data_run: run number or file path
Parameters
----------
data_run: str
Run number or file path
data_ws: mantid.dataobjects.EventWorkspace
Mantid events workspace containing the events for the run we're to reduce
sample_number: Optional[Union[int, str]]
Sample number when the run contains more than one sample. Numbers start at 1 (not 0)
output_dir: str
Directory where the autoreduced files are stored. Usually /SNS/REF_M/IPTS-****/shared/
const_q_binning
const_q_cutoff
update_peak_range
use_roi_bck
use_tight_bck
bck_offset
use_sangle
use_roi
q_step
force_peak_roi
peak_roi
force_bck_roi
bck_roi
publish: bool
If `True`, try to upload the HTML report generated by `reduce()` into the livedata server
debug
live
"""

try:
int(data_run)
self.run_number = data_run
self.run_number: Optional[int] = int(data_run)
self.file_path = "REF_M_%s" % data_run
except: # noqa E722
self.run_number = None
self.file_path = data_run
self.data_ws = data_ws
self.sample_number: Optional[int] = None if sample_number is None else int(sample_number)
self.ipts = None
self.output_dir = output_dir
self.const_q_binning = const_q_binning
Expand Down Expand Up @@ -205,7 +247,7 @@ def reduce(self):
# if self.data_ws is not None and self.use_slow_flipper_log:
if self.data_ws is None:
self.data_ws = LoadEventNexus(Filename=self.file_path, OutputWorkspace="raw_events")
self.run_number = self.data_ws.getRunNumber()
self.run_number = int(self.data_ws.getRunNumber())

if self.use_slow_flipper_log:
_xs_list = self.slow_filter_cross_sections(self.data_ws)
Expand Down Expand Up @@ -235,13 +277,17 @@ def reduce(self):
# Generate stitched plot
ref_plot = None
try:
from .reflectivity_merge import combined_catalog_info, combined_curves, plot_combined

# ipts_number = self.ipts.split('-')[1]
matched_runs, scaling_factors, outputs = combined_curves(run=int(self.run_number), ipts=self.ipts)
run_sample_number = str(RunSampleNumber(self.run_number, self.sample_number))
matched_runs, scaling_factors, outputs = combined_curves(run=run_sample_number, ipts=self.ipts)
if not self.live:
self.json_info = combined_catalog_info(matched_runs, self.ipts, outputs, run_number=self.run_number)
self.json_info = combined_catalog_info(
matched_runs,
self.ipts,
outputs,
run_sample_number=str(RunSampleNumber(self.run_number, self.sample_number)),
)
self.log("Matched runs: %s" % str(matched_runs))
# plotly figures for the reflectivity profile of each cross section, and embed them in an <div> container
ref_plot = plot_combined(matched_runs, scaling_factors, self.ipts, publish=False)
self.log("Generated reflectivity: %s" % len(str(ref_plot)))
except: # noqa E722
Expand Down Expand Up @@ -276,16 +322,18 @@ def reduce_workspace_group(self, xs_list):

# Find reflectivity peak of scattering run
ws = xs_list[0]
run_number = ws.getRunNumber()
entry = ws.getRun().getProperty("cross_section_id").value
self.ipts = ws.getRun().getProperty("experiment_identifier").value

# combine run and sample number when the run contains more than one sample
runsample = RunSampleNumber(self.run_number, self.sample_number)
logger.notice(
"R%s [%s] DATA TYPE: %s [ref=%s] [%s events]"
% (run_number, entry, data_info.data_type, data_info.cross_section, ws.getNumberEvents())
% (runsample, entry, data_info.data_type, data_info.cross_section, ws.getNumberEvents())
)
self.log(
"R%s [%s] DATA TYPE: %s [ref=%s] [%s events]"
% (run_number, entry, data_info.data_type, data_info.cross_section, ws.getNumberEvents())
% (runsample, entry, data_info.data_type, data_info.cross_section, ws.getNumberEvents())
)

if data_info.data_type < 1 or ws.getNumberEvents() < self.min_number_events:
Expand Down Expand Up @@ -320,19 +368,23 @@ def reduce_workspace_group(self, xs_list):
SpecularPixel=data_info.peak_position,
ConstantQBinning=self.const_q_binning,
ConstQTrim=0.1,
OutputWorkspace="r_%s" % run_number,
OutputWorkspace=f"r_{runsample}",
)

# Save sample number in the logs of the reduced workspaces
if runsample.sample_number:
runsample.log_sample_number(f"r_{runsample}")

# Generate partial python script
self.log("Workspace r_%s: %s" % (run_number, type(mtd["r_%s" % run_number])))
write_partial_script(mtd["r_%s" % run_number])
self.log("Workspace r_%s: %s" % (runsample, type(mtd["r_%s" % runsample])))
write_partial_script(mtd["r_%s" % runsample])

report_list = []
for ws in xs_list:
try:
if str(ws).endswith("unfiltered"):
continue
self.log("\n--- Run %s %s ---\n" % (self.run_number, str(ws)))
self.log(f"\n--- Run {runsample} {str(ws)} ---\n")
entry = ws.getRun().getProperty("cross_section_id").value
reflectivity = mtd["%s__reflectivity" % str(ws)]
report = Report(ws, data_info, direct_info, reflectivity, logfile=self.logfile, plot_2d=self.plot_2d)
Expand All @@ -342,12 +394,12 @@ def reduce_workspace_group(self, xs_list):
self.log(" - ready to write: %s" % self.output_dir)
write_reflectivity(
[reflectivity],
os.path.join(self.output_dir, "REF_M_%s_%s_autoreduce.dat" % (run_number, entry)),
os.path.join(self.output_dir, "REF_M_%s_%s_autoreduce.dat" % (runsample, entry)),
data_info.cross_section_label,
)
SaveNexus(
InputWorkspace=reflectivity,
Filename=os.path.join(self.output_dir, "REF_M_%s_%s_autoreduce.nxs.h5" % (run_number, entry)),
Filename=os.path.join(self.output_dir, "REF_M_%s_%s_autoreduce.nxs.h5" % (runsample, entry)),
)
self.log(" - done writing")
# Write partial output script
Expand Down
Loading

0 comments on commit 3deaecf

Please sign in to comment.