34 changes: 34 additions & 0 deletions docs/user/slicing.rst
@@ -93,6 +93,8 @@ The time slicing parameters are applied relative to the whole data from the beginning of
the run, independent of any event filtering during loading (i.e. using the parameter
``"loadOptions"``).

The following example shows a configuration for EQSANS (an SNS time-of-flight instrument):

.. code-block:: json

{
@@ -111,6 +113,38 @@ the run, independent of any event filtering during loading (i.e. using the parameter
:alt: Diagram of log slicing
:width: 800px

Event Filtering for BIOSANS and GPSANS
++++++++++++++++++++++++++++++++++++++++

BIOSANS and GPSANS (HFIR monochromatic instruments) also support event filtering using
``loadOptions``. The same ``FilterByTimeStart`` and ``FilterByTimeStop`` parameters can be used
to filter events before reduction. These options are passed directly to the Mantid algorithm
``LoadEventAsWorkspace2D``.

Example for BIOSANS or GPSANS:

.. code-block:: json

{
"instrumentName": "BIOSANS",
"sample": {
"runNumber": "1322",
"loadOptions": {"FilterByTimeStart": 10.0, "FilterByTimeStop": 300.0}
},
"configuration": {
...
}
}

When using ``loadOptions`` with time slicing on HFIR instruments, the time filtering is applied
first during loading, and then the time slicing is applied to the filtered events. This allows for
fine-grained control over which portion of the run to analyze.
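
The two mechanisms can therefore be combined. The sketch below (not part of this PR) expresses this
through the Python API, mirroring the integration tests added for this feature; the time-slicing keys
``useTimeSlice`` and ``timeSliceInterval`` are assumed to match the time-slicing parameters documented
earlier on this page, and the data path is hypothetical:

.. code-block:: python

    from drtsans.mono.biosans import load_all_files, reduction_parameters

    reduction_input = {
        "instrumentName": "BIOSANS",
        "sample": {
            "runNumber": "1322",
            "transmission": {"runNumber": ""},
            # event filtering during loading: keep only 10 s to 300 s of the run
            "loadOptions": {"FilterByTimeStart": 10.0, "FilterByTimeStop": 300.0},
        },
        "background": {"runNumber": "", "transmission": {"runNumber": ""}},
        "beamCenter": {"runNumber": "1322"},
        "emptyTransmission": {"runNumber": ""},
        "configuration": {
            "useDefaultMask": False,
            # assumed time-slicing keys: slice the filtered events into 60 s frames
            "useTimeSlice": True,
            "timeSliceInterval": 60.0,
        },
    }

    # fill in defaults, then load; slicing is applied to the already-filtered events
    reduction_input = reduction_parameters(reduction_input, "BIOSANS", validate=False)
    loaded = load_all_files(reduction_input, path="/path/to/data")  # hypothetical data path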

.. note::
The ``FilterByTimeStart`` and ``FilterByTimeStop`` parameters specify times in seconds relative
to the start of the run. For HFIR instruments (BIOSANS and GPSANS), these options are passed to
``LoadEventAsWorkspace2D``, while for SNS instruments (EQSANS), they are passed to ``LoadEventNexus``.
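
As a minimal illustration of the note above (a sketch, not part of this PR: the file names are
hypothetical, and it is assumed that both loaders expose the standard ``Filename`` property in
addition to the filter options named here):

.. code-block:: python

    from mantid.simpleapi import LoadEventAsWorkspace2D, LoadEventNexus

    # HFIR instruments (BIOSANS/GPSANS): the filter options are forwarded to
    # LoadEventAsWorkspace2D
    ws_hfir = LoadEventAsWorkspace2D(
        Filename="CG3_1322.nxs.h5",  # hypothetical file name
        FilterByTimeStart=10.0,      # seconds relative to the start of the run
        FilterByTimeStop=300.0,
    )

    # SNS instruments (EQSANS): the same options are forwarded to LoadEventNexus
    ws_sns = LoadEventNexus(
        Filename="EQSANS_12345.nxs.h5",  # hypothetical file name
        FilterByTimeStart=10.0,
        FilterByTimeStop=300.0,
    )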

Log Slicing
-----------

14 changes: 10 additions & 4 deletions src/drtsans/mono/biosans/api.py
@@ -116,6 +116,8 @@ def load_all_files(
instrument_name = reduction_input["instrumentName"]
ipts = reduction_input["iptsNumber"]
sample = reduction_input["sample"]["runNumber"]
# Extract sample loadOptions - use .get() with default empty dict to handle cases where loadOptions is not provided
sample_load_options = reduction_input.get("sample", {}).get("loadOptions", {})

# on the fly check to see if mid-range detector is present in data
reduction_input["has_midrange_detector"] = file_has_midrange_detector(
@@ -229,6 +231,10 @@ def load_all_files(

# special loading case for sample to allow the slicing options
logslice_data_dict = {}
# Merge load parameters for sample loading: load_params takes precedence (preserving
# existing behavior), while sample_load_options supplies per-sample overrides
load_params_sample = {**sample_load_options, **load_params}

# Retrieve parameters for overwriting geometry related meta data
swd_value_dict = parse_json_meta_data(
@@ -282,7 +288,7 @@
si_nominal_distance=SI_WINDOW_NOMINAL_DISTANCE_METER,
sample_to_si_value=swd_value_dict[meta_data.SAMPLE],
sample_detector_distance_value=sdd_value_dict[meta_data.SAMPLE],
**load_params,
**load_params_sample,
)

for _w in mtd[ws_name]:
@@ -318,8 +324,8 @@
# if sample is not an absolute path to nexus file, convert it to the absolute path
filename = abspaths(sample, instrument=instrument_name, ipts=ipts, directory=path)
# Pass sample load params to be used in LoadEventAsWorkspace2D
load_params["XCenter"] = wave_length_dict[meta_data.SAMPLE]
load_params["XWidth"] = wave_length_spread_dict[meta_data.SAMPLE]
load_params_sample["XCenter"] = wave_length_dict[meta_data.SAMPLE]
load_params_sample["XWidth"] = wave_length_spread_dict[meta_data.SAMPLE]
logger.notice(f"Loading filename {filename} from sample {sample}")
biosans.load_events_and_histogram(
filename,
@@ -328,7 +334,7 @@
si_nominal_distance=SI_WINDOW_NOMINAL_DISTANCE_METER,
sample_to_si_value=swd_value_dict[meta_data.SAMPLE],
sample_detector_distance_value=sdd_value_dict[meta_data.SAMPLE],
**load_params,
**load_params_sample,
)
# Overwrite meta data
set_meta_data(
14 changes: 10 additions & 4 deletions src/drtsans/mono/gpsans/api.py
@@ -111,6 +111,8 @@ def load_all_files(
instrument_name = reduction_input["instrumentName"]
ipts = reduction_input["iptsNumber"]
sample = reduction_input["sample"]["runNumber"]
# Extract sample loadOptions - use .get() with default empty dict to handle cases where loadOptions is not provided
sample_load_options = reduction_input.get("sample", {}).get("loadOptions", {})
sample_trans = reduction_input["sample"]["transmission"]["runNumber"]
bkgd = reduction_input["background"]["runNumber"]
bkgd_trans = reduction_input["background"]["transmission"]["runNumber"]
@@ -243,6 +245,10 @@ def load_all_files(

# special loading case for sample to allow the slicing options
logslice_data_dict = {}
# Merge load parameters for sample loading: load_params takes precedence (preserving
# existing behavior), while sample_load_options supplies per-sample overrides
load_params_sample = {**sample_load_options, **load_params}
if timeslice or logslice or polarized:
# Load data and split
ws_name = f"{prefix}_{instrument_name}_{sample}_raw_histo_slice_group"
@@ -271,7 +277,7 @@
sample_to_si_value=swd_value_dict[meta_data.SAMPLE],
sample_detector_distance_value=sdd_value_dict[meta_data.SAMPLE],
reduction_config=reduction_config,
**load_params,
**load_params_sample,
)

for _w in mtd[ws_name]:
@@ -308,8 +314,8 @@
if not registered_workspace(ws_name):
filename = abspaths(sample, instrument=instrument_name, ipts=ipts, directory=path)
# Pass sample load params to be used in LoadEventAsWorkspace2D
load_params["XCenter"] = wave_length_dict[meta_data.SAMPLE]
load_params["XWidth"] = wave_length_spread_dict[meta_data.SAMPLE]
load_params_sample["XCenter"] = wave_length_dict[meta_data.SAMPLE]
load_params_sample["XWidth"] = wave_length_spread_dict[meta_data.SAMPLE]
logger.notice(f"Loading filename {filename} to {ws_name}")
load_events_and_histogram(
filename,
@@ -318,7 +324,7 @@
si_nominal_distance=SI_WINDOW_NOMINAL_DISTANCE_METER,
sample_to_si_value=swd_value_dict[meta_data.SAMPLE],
sample_detector_distance_value=sdd_value_dict[meta_data.SAMPLE],
**load_params,
**load_params_sample,
)
# Overwrite meta data
set_meta_data(
96 changes: 96 additions & 0 deletions tests/integration/drtsans/mono/biosans/test_loadoptions.py
@@ -0,0 +1,96 @@
"""Integration tests for loadOptions time filtering in BIOSANS."""

import pytest
from mantid.simpleapi import SumSpectra
from drtsans.mono.biosans import load_all_files, reduction_parameters


@pytest.mark.datarepo
def test_load_with_time_filtering(datarepo_dir):
"""Test that FilterByTimeStart and FilterByTimeStop work correctly in loadOptions."""
reduction_input = {
"instrumentName": "CG3",
"sample": {
"runNumber": "1322",
"transmission": {"runNumber": ""},
"loadOptions": {
"FilterByTimeStart": 0.0,
"FilterByTimeStop": 10.0,
},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "1322"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "BIOSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.biosans)

# Verify sample workspace was loaded
assert loaded.sample is not None
assert len(loaded.sample) == 1

# Check that workspace was loaded with time filtering
ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0

# Verify time filtering was applied by checking event count
ws_summed = SumSpectra(ws)
assert ws_summed.dataY(0)[0] == 2283 # number of events in the first 10 seconds


@pytest.mark.datarepo
def test_load_without_time_filtering_baseline(datarepo_dir):
"""Test loading without time filtering as baseline."""
reduction_input = {
"instrumentName": "CG3",
"sample": {
"runNumber": "1322",
"transmission": {"runNumber": ""},
"loadOptions": {},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "1322"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "BIOSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.biosans)

# Verify sample workspace was loaded
assert loaded.sample is not None
assert len(loaded.sample) == 1

ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0


@pytest.mark.datarepo
def test_load_with_partial_time_range(datarepo_dir):
"""Test loading with only FilterByTimeStart specified."""
reduction_input = {
"instrumentName": "CG3",
"sample": {
"runNumber": "1322",
"transmission": {"runNumber": ""},
"loadOptions": {"FilterByTimeStart": 5.0},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "1322"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "BIOSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.biosans)

# Verify sample workspace was loaded successfully with partial time filter
assert loaded.sample is not None
assert len(loaded.sample) == 1
ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0
100 changes: 100 additions & 0 deletions tests/integration/drtsans/mono/gpsans/test_loadoptions.py
@@ -0,0 +1,100 @@
"""Integration tests for loadOptions time filtering in GPSANS."""

import pytest
from mantid.simpleapi import SumSpectra
from drtsans.mono.gpsans import load_all_files, reduction_parameters


@pytest.mark.datarepo
def test_load_with_time_filtering(datarepo_dir):
"""Test that FilterByTimeStart and FilterByTimeStop work correctly in loadOptions."""
reduction_input = {
"instrumentName": "CG2",
"sample": {
"runNumber": "9166",
"transmission": {"runNumber": ""},
"loadOptions": {
"FilterByTimeStart": 0.0,
"FilterByTimeStop": 50.0,
},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "9166"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "GPSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.gpsans)

# Verify sample workspace was loaded
assert loaded.sample is not None
assert len(loaded.sample) == 1

# Check that workspace was loaded with time filtering
ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0

# Check that events survive the time filter; the exact count is not asserted
# because the expected value for this run with this filter is not known a priori
ws_summed = SumSpectra(ws)
event_count = ws_summed.dataY(0)[0]
assert event_count > 0, "Should have some events in the filtered range"


@pytest.mark.datarepo
def test_load_without_time_filtering_baseline(datarepo_dir):
"""Test loading without time filtering as baseline."""
reduction_input = {
"instrumentName": "CG2",
"sample": {
"runNumber": "9166",
"transmission": {"runNumber": ""},
"loadOptions": {},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "9166"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "GPSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.gpsans)

# Verify sample workspace was loaded
assert loaded.sample is not None
assert len(loaded.sample) == 1

ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0


@pytest.mark.datarepo
def test_load_with_partial_time_range(datarepo_dir):
"""Test loading with only FilterByTimeStop specified."""
reduction_input = {
"instrumentName": "CG2",
"sample": {
"runNumber": "9166",
"transmission": {"runNumber": ""},
"loadOptions": {"FilterByTimeStop": 30.0},
},
"background": {"runNumber": "", "transmission": {"runNumber": ""}},
"beamCenter": {"runNumber": "9166"},
"emptyTransmission": {"runNumber": ""},
"configuration": {"useDefaultMask": False},
}

reduction_input = reduction_parameters(reduction_input, "GPSANS", validate=False)
loaded = load_all_files(reduction_input, path=datarepo_dir.gpsans)

# Verify sample workspace was loaded successfully with partial time filter
assert loaded.sample is not None
assert len(loaded.sample) == 1
ws = loaded.sample[0]
assert ws is not None
assert ws.getNumberHistograms() > 0