WorkflowCollection for batch reduction #166

Draft · wants to merge 35 commits into base: main

Commits (35)
edcf4c2
feat: more flexible helper for batch reduction
jokasimr May 5, 2025
7dbbf2c
fix: remove duplicate
jokasimr May 5, 2025
25add6b
update docs
jokasimr May 6, 2025
eda422e
spelling
jokasimr May 6, 2025
98a9389
fix: add theta to reference, can be useful in some contexts
jokasimr May 6, 2025
743743a
fix: handle case when SampleRotation etc are set in workflow
jokasimr May 6, 2025
45bcd0e
fix: add parameters before setting filenames
jokasimr May 6, 2025
7fb806b
docs: fix
jokasimr May 6, 2025
f097188
tests
jokasimr May 6, 2025
4f9cbd2
Merge branch 'main' into helper-for-reducing-multiple
jokasimr May 6, 2025
7c1750d
Merge branch 'main' into helper-for-reducing-multiple
jokasimr May 20, 2025
1926849
Merge remote-tracking branch 'origin/main' into helper-for-reducing-m…
jokasimr Jun 4, 2025
fc4214a
Merge branch 'main' into helper-for-reducing-multiple
jokasimr Jun 11, 2025
f158ac2
add scaling factor to Amor workflow
nvaytet Jul 3, 2025
268f487
map on unscaled data
nvaytet Jul 3, 2025
eb56472
add batch_processor and WorkflowCollection to the tools
nvaytet Jul 15, 2025
9637acd
update Amor notebook to use workflow collection
nvaytet Jul 15, 2025
2d2871d
fix critical_edge parameter in scaling
nvaytet Jul 15, 2025
2a5120d
remove commented code
nvaytet Jul 15, 2025
84167a5
Merge branch 'main' into pipeline-collection
nvaytet Aug 5, 2025
aced050
add tests for wf collection
nvaytet Aug 5, 2025
504405b
improve docstring
nvaytet Aug 5, 2025
7e0c24d
start fixing existing unit tests
nvaytet Aug 6, 2025
4bd66d3
refactor first tests slightly
nvaytet Aug 6, 2025
9f14288
simplify critical edge handling
nvaytet Aug 7, 2025
fff9b4e
update/fix tools tests
nvaytet Aug 7, 2025
a8d1b3a
fix scaling for a single workflow and fix amor pipeline tests
nvaytet Aug 7, 2025
31c437f
Merge branch 'main' into pipeline-collection
nvaytet Aug 7, 2025
7567565
cleanup
nvaytet Aug 7, 2025
7f884fb
do not fail if UnscaledReducibleData is not in graph
nvaytet Aug 7, 2025
2dea8de
modify the WorkflowCollection to just use cyclebane mapping under the…
nvaytet Aug 13, 2025
f181b41
formatting
nvaytet Aug 13, 2025
a23c007
update amor notebook
nvaytet Aug 13, 2025
ccfb1b7
debugging compute multiple
nvaytet Aug 15, 2025
c8e43a9
fix compute
nvaytet Aug 19, 2025
131 changes: 52 additions & 79 deletions docs/user-guide/amor/amor-reduction.ipynb
@@ -30,11 +30,12 @@
"from ess.amor import data # noqa: F401\n",
"from ess.reflectometry.types import *\n",
"from ess.amor.types import *\n",
"from ess.reflectometry import batch_processor\n",
"\n",
"# The files used in this tutorial have some issues that make scippnexus\n",
"# raise warnings when loading them. To avoid noise in the notebook, the warnings are silenced.\n",
"warnings.filterwarnings('ignore', 'Failed to convert .* into a transformation')\n",
"warnings.filterwarnings('ignore', 'Invalid transformation, missing attribute')"
"warnings.filterwarnings('ignore', 'Invalid transformation')"
]
},
{
@@ -124,19 +125,17 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"## Computing sample reflectivity\n",
"## Computing sample reflectivity from batch reduction\n",
"\n",
"We now compute the sample reflectivity from 4 runs that used different sample rotation angles.\n",
"The measurements at different rotation angles cover different ranges of $Q$."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The measurements at different rotation angles cover different ranges of $Q$.\n",
"\n",
"We set up a batch reduction helper (using the `batch_processor` function) which makes it easy to process multiple runs at once.\n",
"\n",
"In this tutorial we use some Amor data files we have received.\n",
"The file paths to the tutorial files are obtained by calling:"
]
@@ -184,15 +183,22 @@
" },\n",
"}\n",
"\n",
"\n",
"reflectivity = {}\n",
"for run_number, params in runs.items():\n",
" wf = workflow.copy()\n",
" for key, value in params.items():\n",
" wf[key] = value\n",
" reflectivity[run_number] = wf.compute(ReflectivityOverQ).hist()\n",
"\n",
"sc.plot(reflectivity, norm='log', vmin=1e-4)"
"batch = batch_processor(workflow, runs)\n",
"batch.param_table"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Compute R(Q) for all runs\n",
"reflectivity = batch.compute(ReflectivityOverQ)\n",
"sc.plot(\n",
" {key: r.hist() for key, r in reflectivity.items()},\n",
" norm='log', vmin=1e-4\n",
")"
]
},
{
@@ -212,13 +218,16 @@
"source": [
"from ess.reflectometry.tools import scale_reflectivity_curves_to_overlap\n",
"\n",
"scaled_reflectivity_curves, scale_factors = scale_reflectivity_curves_to_overlap(\n",
" reflectivity.values(),\n",
" # Optionally specify a Q-interval where the reflectivity is known to be 1.0\n",
"# Pass the batch workflow collection and get a new workflow collection as output,\n",
"# with the correct scaling factors applied.\n",
"scaled_wf = scale_reflectivity_curves_to_overlap(\n",
" batch,\n",
" critical_edge_interval=(sc.scalar(0.01, unit='1/angstrom'), sc.scalar(0.014, unit='1/angstrom'))\n",
")\n",
"\n",
"sc.plot(dict(zip(reflectivity.keys(), scaled_reflectivity_curves, strict=True)), norm='log', vmin=1e-5)"
"scaled_r = {key: r.hist() for key, r in scaled_wf.compute(ReflectivityOverQ).items()}\n",
"\n",
"sc.plot(scaled_r, norm='log', vmin=1e-5)"
]
},
{
@@ -235,7 +244,7 @@
"outputs": [],
"source": [
"from ess.reflectometry.tools import combine_curves\n",
"combined = combine_curves(scaled_reflectivity_curves, workflow.compute(QBins))\n",
"combined = combine_curves(scaled_r.values(), workflow.compute(QBins))\n",
"combined.plot(norm='log')"
]
},
@@ -265,26 +274,8 @@
"metadata": {},
"outputs": [],
"source": [
"# Start by computing the `ReflectivityData` for each of the files\n",
"diagnostics = {}\n",
"for run_number, params in runs.items():\n",
" wf = workflow.copy()\n",
" for key, value in params.items():\n",
" wf[key] = value\n",
" diagnostics[run_number] = wf.compute((ReflectivityOverZW, ThetaBins[SampleRun]))\n",
"\n",
"# Scale the results using the scale factors computed earlier\n",
"for run_number, scale_factor in zip(reflectivity.keys(), scale_factors, strict=True):\n",
" diagnostics[run_number][ReflectivityOverZW] *= scale_factor"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"diagnostics['608'][ReflectivityOverZW].hist().flatten(('blade', 'wire'), to='z').plot(norm='log')"
"diagnostics = scaled_wf.compute(ReflectivityOverZW)\n",
"diagnostics['608'].hist().flatten(('blade', 'wire'), to='z').plot(norm='log')"
]
},
{
@@ -304,8 +295,8 @@
"from ess.reflectometry.figures import wavelength_theta_figure\n",
"\n",
"wavelength_theta_figure(\n",
" [result[ReflectivityOverZW] for result in diagnostics.values()],\n",
" theta_bins=[result[ThetaBins[SampleRun]] for result in diagnostics.values()],\n",
" diagnostics.values,\n",
" theta_bins=scaled_wf.compute(ThetaBins[SampleRun]).values,\n",
" q_edges_to_display=(sc.scalar(0.018, unit='1/angstrom'), sc.scalar(0.113, unit='1/angstrom'))\n",
")"
]
@@ -334,8 +325,8 @@
"from ess.reflectometry.figures import q_theta_figure\n",
"\n",
"q_theta_figure(\n",
" [res[ReflectivityOverZW] for res in diagnostics.values()],\n",
" theta_bins=[res[ThetaBins[SampleRun]] for res in diagnostics.values()],\n",
" diagnostics.values,\n",
" theta_bins=scaled_wf.compute(ThetaBins[SampleRun]).values,\n",
" q_bins=workflow.compute(QBins)\n",
")"
]
@@ -380,8 +371,7 @@
"We can save the computed $I(Q)$ to an [ORSO](https://www.reflectometry.org) [.ort](https://github.com/reflectivity/file_format/blob/master/specification.md) file using the [orsopy](https://orsopy.readthedocs.io/en/latest/index.html) package.\n",
"\n",
"First, we need to collect the metadata for that file.\n",
"To this end, we build a pipeline with additional providers.\n",
"We also insert a parameter to indicate the creator of the processed data."
"To this end, we insert a parameter to indicate the creator of the processed data."
]
},
{
@@ -400,7 +390,7 @@
"metadata": {},
"outputs": [],
"source": [
"workflow[orso.OrsoCreator] = orso.OrsoCreator(\n",
"scaled_wf[orso.OrsoCreator] = orso.OrsoCreator(\n",
" fileio.base.Person(\n",
" name='Max Mustermann',\n",
" affiliation='European Spallation Source ERIC',\n",
@@ -409,20 +399,11 @@
")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"workflow.visualize(orso.OrsoIofQDataset, graph_attr={'rankdir': 'LR'})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We build our ORSO dataset from the computed $I(Q)$ and the ORSO metadata:"
"We can visualize the workflow for the `OrsoIofQDataset`:"
]
},
{
@@ -431,15 +412,14 @@
"metadata": {},
"outputs": [],
"source": [
"iofq_dataset = workflow.compute(orso.OrsoIofQDataset)\n",
"iofq_dataset"
"scaled_wf.visualize(orso.OrsoIofQDataset, graph_attr={'rankdir': 'LR'})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We also add the URL of this notebook to make it easier to reproduce the data:"
"We build our ORSO dataset from the computed $I(Q)$ and the ORSO metadata:"
]
},
{
@@ -448,17 +428,14 @@
"metadata": {},
"outputs": [],
"source": [
"iofq_dataset.info.reduction.script = (\n",
" 'https://scipp.github.io/essreflectometry/examples/amor.html'\n",
")"
"iofq_datasets = scaled_wf.compute(orso.OrsoIofQDataset)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now let's repeat this for all the sample measurements!\n",
"To do that we can use an utility in `ess.reflectometry.tools`:"
"We also add the URL of this notebook to make it easier to reproduce the data:"
]
},
{
@@ -467,22 +444,18 @@
"metadata": {},
"outputs": [],
"source": [
"from ess.reflectometry.tools import orso_datasets_from_measurements\n",
"\n",
"datasets = orso_datasets_from_measurements(\n",
" workflow,\n",
" runs.values(),\n",
" # Optionally scale the curves to overlap using `scale_reflectivity_curves_to_overlap`\n",
" scale_to_overlap=True\n",
")"
"for ds in iofq_datasets.values:\n",
" ds.info.reduction.script = (\n",
" 'https://scipp.github.io/essreflectometry/user-guide/amor/amor-reduction.html'\n",
" )"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Finally, we can save the data to a file.\n",
"Note that `iofq_dataset` is an [orsopy.fileio.orso.OrsoDataset](https://orsopy.readthedocs.io/en/latest/orsopy.fileio.orso.html#orsopy.fileio.orso.OrsoDataset)."
"Note that `iofq_datasets` contains [orsopy.fileio.orso.OrsoDataset](https://orsopy.readthedocs.io/en/latest/orsopy.fileio.orso.html#orsopy.fileio.orso.OrsoDataset)s."
]
},
{
@@ -491,7 +464,7 @@
"metadata": {},
"outputs": [],
"source": [
"fileio.orso.save_orso(datasets=datasets, fname='amor_reduced_iofq.ort')"
"fileio.orso.save_orso(datasets=iofq_datasets.values, fname='amor_reduced_iofq.ort')"
]
},
{
@@ -527,7 +500,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.14"
"version": "3.12.7"
}
},
"nbformat": 4,
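Taken together, the notebook changes replace the manual copy-and-configure loop with the new `batch_processor` helper. A condensed sketch of the new flow, assuming `workflow`, `runs`, and the imports from the cells above (the critical-edge interval is the tutorial's):

    # One workflow copy per run, managed as a single collection.
    batch = batch_processor(workflow, runs)

    # Compute one result per run; the result maps run keys to values.
    reflectivity = batch.compute(ReflectivityOverQ)

    # Scale the curves so they overlap, then recompute R(Q) with scaling applied.
    scaled_wf = scale_reflectivity_curves_to_overlap(
        batch,
        critical_edge_interval=(
            sc.scalar(0.01, unit='1/angstrom'),
            sc.scalar(0.014, unit='1/angstrom'),
        ),
    )
    scaled_r = {key: r.hist() for key, r in scaled_wf.compute(ReflectivityOverQ).items()}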
2 changes: 2 additions & 0 deletions src/ess/amor/__init__.py
@@ -16,6 +16,7 @@
Position,
RunType,
SampleRotationOffset,
ScalingFactorForOverlap,
)
from . import (
conversions,
@@ -74,6 +75,7 @@ def default_parameters() -> dict:
),
GravityToggle: True,
SampleRotationOffset[RunType]: sc.scalar(0.0, unit='deg'),
ScalingFactorForOverlap[RunType]: 1.0,
}
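The new `ScalingFactorForOverlap` default of 1.0 means data pass through unscaled unless a factor is set. A minimal sketch of a manual override, assuming the type is exported from `ess.reflectometry.types` like the other parameter types (the value 1.2 is purely illustrative; in practice `scale_reflectivity_curves_to_overlap` computes and applies these factors):

    from ess.reflectometry.types import SampleRun, ScalingFactorForOverlap

    # Hypothetical manual override: scale the sample run by 1.2.
    workflow[ScalingFactorForOverlap[SampleRun]] = 1.2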


2 changes: 1 addition & 1 deletion src/ess/amor/load.py
@@ -17,6 +17,7 @@
NeXusComponent,
NeXusDetectorName,
ProtonCurrent,
RawChopper,
RawSampleRotation,
RunType,
SampleRotation,
@@ -29,7 +30,6 @@
ChopperFrequency,
ChopperPhase,
ChopperSeparation,
RawChopper,
)


1 change: 1 addition & 0 deletions src/ess/amor/normalization.py
@@ -88,6 +88,7 @@ def evaluate_reference_at_sample_coords(
ref = ref.transform_coords(
(
"Q",
"theta",
"wavelength_resolution",
"sample_size_resolution",
"angular_resolution",
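Adding "theta" to the target list makes the reference's scattering angle available downstream (per the commit message, it "can be useful in some contexts", e.g. the theta-binned diagnostics). A toy illustration of the scipp `transform_coords` mechanism this provider relies on (toy graph, not the Amor one):

    import scipp as sc

    da = sc.DataArray(
        sc.ones(dims=['x'], shape=[3]),
        coords={'x': sc.arange('x', 3, unit='m')},
    )
    # Each name in the target tuple is computed from the graph, so adding
    # a target like "theta" above simply requests one more derived coord.
    out = da.transform_coords(('y',), graph={'y': lambda x: 2 * x})
    assert 'y' in out.coords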
4 changes: 0 additions & 4 deletions src/ess/amor/types.py
@@ -27,8 +27,4 @@ class ChopperSeparation(sciline.Scope[RunType, sc.Variable], sc.Variable):
"""Distance between the two choppers."""


class RawChopper(sciline.Scope[RunType, sc.DataGroup], sc.DataGroup):
"""Chopper data loaded from nexus file."""


GravityToggle = NewType("GravityToggle", bool)
18 changes: 15 additions & 3 deletions src/ess/amor/workflow.py
@@ -12,6 +12,8 @@
ProtonCurrent,
ReducibleData,
RunType,
ScalingFactorForOverlap,
UnscaledReducibleData,
WavelengthBins,
YIndexLimits,
ZIndexLimits,
@@ -27,7 +29,7 @@ def add_coords_masks_and_apply_corrections(
wbins: WavelengthBins,
proton_current: ProtonCurrent[RunType],
graph: CoordTransformationGraph,
) -> ReducibleData[RunType]:
) -> UnscaledReducibleData[RunType]:
"""
Computes coordinates, masks and corrections that are
the same for the sample measurement and the reference measurement.
@@ -43,7 +45,17 @@
da = add_proton_current_mask(da)
da = correct_by_proton_current(da)

return ReducibleData[RunType](da)
return UnscaledReducibleData[RunType](da)


def scale_raw_reducible_data(
da: UnscaledReducibleData[RunType],
scale: ScalingFactorForOverlap[RunType],
) -> ReducibleData[RunType]:
"""
Scales the raw data by a given factor.
"""
return ReducibleData[RunType](da * scale)


providers = (add_coords_masks_and_apply_corrections,)
providers = (add_coords_masks_and_apply_corrections, scale_raw_reducible_data)
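The point of the split: corrections now produce `UnscaledReducibleData`, and a separate provider applies the overlap scaling factor to yield `ReducibleData`, so batch mapping can operate on the unscaled node (see the "map on unscaled data" commit). A toy sciline pipeline showing the same two-provider pattern (toy types, not the actual Amor ones):

    from typing import NewType

    import sciline

    Unscaled = NewType('Unscaled', float)
    Scale = NewType('Scale', float)
    Scaled = NewType('Scaled', float)

    def load() -> Unscaled:
        # Stand-in for add_coords_masks_and_apply_corrections.
        return Unscaled(10.0)

    def rescale(da: Unscaled, s: Scale) -> Scaled:
        # Stand-in for scale_raw_reducible_data.
        return Scaled(da * s)

    pipeline = sciline.Pipeline([load, rescale], params={Scale: 1.0})
    assert pipeline.compute(Scaled) == 10.0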
9 changes: 8 additions & 1 deletion src/ess/reflectometry/__init__.py
@@ -12,6 +12,7 @@

from . import conversions, corrections, figures, normalization, orso
from .load import load_reference, save_reference
from .tools import batch_processor

providers = (
*corrections.providers,
@@ -31,9 +32,15 @@

del importlib


__all__ = [
"__version__",
"batch_processor",
"conversions",
"corrections",
"figures",
"load_reference",
"normalization",
"orso",
"providers",
"save_reference",
]
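With this re-export, the helper is importable directly from the package root:

    from ess.reflectometry import batch_processor
    # equivalently: from ess.reflectometry.tools import batch_processor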