Commit 0743f9b

Merge remote-tracking branch 'origin/estia' into polcal

2 parents: a34d3d6 + df83bfe

20 files changed: +827 -42 lines

src/ess/amor/conversions.py

Lines changed: 11 additions & 25 deletions

@@ -6,15 +6,15 @@
 from ..reflectometry.conversions import reflectometry_q
 from ..reflectometry.types import (
     BeamDivergenceLimits,
+    CoordTransformationGraph,
     WavelengthBins,
     YIndexLimits,
     ZIndexLimits,
 )
 from .geometry import Detector
-from .types import CoordTransformationGraph


-def theta(wavelength, divergence_angle, L2, sample_rotation, detector_rotation):
+def theta(wavelength, pixel_divergence_angle, L2, sample_rotation, detector_rotation):
     '''
     Angle of reflection.

@@ -61,14 +61,15 @@ def theta(wavelength, divergence_angle, L2, sample_rotation, detector_rotation):
     '''
     c = sc.constants.g * sc.constants.m_n**2 / sc.constants.h**2
     out = (c * L2 * wavelength**2).to(unit='dimensionless') + sc.sin(
-        divergence_angle.to(unit='rad', copy=False) + detector_rotation.to(unit='rad')
+        pixel_divergence_angle.to(unit='rad', copy=False)
+        + detector_rotation.to(unit='rad')
     )
     out = sc.asin(out, out=out)
     out -= sample_rotation.to(unit='rad')
     return out


-def angle_of_divergence(theta, sample_rotation, angle_to_center_of_beam):
+def divergence_angle(theta, sample_rotation, angle_to_center_of_beam):
     """
     Difference between the incident angle and the center of the incident beam.
     Useful for filtering parts of the beam that have too high divergence.
@@ -84,7 +85,7 @@ def angle_of_divergence(theta, sample_rotation, angle_to_center_of_beam):


 def wavelength(
-    event_time_offset, divergence_angle, L1, L2, chopper_phase, chopper_frequency
+    event_time_offset, pixel_divergence_angle, L1, L2, chopper_phase, chopper_frequency
 ):
     "Converts event_time_offset to wavelength using the chopper settings."
     out = event_time_offset.to(unit="ns", dtype="float64", copy=True)
@@ -108,37 +109,22 @@ def wavelength(
     )
     # Correction for path length through guides being different
     # depending on incident angle.
-    out -= (divergence_angle.to(unit="rad") / (np.pi * sc.units.rad)) * tau
+    out -= (pixel_divergence_angle.to(unit="rad") / (np.pi * sc.units.rad)) * tau
     out *= (sc.constants.h / sc.constants.m_n) / (L1 + L2)
     return out.to(unit='angstrom', copy=False)


 def coordinate_transformation_graph() -> CoordTransformationGraph:
     return {
-        "divergence_angle": "pixel_divergence_angle",
         "wavelength": wavelength,
         "theta": theta,
-        "angle_of_divergence": angle_of_divergence,
+        "divergence_angle": divergence_angle,
         "Q": reflectometry_q,
         "L1": lambda chopper_distance: sc.abs(chopper_distance),
         "L2": lambda distance_in_detector: distance_in_detector + Detector.distance,
     }


-def add_coords(
-    da: sc.DataArray,
-    graph: dict,
-) -> sc.DataArray:
-    "Adds scattering coordinates to the raw detector data."
-    return da.transform_coords(
-        ("wavelength", "theta", "angle_of_divergence", "Q", "L1", "L2"),
-        graph,
-        rename_dims=False,
-        keep_intermediate=False,
-        keep_aliases=False,
-    )
-
-
 def _not_between(v, a, b):
     return (v < a) | (v > b)

@@ -161,9 +147,9 @@ def add_masks(
     )
     da = da.bins.assign_masks(
         divergence_too_large=_not_between(
-            da.bins.coords["angle_of_divergence"],
-            bdlim[0].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
-            bdlim[1].to(unit=da.bins.coords["angle_of_divergence"].bins.unit),
+            da.bins.coords["divergence_angle"],
+            bdlim[0].to(unit=da.bins.coords["divergence_angle"].bins.unit),
+            bdlim[1].to(unit=da.bins.coords["divergence_angle"].bins.unit),
         ),
         wavelength=_not_between(
             da.bins.coords['wavelength'],
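
Note on the removed add_coords: the helper now lives in ess.reflectometry.conversions (see the workflow.py change below) and is shared between instruments. As a minimal sketch, assuming the shared version mirrors the code deleted here with the angle_of_divergence target renamed to divergence_angle, applying the graph to raw detector data looks like this:

import scipp as sc


def add_coords(da: sc.DataArray, graph: dict) -> sc.DataArray:
    # Derive wavelength, theta, divergence_angle, Q, L1 and L2 from the raw
    # event coordinates using the instrument's transformation graph.
    return da.transform_coords(
        ("wavelength", "theta", "divergence_angle", "Q", "L1", "L2"),
        graph,
        rename_dims=False,        # keep the original detector dims
        keep_intermediate=False,  # drop helper coords produced along the way
        keep_aliases=False,       # drop coords that are plain renames
    )

Here graph is the dict returned by coordinate_transformation_graph() above.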

src/ess/amor/normalization.py

Lines changed: 4 additions & 5 deletions

@@ -8,8 +8,8 @@
     supermirror_reflectivity,
 )
 from ..reflectometry.types import (
+    CoordTransformationGraph,
     DetectorSpatialResolution,
-    QBins,
     ReducedReference,
     ReducibleData,
     Reference,
@@ -23,7 +23,6 @@
     sample_size_resolution,
     wavelength_resolution,
 )
-from .types import CoordTransformationGraph


 def mask_events_where_supermirror_does_not_cover(
@@ -64,14 +63,12 @@ def mask_events_where_supermirror_does_not_cover(
         m=mvalue,
         alpha=alpha,
     )
-    sam.bins.masks["supermirror_does_not_cover"] = sc.isnan(R)
-    return sam
+    return sam.bins.assign_masks(supermirror_does_not_cover=sc.isnan(R))


 def evaluate_reference_at_sample_coords(
     reference: ReducedReference,
     sample: ReducibleData[SampleRun],
-    qbins: QBins,
     detector_spatial_resolution: DetectorSpatialResolution[SampleRun],
     graph: CoordTransformationGraph,
 ) -> Reference:
@@ -103,6 +100,8 @@ def evaluate_reference_at_sample_coords(
             "Q_resolution": q_resolution,
         },
         rename_dims=False,
+        keep_intermediate=False,
+        keep_aliases=False,
     )
     return sc.values(ref)
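
The switch from sam.bins.masks[...] = ... to sam.bins.assign_masks(...) replaces in-place mutation with a call that returns a new DataArray carrying the event mask, leaving the input untouched; this is generally the safer pattern when inputs may be shared between providers in a Sciline pipeline. A minimal sketch of the pattern (the wrapper name mask_uncovered and the argument R are illustrative only):

import scipp as sc


def mask_uncovered(sam: sc.DataArray, R) -> sc.DataArray:
    # Old, mutating style:
    #     sam.bins.masks["supermirror_does_not_cover"] = sc.isnan(R)
    #     return sam
    # New style: the input is left unchanged and the mask is added on a copy.
    return sam.bins.assign_masks(supermirror_does_not_cover=sc.isnan(R))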

src/ess/amor/types.py

Lines changed: 0 additions & 2 deletions

@@ -9,8 +9,6 @@
 AngularResolution = NewType("AngularResolution", sc.Variable)
 SampleSizeResolution = NewType("SampleSizeResolution", sc.Variable)

-CoordTransformationGraph = NewType("CoordTransformationGraph", dict)
-

 class ChopperFrequency(sciline.Scope[RunType, sc.Variable], sc.Variable):
     """Frequency of the choppers used in the run."""

src/ess/amor/workflow.py

Lines changed: 3 additions & 2 deletions

@@ -1,10 +1,12 @@
 from ..reflectometry.conversions import (
+    add_coords,
     add_proton_current_coord,
     add_proton_current_mask,
 )
 from ..reflectometry.corrections import correct_by_footprint, correct_by_proton_current
 from ..reflectometry.types import (
     BeamDivergenceLimits,
+    CoordTransformationGraph,
     ProtonCurrent,
     RawDetectorData,
     ReducibleData,
@@ -13,8 +15,7 @@
     YIndexLimits,
     ZIndexLimits,
 )
-from .conversions import add_coords, add_masks
-from .types import CoordTransformationGraph
+from .conversions import add_masks


 def add_coords_masks_and_apply_corrections(

src/ess/estia/__init__.py

Lines changed: 79 additions & 0 deletions

@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
+import importlib.metadata
+
+import sciline
+import scipp as sc
+
+from ..reflectometry import providers as reflectometry_providers
+from ..reflectometry import supermirror
+from ..reflectometry.types import (
+    BeamDivergenceLimits,
+    BeamSize,
+    DetectorSpatialResolution,
+    NeXusDetectorName,
+    RunType,
+    SamplePosition,
+)
+from . import conversions, load, normalization, resolution, workflow
+from .types import (
+    AngularResolution,
+    SampleSizeResolution,
+    WavelengthResolution,
+)
+
+try:
+    __version__ = importlib.metadata.version(__package__ or __name__)
+except importlib.metadata.PackageNotFoundError:
+    __version__ = "0.0.0"
+
+
+providers = (
+    *reflectometry_providers,
+    # *load.providers,
+    *conversions.providers,
+    *workflow.providers,
+    *normalization.providers,
+)
+"""
+List of providers for setting up a Sciline pipeline.
+
+This provides a default Estia workflow including providers for loading files.
+"""
+
+
+def default_parameters() -> dict:
+    return {
+        supermirror.MValue: sc.scalar(5, unit=sc.units.dimensionless),
+        supermirror.CriticalEdge: 0.022 * sc.Unit("1/angstrom"),
+        supermirror.Alpha: sc.scalar(0.25 / 0.088, unit=sc.units.angstrom),
+        BeamSize[RunType]: 2.0 * sc.units.mm,
+        DetectorSpatialResolution[RunType]: 0.0025 * sc.units.m,
+        SamplePosition[RunType]: sc.vector([0, 0, 0], unit="m"),
+        NeXusDetectorName[RunType]: "detector",
+        BeamDivergenceLimits: (
+            sc.scalar(-0.75, unit='deg'),
+            sc.scalar(0.75, unit='deg'),
+        ),
+    }
+
+
+def EstiaWorkflow() -> sciline.Pipeline:
+    """
+    Workflow with default parameters for the Estia instrument.
+    """
+    return sciline.Pipeline(providers=providers, params=default_parameters())
+
+
+__all__ = [
+    "AngularResolution",
+    "EstiaWorkflow",
+    "SampleSizeResolution",
+    "WavelengthResolution",
+    "conversions",
+    "default_parameters",
+    "load",
+    "providers",
+    "resolution",
+    "supermirror",
+]
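
For context, a minimal usage sketch of the new workflow. The key BeamSize[SampleRun] and its import path are assumptions based on the imports above; the pipeline-indexing calls are standard Sciline:

import scipp as sc

from ess.estia import EstiaWorkflow
from ess.reflectometry.types import BeamSize, SampleRun  # assumed import location

wf = EstiaWorkflow()
# Override one of the defaults from default_parameters() before computing.
wf[BeamSize[SampleRun]] = 1.5 * sc.units.mm
# wf.compute(...) / wf.visualize(...) then behave as for any Sciline pipeline.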

src/ess/estia/conversions.py

Lines changed: 103 additions & 0 deletions

@@ -0,0 +1,103 @@
+# SPDX-License-Identifier: BSD-3-Clause
+# Copyright (c) 2023 Scipp contributors (https://github.com/scipp)
+import scipp as sc
+
+from ..reflectometry.conversions import reflectometry_q
+from ..reflectometry.types import (
+    BeamDivergenceLimits,
+    CoordTransformationGraph,
+    WavelengthBins,
+    YIndexLimits,
+    ZIndexLimits,
+)
+
+
+def theta(
+    divergence_angle,
+    sample_rotation,
+):
+    '''
+    Angle of reflection.
+
+    Computes the angle between the scattering direction of
+    the neutron and the sample surface.
+    '''
+    return divergence_angle + sample_rotation.to(unit=divergence_angle.unit)
+
+
+def divergence_angle(
+    position, sample_position, detector_rotation, incident_angle_of_center_of_beam
+):
+    """
+    Angle between the scattering direction and
+    the ray from the sample to the center of the detector.
+    """
+    p = position - sample_position.to(unit=position.unit)
+    return (
+        sc.atan2(y=p.fields.x, x=p.fields.z)
+        - detector_rotation.to(unit='rad')
+        - incident_angle_of_center_of_beam.to(unit='rad')
+    )
+
+
+def wavelength(
+    event_time_offset,
+    # Other inputs
+):
+    "Converts event_time_offset to wavelength"
+    # Use frame unwrapping from scippneutron
+    pass
+
+
+def coordinate_transformation_graph() -> CoordTransformationGraph:
+    return {
+        "wavelength": wavelength,
+        "theta": theta,
+        "divergence_angle": divergence_angle,
+        "Q": reflectometry_q,
+        "L1": lambda source_position, sample_position: sc.norm(
+            sample_position - source_position
+        ),  # + extra correction for guides?
+        "L2": lambda position, sample_position: sc.norm(position - sample_position),
+        "incident_angle_of_center_of_beam": lambda: sc.scalar(1.7, unit='deg').to(
+            unit='rad'
+        ),
+    }
+
+
+def _not_between(v, a, b):
+    return (v < a) | (v > b)
+
+
+def add_masks(
+    da: sc.DataArray,
+    ylim: YIndexLimits,
+    zlims: ZIndexLimits,
+    bdlim: BeamDivergenceLimits,
+    wbins: WavelengthBins,
+) -> sc.DataArray:
+    """
+    Masks the data by ranges in the detector
+    coordinates ``z`` and ``y``, and by the divergence of the beam,
+    and by wavelength.
+    """
+    da = da.assign_masks(
+        stripe_range=_not_between(da.coords["stripe"], *ylim),
+        z_range=_not_between(da.coords["z_index"], *zlims),
+        divergence_too_large=_not_between(
+            da.coords["divergence_angle"],
+            bdlim[0].to(unit=da.coords["divergence_angle"].unit),
+            bdlim[1].to(unit=da.coords["divergence_angle"].unit),
+        ),
+    )
+    da = da.bins.assign_masks(
+        wavelength=_not_between(
+            da.bins.coords['wavelength'],
+            wbins[0],
+            wbins[-1],
+        ),
+    )
+    return da
+
+
+providers = (coordinate_transformation_graph,)
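
As a quick sanity check of the geometry above, a minimal sketch evaluating divergence_angle and theta (as defined in this file) on scalar inputs; the positions and rotations are invented purely for illustration:

import scipp as sc

from ess.estia.conversions import divergence_angle, theta  # module added in this commit

# Hypothetical pixel 0.2 m off-axis (x) and 4 m downstream (z) of the sample.
position = sc.vector([0.2, 0.0, 4.0], unit='m')
sample_position = sc.vector([0.0, 0.0, 0.0], unit='m')
detector_rotation = sc.scalar(2.0, unit='deg')
incident_angle_of_center_of_beam = sc.scalar(1.7, unit='deg')
sample_rotation = sc.scalar(1.0, unit='deg')

delta = divergence_angle(
    position, sample_position, detector_rotation, incident_angle_of_center_of_beam
)
print(theta(delta, sample_rotation))  # reflection angle in rad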

src/ess/estia/corrections.py

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+import scipp as sc
+
+
+def correct_by_footprint(da: sc.DataArray) -> sc.DataArray:
+    "Corrects the data by the size of the footprint on the sample."
+    return da / sc.sin(da.coords['theta'])
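
A minimal sketch of applying correct_by_footprint to invented intensities; dividing by sin(theta) gives the grazing-angle bin the larger correction factor:

import scipp as sc

from ess.estia.corrections import correct_by_footprint  # module added in this commit

# Example data with a per-bin theta coordinate (converted to rad for sc.sin).
da = sc.DataArray(
    sc.array(dims=['theta'], values=[100.0, 100.0], unit='counts'),
    coords={
        'theta': sc.array(dims=['theta'], values=[0.5, 2.0], unit='deg').to(unit='rad')
    },
)
corrected = correct_by_footprint(da)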
