From 9ac17258f38a6b62dd2a876d06809058c3d91a0f Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Wed, 9 Oct 2024 11:20:27 +0200 Subject: [PATCH 01/11] Reorganize Integrator into a sub-module --- src/pyFAI/azimuthalIntegrator.py | 4036 +--------------------------- src/pyFAI/diffmap.py | 9 +- src/pyFAI/integrator/__init__.py | 45 + src/pyFAI/integrator/azimuthal.py | 3641 +++++++++++++++++++++++++ src/pyFAI/integrator/fiber.py | 467 ++++ src/pyFAI/integrator/load.py | 352 +++ src/pyFAI/integrator/meson.build | 8 + src/pyFAI/io/integration_config.py | 4 +- src/pyFAI/load_integrators.py | 319 +-- src/pyFAI/meson.build | 1 + 10 files changed, 4533 insertions(+), 4349 deletions(-) create mode 100644 src/pyFAI/integrator/__init__.py create mode 100644 src/pyFAI/integrator/azimuthal.py create mode 100644 src/pyFAI/integrator/fiber.py create mode 100644 src/pyFAI/integrator/load.py create mode 100644 src/pyFAI/integrator/meson.build diff --git a/src/pyFAI/azimuthalIntegrator.py b/src/pyFAI/azimuthalIntegrator.py index 056ca6910..c46591add 100644 --- a/src/pyFAI/azimuthalIntegrator.py +++ b/src/pyFAI/azimuthalIntegrator.py @@ -26,4040 +26,18 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. +""" Compatibility layer with former pyFAI.azimuthalIntegrator.AzimuthalIntegrator + +now located at pyFAI.integrator.azimuthal.AzimuthalIntegrator +""" + __author__ = "Jérôme Kieffer" __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "12/09/2024" +__date__ = "09/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' -import logging -logger = logging.getLogger(__name__) -import warnings -import threading -import gc -from math import pi, log -import numpy -from .geometry import Geometry -from . import units -from .utils import EPS32, deg2rad, crc32 -from .utils.decorators import deprecated, deprecated_warning -from .containers import Integrate1dResult, Integrate2dResult, SeparateResult, ErrorModel -from .io import DefaultAiWriter, save_integrate_result -from .io.ponifile import PoniFile -error = None -from .method_registry import IntegrationMethod - -from .load_integrators import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \ - splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ - histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ - PREFERED_METHODS_1D, PREFERED_METHODS_2D - -from .engines import Engine - -# Few constants for engine names: -OCL_CSR_ENGINE = "ocl_csr_integr" -OCL_LUT_ENGINE = "ocl_lut_integr" -OCL_HIST_ENGINE = "ocl_histogram" -OCL_SORT_ENGINE = "ocl_sorter" -EXT_LUT_ENGINE = "lut_integrator" -EXT_CSR_ENGINE = "csr_integrator" - - -class AzimuthalIntegrator(Geometry): - """ - This class is an azimuthal integrator based on P. Boesecke's - geometry and histogram algorithm by Manolo S. 
del Rio and V.A Sole - - All geometry calculation are done in the Geometry class - - main methods are: - - >>> tth, I = ai.integrate1d(data, npt, unit="2th_deg") - >>> q, I, sigma = ai.integrate1d(data, npt, unit="q_nm^-1", error_model="poisson") - >>> regrouped = ai.integrate2d(data, npt_rad, npt_azim, unit="q_nm^-1")[0] - """ - - DEFAULT_METHOD_1D = PREFERED_METHODS_1D[0] - DEFAULT_METHOD_2D = PREFERED_METHODS_2D[0] - "Fail-safe low-memory integrator" - - USE_LEGACY_MASK_NORMALIZATION = True - """If true, the Python engine integrator will normalize the mask to use the - most frequent value of the mask as the non-masking value. - - This behaviour is not consistant with other engines and is now deprecated. - This flag will be turned off in the comming releases. - - Turning off this flag force the user to provide a mask with 0 as non-masking - value. And any non-zero as masking value (negative or positive value). A - boolean mask is also accepted (`True` is the masking value). - """ - - def __init__(self, dist=1, poni1=0, poni2=0, - rot1=0, rot2=0, rot3=0, - pixel1=None, pixel2=None, - splineFile=None, detector=None, wavelength=None, orientation=0): - """ - :param dist: distance sample - detector plan (orthogonal distance, not along the beam), in meter. - :type dist: float - :param poni1: coordinate of the point of normal incidence along the detector's first dimension, in meter - :type poni1: float - :param poni2: coordinate of the point of normal incidence along the detector's second dimension, in meter - :type poni2: float - :param rot1: first rotation from sample ref to detector's ref, in radians - :type rot1: float - :param rot2: second rotation from sample ref to detector's ref, in radians - :type rot2: float - :param rot3: third rotation from sample ref to detector's ref, in radians - :type rot3: float - :param pixel1: Deprecated. Pixel size of the fist dimension of the detector, in meter. - If both pixel1 and pixel2 are not None, detector pixel size is overwritten. - Prefer defining the detector pixel size on the provided detector object. - Prefer defining the detector pixel size on the provided detector - object (``detector.pixel1 = 5e-6``). - :type pixel1: float - :param pixel2: Deprecated. Pixel size of the second dimension of the detector, in meter. - If both pixel1 and pixel2 are not None, detector pixel size is overwritten. - Prefer defining the detector pixel size on the provided detector - object (``detector.pixel2 = 5e-6``). - :type pixel2: float - :param splineFile: Deprecated. File containing the geometric distortion of the detector. - If not None, pixel1 and pixel2 are ignored and detector spline is overwritten. - Prefer defining the detector spline manually - (``detector.splineFile = "file.spline"``). - :type splineFile: str - :param detector: name of the detector or Detector instance. String - description is deprecated. 
Prefer using the result of the detector - factory: ``pyFAI.detector_factory("eiger4m")`` - :type detector: str or pyFAI.Detector - :param float wavelength: Wave length used in meter - :param int orientation: orientation of the detector, see pyFAI.detectors.orientation.Orientation - """ - Geometry.__init__(self, dist, poni1, poni2, - rot1, rot2, rot3, - pixel1, pixel2, splineFile, detector, wavelength, orientation) - - # mask, maskfile, darkcurrent and flatfield are properties pointing to - # self.detector now (16/06/2017) - - self._lock = threading.Semaphore() - self.engines = {} # key: name of the engine, - - self._empty = 0.0 - - def reset(self, collect_garbage=True): - """Reset azimuthal integrator in addition to other arrays. - - :param collect_garbage: set to False to prevent garbage collection, faster - """ - Geometry.reset(self, collect_garbage=False) - self.reset_engines(collect_garbage) - - def reset_engines(self, collect_garbage=True): - """Urgently free memory by deleting all regrid-engines - - :param collect_garbage: set to False to prevent garbage collection, faster - """ - with self._lock: - for key in list(self.engines.keys()): # explicit copy - self.engines.pop(key).reset() - if collect_garbage: - gc.collect() - - def create_mask(self, data, mask=None, - dummy=None, delta_dummy=None, - unit=None, radial_range=None, - azimuth_range=None, - mode="normal"): - """ - Combines various masks into another one. - - :param data: input array of data - :type data: ndarray - :param mask: input mask (if none, self.mask is used) - :type mask: ndarray - :param dummy: value of dead pixels - :type dummy: float - :param delta_dumy: precision of dummy pixels - :type delta_dummy: float - :param mode: can be "normal" or "numpy" (inverted) or "where" applied to the mask - :type mode: str - - :return: the new mask - :rtype: ndarray of bool - - This method combine two masks (dynamic mask from *data & - dummy* and *mask*) to generate a new one with the 'or' binary - operation. One can adjust the level, with the *dummy* and - the *delta_dummy* parameter, when you consider the *data* - values needs to be masked out. - - This method can work in two different *mode*: - - * "normal": False for valid pixels, True for bad pixels - * "numpy": True for valid pixels, false for others - * "where": does a numpy.where on the "numpy" output - - This method tries to accomodate various types of masks (like - valid=0 & masked=-1, ...) - - Note for the developper: we use a lot of numpy.logical_or in this method, - the out= argument allows to recycle buffers and save considerable time in - allocating temporary arrays. - """ - logical_or = numpy.logical_or - shape = data.shape - # ^^^^ this is why data is mandatory ! - if mask is None: - mask = self.mask - if mask is None: - mask = numpy.zeros(shape, dtype=bool) - else: - mask = mask.astype(bool) - if self.USE_LEGACY_MASK_NORMALIZATION: - if mask.sum(dtype=int) > mask.size // 2: - reason = "The provided mask is not complient with other engines. "\ - "The feature which automatically invert it will be removed soon. 
"\ - "For more information see https://github.com/silx-kit/pyFAI/pull/868" - deprecated_warning(__name__, name="provided mask content", reason=reason) - numpy.logical_not(mask, mask) - if (mask.shape != shape): - try: - mask = mask[:shape[0],:shape[1]] - except Exception as error: # IGNORE:W0703 - logger.error("Mask provided has wrong shape:" - " expected: %s, got %s, error: %s", - shape, mask.shape, error) - mask = numpy.zeros(shape, dtype=bool) - if dummy is not None: - if delta_dummy is None: - logical_or(mask, (data == dummy), out=mask) - else: - logical_or(mask, abs(data - dummy) <= delta_dummy, out=mask) - - if radial_range is not None: - assert unit, "unit is needed when building a mask based on radial_range" - if isinstance(unit, (tuple, list)) and len(unit) == 2: - radial_unit = units.to_unit(unit[0]) - else: - radial_unit = units.to_unit(unit) - rad = self.array_from_unit(shape, "center", radial_unit, scale=False) - logical_or(mask, rad < radial_range[0], out=mask) - logical_or(mask, rad > radial_range[1], out=mask) - if azimuth_range is not None: - if isinstance(unit, (tuple, list)) and len(unit) == 2: - azimuth_unit = units.to_unit(unit[1]) - chi = self.array_from_unit(shape, "center", azimuth_unit, scale=False) - logical_or(mask, chi < azimuth_range[0], out=mask) - logical_or(mask, chi > azimuth_range[1], out=mask) - - # Prepare alternative representation for output: - if mode == "numpy": - numpy.logical_not(mask, mask) - elif mode == "where": - mask = numpy.where(numpy.logical_not(mask)) - return mask - - def dark_correction(self, data, dark=None): - """ - Correct for Dark-current effects. - If dark is not defined, correct for a dark set by "set_darkfiles" - - :param data: input ndarray with the image - :param dark: ndarray with dark noise or None - :return: 2tuple: corrected_data, dark_actually used (or None) - """ - dark = dark if dark is not None else self.detector.darkcurrent - if dark is not None: - return data - dark, dark - else: - return data, None - - def flat_correction(self, data, flat=None): - """ - Correct for flat field. - If flat is not defined, correct for a flat set by "set_flatfiles" - - :param data: input ndarray with the image - :param flat: ndarray with flatfield or None for no correction - :return: 2tuple: corrected_data, flat_actually used (or None) - """ - flat = flat if flat is not None else self.detector.flatfield - if flat is not None: - return data / flat, flat - else: - return data, None - - def _normalize_method(self, method, dim, default): - """ - :rtype: IntegrationMethod - """ - requested_method = method - method = IntegrationMethod.select_one_available(method, dim=dim, default=None, degradable=False) - if method is not None: - return method - method = IntegrationMethod.select_one_available(requested_method, dim=dim, default=default, degradable=True) - logger.warning("Method requested '%s' not available. 
Method '%s' will be used", requested_method, method) - return default - - def setup_sparse_integrator(self, - shape, - npt, - mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", algo="CSR", - empty=None, scale=True): - """ - Prepare a sparse-matrix integrator based on LUT, CSR or CSC format - - :param shape: shape of the dataset - :type shape: (int, int) - :param npt: number of points in the the output pattern - :type npt: int or (int, int) - :param mask: array with masked pixel (1=masked) - :type mask: ndarray - :param pos0_range: range in radial dimension - :type pos0_range: (float, float) - :param pos1_range: range in azimuthal dimension - :type pos1_range: (float, float) - :param mask_checksum: checksum of the mask buffer - :type mask_checksum: int (or anything else ...) - :param unit: use to propagate the LUT object for further checkings - :type unit: pyFAI.units.Unit or 2-tuple of them for 2D integration - :param split: Splitting scheme: valid options are "no", "bbox", "full" - :param algo: Sparse matrix format to use: "LUT", "CSR" or "CSC" - :param empty: override the default empty value - :param scale: set to False for working in S.I. units for pos0_range - which is faster. By default assumes pos0_range has `units` - Note that pos1_range, the chi-angle, is expected in radians - - - This method is called when a look-up table needs to be set-up. - The *shape* parameter, correspond to the shape of the original - datatset. It is possible to customize the number of point of - the output histogram with the *npt* parameter which can be - either an integer for an 1D integration or a 2-tuple of - integer in case of a 2D integration. The LUT will have a - different shape: (npt, lut_max_size), the later parameter - being calculated during the instanciation of the splitBBoxLUT - class. - - It is possible to prepare the LUT with a predefine - *mask*. This operation can speedup the computation of the - later integrations. Instead of applying the patch on the - dataset, it is taken into account during the histogram - computation. If provided the *mask_checksum* prevent the - re-calculation of the mask. When the mask changes, its - checksum is used to reset (or not) the LUT (which is a very - time consuming operation !) - - It is also possible to restrain the range of the 1D or 2D - pattern with the *pos0_range* (radial) and *pos1_range* (azimuthal). - - The *unit* parameter is just propagated to the LUT integrator - for further checkings: The aim is to prevent an integration to - be performed in 2th-space when the LUT was setup in q space. 
- Unit can also be a 2-tuple in the case of a 2D integration - """ - if isinstance(unit, (list, tuple)) and len(unit) == 2: - unit0, unit1 = tuple(units.to_unit(u) for u in unit) - else: - unit0 = units.to_unit(unit) - unit1 = units.CHI_DEG - if scale and pos0_range: - pos0_scale = unit0.scale - pos0_range = tuple(pos0_range[i] / pos0_scale for i in (0, -1)) - if "__len__" in dir(npt) and len(npt) == 2: - int2d = True - if scale and pos1_range: - pos1_scale = unit1.scale - pos1_range = tuple(pos1_range[i] / pos1_scale for i in (0, -1)) - else: - int2d = False - empty = self._empty if empty is None else empty - if split == "full": - pos = self.array_from_unit(shape, "corner", unit, scale=False) - else: - pos0 = self.array_from_unit(shape, "center", unit0, scale=False) - if split == "no": - dpos0 = None - else: - dpos0 = self.array_from_unit(shape, "delta", unit0, scale=False) - - pos1 = None - dpos1 = None - if int2d or pos1_range: - pos1 = self.array_from_unit(shape, "center", unit1, scale=False) - if split == "no": - dpos1 = None - else: - dpos1 = self.array_from_unit(shape, "delta", unit1, scale=False) - - if mask is None: - mask_checksum = None - else: - assert mask.shape == shape - algo = algo.upper() - if algo == "LUT": - if split == "full": - if int2d: - return splitPixelFullLUT.HistoLUT2dFullSplit(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period), - ) - else: - return splitPixelFullLUT.HistoLUT1dFullSplit(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxLUT.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period), - unit=unit, - empty=empty) - else: - return splitBBoxLUT.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - elif algo == "CSR": - if split == "full": - if int2d: - return splitPixelFullCSR.FullSplitCSR_2d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period), - ) - else: - return splitPixelFullCSR.FullSplitCSR_1d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxCSR.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - unit=unit, - empty=empty, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period) -) - else: - return splitBBoxCSR.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - elif algo == "CSC": - if split == "full": - if int2d: - return splitPixelFullCSC.FullSplitCSC_2d(pos, - bins=npt, - 
pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period) - ) - else: - return splitPixelFullCSC.FullSplitCSC_1d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxCSC.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - unit=unit, - empty=empty, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period) -) - else: - return splitBBoxCSC.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - - @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") - def setup_LUT(self, shape, npt, mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", empty=None, scale=True): - """See documentation of setup_sparse_integrator where algo=LUT""" - return self.setup_sparse_integrator(shape, npt, mask, - pos0_range, pos1_range, - mask_checksum, unit, - split=split, algo="LUT", - empty=empty, scale=scale) - - @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") - def setup_CSR(self, shape, npt, mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", empty=None, scale=True): - """See documentation of setup_sparse_integrator where algo=CSR""" - return self.setup_sparse_integrator(shape, npt, mask, - pos0_range, pos1_range, - mask_checksum, unit, - split=split, algo="CSR", - empty=empty, scale=scale) - - @deprecated(since_version="0.20", only_once=True, replacement="integrate1d_ng", deprecated_since="0.20.0") - def integrate1d_legacy(self, data, npt, filename=None, - correctSolidAngle=True, - variance=None, error_model=None, - radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method="csr", unit=units.Q, safe=True, - normalization_factor=1.0, - block_size=None, profile=False, metadata=None): - """Calculate the azimuthal integrated Saxs curve in q(nm^-1) by default - - Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more - - - - :param data: 2D array from the Detector/CCD camera - :type data: ndarray - :param npt: number of points in the output pattern - :type npt: int - :param filename: output filename in 2/3 column ascii format - :type filename: str - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param variance: array containing the variance of the data. If not available, no error propagation is done - :type variance: ndarray - :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :type error_model: str - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
- :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :type mask: ndarray - :param dummy: value for dead/masked pixels - :type dummy: float - :param delta_dummy: precision for dummy value - :type delta_dummy: float - :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). - 0 for circular polarization or random, - None for no correction, - True for using the former correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :type method: can be Method named tuple, IntegrationMethod instance or str to be parsed - :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now - :type unit: pyFAI.units.Unit - :param safe: Do some extra checks to ensure LUT/CSR is still valid. False is faster. - :type safe: bool - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param block_size: size of the block for OpenCL integration (unused?) - :param profile: set to True to enable profiling in OpenCL - :param all: if true return a dictionary with many more parameters (deprecated, please refer to the documentation of Integrate1dResult). - :type all: bool - :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
- :return: q/2th/r bins center positions and regrouped intensity (and error array if variance or variance model provided) - :rtype: Integrate1dResult, dict - """ - method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) - assert method.dimension == 1 - unit = units.to_unit(unit) - - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - shape = data.shape - pos0_scale = unit.scale - - if radial_range: - radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) - if azimuth_range is not None: - azimuth_range = self.normalize_azimuth_range(azimuth_range) - - if variance is not None: - assert variance.size == data.size - elif error_model: - error_model = error_model.lower() - if error_model == "poisson": - variance = numpy.ascontiguousarray(data, numpy.float32) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - I = None - sigma = None - count = None - sum_ = None - - if method.algo_lower == "lut": - if EXT_LUT_ENGINE not in self.engines: - engine = self.engines[EXT_LUT_ENGINE] = Engine() - else: - engine = self.engines[EXT_LUT_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and\ - (not integr.check_mask): - reset = "mask but LUT was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but LUT has mask" - elif (mask is not None) and\ - (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and\ - (integr.pos0_range is not None): - reset = "radial_range was defined in LUT" - elif (radial_range is not None) and\ - (integr.pos0_range != radial_range): - reset = ("radial_range is defined" - " but not the same as in LUT") - if (azimuth_range is None) and\ - (integr.pos1_range is not None): - reset = ("azimuth_range not defined and" - " LUT had azimuth_range defined") - elif (azimuth_range is not None) and\ - (integr.pos1_range != azimuth_range[0]): - reset = ("azimuth_range requested and" - " LUT's azimuth_range don't match") - if reset: - logger.info("AI.integrate1d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="LUT", - scale=False) - - except MemoryError: - # LUT method is hungry... 
- logger.warning("MemoryError: falling back on default forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - if integr: - if method.impl_lower == "opencl": - # TODO: manage the target - if OCL_LUT_ENGINE in self.engines: - ocl_engine = self.engines[OCL_LUT_ENGINE] - else: - ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() - with ocl_engine.lock: - if method.target is not None: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["lut"] != integr.lut_checksum): - ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - if ocl_integr is not None: - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor) - qAxis = integr.bin_centers # this will be copied later - if error_model == "azimuthal": - - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - var1d, a, b = ocl_integr.integrate_legacy(variance, - solidangle=None, - dummy=dummy, - delta_dummy=delta_dummy, - normalization_factor=1.0, - coef_power=2) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - else: - qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = integr.integrate_legacy(variance, - solidAngle=None, - dummy=dummy, - delta_dummy=delta_dummy, - coef_power=2, - normalization_factor=1.0) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.algo_lower == "csr": - if EXT_CSR_ENGINE not in self.engines: - engine = self.engines[EXT_CSR_ENGINE] = Engine() - else: - engine = self.engines[EXT_CSR_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and\ - (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and\ - (integr.mask_checksum != mask_crc): - reset = "mask changed" - if radial_range != integr.pos0_range: - reset = "radial_range changed" - if azimuth_range != integr.pos1_range: - reset = "azimuth_range changed" - if reset: - logger.info("AI.integrate1d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - 
pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="CSR", - scale=False) - except MemoryError: # CSR method is hungry... - logger.warning("MemoryError: falling back on forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - if integr: - if method.impl_lower == "opencl": - # TODO: manage OpenCL targets - if OCL_CSR_ENGINE not in self.engines: - self.engines[OCL_CSR_ENGINE] = Engine() - ocl_engine = self.engines[OCL_CSR_ENGINE] - with ocl_engine.lock: - if method.target is not None: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["data"] != integr.lut_checksum): - ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum, - block_size=block_size, - profile=profile) - ocl_engine.set_engine(ocl_integr) - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor) - qAxis = integr.bin_centers # this will be copied later - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - var1d, a, b = ocl_integr.integrate_legacy(variance, - solidangle=None, - dummy=dummy, - delta_dummy=delta_dummy) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - else: - qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = integr.integrate_legacy(variance, - solidAngle=None, - dummy=dummy, - delta_dummy=delta_dummy, - normalization_factor=1.0) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.method[1:4] == ("full", "histogram", "cython"): - logger.debug("integrate1d uses SplitPixel implementation") - pos = self.array_from_unit(shape, "corner", unit, scale=False) - qAxis, I, sum_, count = splitPixel.fullSplit1D(pos=pos, - weights=data, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor - ) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = splitPixel.fullSplit1D(pos=pos, - weights=variance, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - normalization_factor=1.0 - ) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not 
None else self._empty - - if method.method[1:4] == ("bbox", "histogram", "cython"): - logger.debug("integrate1d uses BBox implementation") - if azimuth_range is not None: - chi = self.chiArray(shape) - dchi = self.deltaChi(shape) - else: - chi = None - dchi = None - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) - qAxis, I, sum_, count = splitBBox.histoBBox1d(weights=data, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = splitBBox.histoBBox1d(weights=variance, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - ) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": - # Common part for Numpy and Cython - data = data.astype(numpy.float32) - mask = self.create_mask(data, mask, dummy, delta_dummy, - unit=unit, - radial_range=radial_range, - azimuth_range=azimuth_range, - mode="where") - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - if radial_range is None: - radial_range = (pos0.min(), pos0.max()) - pos0 = pos0[mask] - if dark is not None: - data -= dark - if flat is not None: - data /= flat - if polarization is not None: - data /= polarization - if solidangle is not None: - data /= solidangle - data = data[mask] - if variance is not None: - variance = variance[mask] - - if method.impl_lower == "cython": - logger.debug("integrate1d uses cython implementation") - qAxis, I, sum_, count = histogram.histogram(pos=pos0, - weights=data, - bins=npt, - bin_range=radial_range, - pixelSize_in_Pos=0, - empty=dummy if dummy is not None else self._empty, - normalization_factor=normalization_factor) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 - if variance is not None: - _, var1d, a, b = histogram.histogram(pos=pos0, - weights=variance, - bins=npt, - bin_range=radial_range, - pixelSize_in_Pos=1, - empty=dummy if dummy is not None else self._empty) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - elif method.impl_lower == "python": - logger.debug("integrate1d uses Numpy implementation") - count, b = numpy.histogram(pos0, npt, range=radial_range) - qAxis = (b[1:] + b[:-1]) / 2.0 - sum_, b = numpy.histogram(pos0, npt, weights=data, range=radial_range) - with numpy.errstate(divide='ignore', invalid='ignore'): - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 - if variance is not None: - var1d, b = numpy.histogram(pos0, npt, weights=variance, range=radial_range) - sigma = numpy.sqrt(var1d) / (count * normalization_factor) - sigma[count == 0] = dummy if 
dummy is not None else self._empty - with numpy.errstate(divide='ignore', invalid='ignore'): - I = sum_ / count / normalization_factor - I[count == 0] = dummy if dummy is not None else self._empty - - if pos0_scale: - # not in place to make a copy - qAxis = qAxis * pos0_scale - - result = Integrate1dResult(qAxis, I, sigma) - result._set_method_called("integrate1d") - result._set_method(method) - result._set_compute_engine(str(method)) - result._set_unit(unit) - result._set_sum(sum_) - result._set_count(count) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_metadata(metadata) - - if filename is not None: - save_integrate_result(filename, result) - - return result - - _integrate1d_legacy = integrate1d_legacy - - def integrate1d_ng(self, data, npt, filename=None, - correctSolidAngle=True, - variance=None, error_model=None, - radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, absorption=None, - method=("bbox", "csr", "cython"), unit=units.Q, safe=True, - normalization_factor=1.0, - metadata=None): - """Calculate the azimuthal integration (1d) of a 2D image. - - Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more - Takes extra care of normalization and performs proper variance propagation. - - :param ndarray data: 2D array from the Detector/CCD camera - :param int npt: number of points in the output pattern - :param str filename: output filename in 2/3 column ascii format - :param bool correctSolidAngle: correct for solid angle of each pixel if True - :param ndarray variance: array containing the variance of the data. - :param str error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (min, max). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (min, max). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param ndarray mask: array with 0 for valid pixels, all other are masked (static mask) - :param float dummy: value for dead/masked pixels (dynamic mask) - :param float delta_dummy: precision for dummy value - :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). - 0 for circular polarization or random, - None for no correction, - True for using the former correction - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param ndarray absorption: absorption correction image - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param Unit unit: Output units, can be "q_nm^-1" (default), "2th_deg", "r_mm" for now. - :param bool safe: Perform some extra checks to ensure LUT/CSR is still valid. False is faster. - :param float normalization_factor: Value of a normalization monitor - :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
- :param ndarray absorption: detector absorption - :return: Integrate1dResult namedtuple with (q,I,sigma) +extra informations in it. - """ - method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) - assert method.dimension == 1 - unit = units.to_unit(unit) - empty = dummy if dummy is not None else self._empty - shape = data.shape - pos0_scale = unit.scale - - if radial_range: - radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) - if azimuth_range is not None: - azimuth_range = self.normalize_azimuth_range(azimuth_range) - - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "user provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - solidangle_crc = self._cached_array[f"solid_angle#{self._dssa_order}_crc"] - else: - solidangle_crc = solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - error_model = ErrorModel.parse(error_model) - if variance is not None: - assert variance.size == data.size - error_model = ErrorModel.VARIANCE - if error_model.poissonian and not method.manage_variance: - error_model = ErrorModel.VARIANCE - if dark is None: - variance = numpy.maximum(data, 1.0).astype(numpy.float32) - else: - variance = (numpy.maximum(data, 1.0) + numpy.maximum(dark, 0.0)).astype(numpy.float32) - - # Prepare LUT if needed! - if method.algo_is_sparse: - # initialize the CSR/LUT integrator in Cython as it may be needed later on. 
- cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] - if cython_method not in self.engines: - cython_engine = self.engines[cython_method] = Engine() - else: - cython_engine = self.engines[cython_method] - with cython_engine.lock: - # Validate that the engine used is the proper one - cython_integr = cython_engine.engine - cython_reset = None - if cython_integr is None: - cython_reset = "of first initialization" - if (not cython_reset) and safe: - if cython_integr.unit != unit: - cython_reset = "unit was changed" - if cython_integr.bins != npt: - cython_reset = "number of points changed" - if cython_integr.size != data.size: - cython_reset = "input image size changed" - if cython_integr.empty != empty: - cython_reset = "empty value changed" - if (mask is not None) and (not cython_integr.check_mask): - cython_reset = f"mask but {method.algo_lower.upper()} was without mask" - elif (mask is None) and (cython_integr.cmask is not None): - cython_reset = f"no mask but { method.algo_lower.upper()} has mask" - elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): - cython_reset = "mask changed" - if (radial_range is None) and (cython_integr.pos0_range is not None): - cython_reset = f"radial_range was defined in { method.algo_lower.upper()}" - elif (radial_range is not None) and (cython_integr.pos0_range != radial_range): - cython_reset = f"radial_range is defined but differs in %s" % method.algo_lower.upper() - if (azimuth_range is None) and (cython_integr.pos1_range is not None): - cython_reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" - elif (azimuth_range is not None) and (cython_integr.pos1_range != azimuth_range): - cython_reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" - if cython_reset: - logger.info("AI.integrate1d_ng: Resetting Cython integrator because %s", cython_reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - cython_integr = self.setup_sparse_integrator(shape, npt, mask, - radial_range, azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo=method.algo_lower, - empty=empty, scale=False) - except MemoryError: # sparse methods are hungry... - logger.warning("MemoryError: falling back on forward implementation") - cython_integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - cython_engine.set_engine(cython_integr) - # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
- if method.impl_lower == "cython": - # The integrator has already been initialized previously - integr = self.engines[method].engine - intpl = integr.integrate_ng(data, - variance=variance, - error_model=error_model, - dummy=dummy, - delta_dummy=delta_dummy, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - absorption=absorption, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average) - else: # method.impl_lower in ("opencl", "python"): - if method not in self.engines: - # instanciated the engine - engine = self.engines[method] = Engine() - else: - engine = self.engines[method] - with engine.lock: - # Validate that the engine used is the proper one - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit was changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if integr.empty != empty: - reset = "empty value changed" - if (mask is not None) and (not integr.check_mask): - reset = f"mask but {method.algo_lower.upper()} was without mask" - elif (mask is None) and (integr.check_mask): - reset = f"no mask but {method.algo_lower.upper()} has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and (integr.pos0_range is not None): - reset = f"radial_range was defined in {method.algo_lower.upper()}" - elif (radial_range is not None) and (integr.pos0_range != radial_range): - reset = f"radial_range is defined but differs in {method.algo_lower.upper()}" - if (azimuth_range is None) and (integr.pos1_range is not None): - reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" - elif (azimuth_range is not None) and (integr.pos1_range != azimuth_range): - reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" - - if reset: - logger.info("ai.integrate1d_ng: Resetting ocl_csr integrator because %s", reset) - csr_integr = self.engines[cython_method].engine - if method.impl_lower == "opencl": - try: - integr = method.class_funct_ng.klass(csr_integr.lut, - image_size=data.size, - checksum=csr_integr.lut_checksum, - empty=empty, - unit=unit, - bin_centers=csr_integr.bin_centers, - platformid=method.target[0], - deviceid=method.target[1], - mask_checksum=csr_integr.mask_checksum) - # Copy some properties from the cython integrator - integr.pos0_range = csr_integr.pos0_range - integr.pos1_range = csr_integr.pos1_range - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - elif method.impl_lower == "python": - integr = method.class_funct_ng.klass(image_size=data.size, - lut=csr_integr.lut, - empty=empty, - unit=unit, - bin_centers=csr_integr.bin_centers, - mask_checksum=csr_integr.mask_checksum) - # Copy some properties from the cython integrator - integr.pos0_range = csr_integr.pos0_range - integr.pos1_range = csr_integr.pos1_range - engine.set_engine(integr) - else: - raise RuntimeError("Unexpected configuration") - - else: - integr = self.engines[method].engine - - kwargs = {"error_model": error_model, - "variance": variance} - if method.impl_lower == "opencl": - kwargs["polarization_checksum"] = polarization_crc - kwargs["solidangle_checksum"] = solidangle_crc - 
intpl = integr.integrate_ng(data, dark=dark, - dummy=dummy, delta_dummy=delta_dummy, - flat=flat, solidangle=solidangle, - absorption=absorption, polarization=polarization, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average, - ** kwargs) - # This section is common to all 3 CSR implementations... - if error_model.do_variance: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity, - intpl.sigma) - result._set_sum_variance(intpl.variance) - else: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity) - result._set_compute_engine(integr.__module__ + "." + integr.__class__.__name__) - result._set_unit(integr.unit) - result._set_sum_signal(intpl.signal) - result._set_sum_normalization(intpl.normalization) - result._set_sum_normalization2(intpl.norm_sq) - result._set_count(intpl.count) - result._set_sem(intpl.sem) - result._set_std(intpl.std) - - # END of CSR/CSC/LUT common implementations - elif (method.method[1:3] == ("no", "histogram") and - method.method[3] in ("python", "cython")): - integr = method.class_funct_ng.function # should be histogram[_engine].histogram1d_engine - if azimuth_range: - chi_min, chi_max = azimuth_range - chi = self.chiArray(shape) - azim_mask = numpy.logical_or(chi > chi_max, chi < chi_min) - if mask is None: - mask = azim_mask - else: - mask = numpy.logical_or(mask, azim_mask) - radial = self.array_from_unit(shape, "center", unit, scale=False) - intpl = integr(radial, npt, data, - dark=dark, - dummy=dummy, delta_dummy=delta_dummy, empty=empty, - variance=variance, - flat=flat, solidangle=solidangle, - polarization=polarization, - absorption=absorption, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average, - mask=mask, - radial_range=radial_range, - error_model=error_model) - - if error_model.do_variance: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity, - intpl.sigma) - result._set_sum_variance(intpl.variance) - result._set_std(intpl.std) - result._set_sem(intpl.sem) - result._set_sum_normalization2(intpl.norm_sq) - else: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity) - result._set_compute_engine(integr.__module__ + "." 
+ integr.__name__) - result._set_unit(unit) - result._set_sum_signal(intpl.signal) - result._set_sum_normalization(intpl.normalization) - result._set_count(intpl.count) - elif method.method[1:4] == ("no", "histogram", "opencl"): - if method not in self.engines: - # instanciated the engine - engine = self.engines[method] = Engine() - else: - engine = self.engines[method] - with engine.lock: - # Validate that the engine used is the proper one - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit was changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if integr.empty != empty: - reset = "empty value changed" - if reset: - logger.info("ai.integrate1d: Resetting integrator because %s", reset) - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - azimuthal = self.chiArray(shape) - try: - integr = method.class_funct_ng.klass(pos0, - npt, - empty=empty, - azimuthal=azimuthal, - unit=unit, - mask=mask, - mask_checksum=mask_crc, - platformid=method.target[0], - deviceid=method.target[1]) - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - intpl = integr(data, dark=dark, - dummy=dummy, - delta_dummy=delta_dummy, - variance=variance, - flat=flat, solidangle=solidangle, - polarization=polarization, absorption=absorption, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average, - radial_range=radial_range, - azimuth_range=azimuth_range, - error_model=error_model) - - if error_model.do_variance: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity, - intpl.sigma) - result._set_sum_variance(intpl.variance) - else: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity) - result._set_compute_engine(integr.__module__ + "." 
+ integr.__class__.__name__) - result._set_unit(integr.unit) - result._set_sum_signal(intpl.signal) - result._set_sum_normalization(intpl.normalization) - result._set_count(intpl.count) - elif (method.method[2:4] == ("histogram", "cython")): - integr = method.class_funct_ng.function # should be histogram[_engine].histogram1d_engine - if method.method[1] == "bbox": - if azimuth_range is None: - chi = None - delta_chi = None - else: - chi = self.chiArray(shape) - delta_chi = self.deltaChi(shape) - radial = self.array_from_unit(shape, "center", unit, scale=False) - delta_radial = self.array_from_unit(shape, "delta", unit, scale=False) - intpl = integr(weights=data, variance=variance, - pos0=radial, delta_pos0=delta_radial, - pos1=chi, delta_pos1=delta_chi, - bins=npt, - dummy=dummy, delta_dummy=delta_dummy, empty=empty, - dark=dark, flat=flat, solidangle=solidangle, - polarization=polarization, absorption=absorption, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average, - mask=mask, - pos0_range=radial_range, - pos1_range=azimuth_range, - error_model=error_model) - elif method.method[1] == "full": - pos = self.array_from_unit(shape, "corner", unit, scale=False) - intpl = integr(weights=data, variance=variance, - pos=pos, - bins=npt, - dummy=dummy, delta_dummy=delta_dummy, empty=empty, - dark=dark, flat=flat, solidangle=solidangle, - polarization=polarization, absorption=absorption, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average, - mask=mask, - pos0_range=radial_range, - pos1_range=azimuth_range, - error_model=error_model) - else: - raise RuntimeError("Should not arrive here") - if error_model.do_variance: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity, - intpl.sigma) - result._set_sum_variance(intpl.variance) - else: - result = Integrate1dResult(intpl.position * unit.scale, - intpl.intensity) - result._set_compute_engine(integr.__module__ + "." + integr.__name__) - result._set_unit(unit) - result._set_sum_signal(intpl.signal) - result._set_sum_normalization(intpl.normalization) - result._set_sum_normalization2(intpl.norm_sq) - result._set_count(intpl.count) - result._set_sem(intpl.sem) - result._set_std(intpl.std) - - else: - raise RuntimeError(f"Fallback method ... 
should no more be used: {method}") - result._set_method(method) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_method_called("integrate1d_ng") - result._set_metadata(metadata) - result._set_error_model(error_model) - result._set_poni(PoniFile(self)) - result._set_has_solidangle_correction(correctSolidAngle) - result._set_weighted_average(method.weighted_average) - - if filename is not None: - save_integrate_result(filename, result) - return result - - _integrate1d_ng = integrate1d_ng - integrate1d = integrate1d_ng - - def integrate_radial(self, data, npt, npt_rad=100, - correctSolidAngle=True, - radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("bbox", "csr", "cython"), unit=units.CHI_DEG, radial_unit=units.Q, - normalization_factor=1.0): - """Calculate the radial integrated profile curve as I = f(chi) - - :param ndarray data: 2D array from the Detector/CCD camera - :param int npt: number of points in the output pattern - :param int npt_rad: number of points in the radial space. Too few points may lead to huge rounding errors. - :param str filename: output filename in 2/3 column ascii format - :param bool correctSolidAngle: correct for solid angle of each pixel if True - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. - :type radial_range: Tuple(float, float) - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. - :type azimuth_range: Tuple(float, float) - :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :param float dummy: value for dead/masked pixels - :param float delta_dummy: precision for dummy value - :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). 
- * 0 for circular polarization or random, - * None for no correction, - * True for using the former correction - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param pyFAI.units.Unit unit: Output units, can be "chi_deg" or "chi_rad" - :param pyFAI.units.Unit radial_unit: unit used for radial representation, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now - :param float normalization_factor: Value of a normalization monitor - :return: chi bins center positions and regrouped intensity - :rtype: Integrate1dResult - """ - azimuth_unit = units.to_unit(unit, type_=units.AZIMUTHAL_UNITS) - res = self.integrate2d_ng(data, npt_rad, npt, - correctSolidAngle=correctSolidAngle, - mask=mask, dummy=dummy, delta_dummy=delta_dummy, - polarization_factor=polarization_factor, - dark=dark, flat=flat, method=method, - normalization_factor=normalization_factor, - radial_range=radial_range, - azimuth_range=azimuth_range, - unit=radial_unit) - - azim_scale = azimuth_unit.scale / units.CHI_DEG.scale - - sum_signal = res.sum_signal.sum(axis=-1) - count = res.count.sum(axis=-1) - sum_normalization = res._sum_normalization.sum(axis=-1) - - mask = numpy.where(count == 0) - empty = dummy if dummy is not None else self._empty - intensity = sum_signal / sum_normalization - intensity[mask] = empty - - if res.sigma is not None: - sum_variance = res.sum_variance.sum(axis=-1) - sigma = numpy.sqrt(sum_variance) / sum_normalization - sigma[mask] = empty - else: - sum_variance = None - sigma = None - result = Integrate1dResult(res.azimuthal * azim_scale, intensity, sigma) - result._set_method_called("integrate_radial") - result._set_unit(azimuth_unit) - result._set_sum_normalization(sum_normalization) - result._set_count(count) - result._set_sum_signal(sum_signal) - result._set_sum_variance(sum_variance) - result._set_has_dark_correction(dark is not None) - result._set_has_flat_correction(flat is not None) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_method = res.method - result._set_compute_engine = res.compute_engine - - return result - - def integrate_fiber(self, data, - npt_output, output_unit=units.Q_OOP, output_unit_range=None, - npt_integrated=100, integrated_unit=units.Q_IP, integrated_unit_range=None, - sample_orientation=None, - filename=None, - correctSolidAngle=True, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("no", "histogram", "cython"), - normalization_factor=1.0): - """Calculate the integrated profile curve along a specific FiberUnit - - :param ndarray data: 2D array from the Detector/CCD camera - :param int npt_output: number of points in the output pattern - :param pyFAI.units.UnitFiber output_unit: Output units - :param output_unit_range: The lower and upper range of the output unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. - :param int npt_integrated: number of points to be integrated along integrated_unit - :param pyFAI.units.UnitFiber integrated_unit: unit to be integrated along integrated_unit_range - :param integrated_unit_range: The lower and upper range to be integrated. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. 
- :param sample_orientation: 1-4, four different orientation of the fiber axis regarding the detector main axis, from 1 to 4 is +90º - :param str filename: output filename in 2/3 column ascii format - :param bool correctSolidAngle: correct for solid angle of each pixel if True - :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :param float dummy: value for dead/masked pixels - :param float delta_dummy: precision for dummy value - :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). - * 0 for circular polarization or random, - * None for no correction, - * True for using the former correction - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param float normalization_factor: Value of a normalization monitor - :return: chi bins center positions and regrouped intensity - :rtype: Integrate1dResult - """ - if isinstance(integrated_unit, units.UnitFiber): - sample_orientation = sample_orientation or integrated_unit.sample_orientation - else: - sample_orientation = sample_orientation or 1 - - reset = False - if isinstance(integrated_unit, units.UnitFiber): - if integrated_unit.sample_orientation != sample_orientation: - integrated_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - integrated_unit = units.to_unit(integrated_unit) - integrated_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(output_unit, units.UnitFiber): - if output_unit.sample_orientation != sample_orientation: - output_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - output_unit = units.to_unit(output_unit) - output_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if reset: - self.reset() - logger.info(f"AzimuthalIntegrator was reset. Current fiber orientation: {sample_orientation}.") - - - if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): - logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! 
The results could be wrong!") - - res = self.integrate2d_ng(data, npt_rad=npt_integrated, npt_azim=npt_output, - correctSolidAngle=correctSolidAngle, - mask=mask, dummy=dummy, delta_dummy=delta_dummy, - polarization_factor=polarization_factor, - dark=dark, flat=flat, method=method, - normalization_factor=normalization_factor, - radial_range=integrated_unit_range, - azimuth_range=output_unit_range, - unit=(integrated_unit, output_unit)) - - unit_scale = output_unit.scale - sum_signal = res.sum_signal.sum(axis=-1) - count = res.count.sum(axis=-1) - sum_normalization = res._sum_normalization.sum(axis=-1) - mask = numpy.where(count == 0) - empty = dummy if dummy is not None else self._empty - intensity = sum_signal / sum_normalization - intensity[mask] = empty - - if res.sigma is not None: - sum_variance = res.sum_variance.sum(axis=-1) - sigma = numpy.sqrt(sum_variance) / sum_normalization - sigma[mask] = empty - else: - sum_variance = None - sigma = None - result = Integrate1dResult(res.azimuthal * unit_scale, intensity, sigma) - result._set_method_called("integrate_radial") - result._set_unit(output_unit) - result._set_sum_normalization(sum_normalization) - result._set_count(count) - result._set_sum_signal(sum_signal) - result._set_sum_variance(sum_variance) - result._set_has_dark_correction(dark is not None) - result._set_has_flat_correction(flat is not None) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_method = res.method - result._set_compute_engine = res.compute_engine - - if filename is not None: - save_integrate_result(filename, result) - - return result - - def integrate_grazing_incidence(self, data, - npt_output, output_unit=units.Q_OOP, output_unit_range=None, - npt_integrated=100, integrated_unit=units.Q_IP, integrated_unit_range=None, - incident_angle=None, tilt_angle=None, sample_orientation=None, - filename=None, - correctSolidAngle=True, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("no", "histogram", "cython"), - normalization_factor=1.0): - """Calculate the integrated profile curve along a specific FiberUnit, additional inputs for incident angle and tilt angle - - :param ndarray data: 2D array from the Detector/CCD camera - :param int npt_output: number of points in the output pattern - :param pyFAI.units.UnitFiber output_unit: Output units - :param output_unit_range: The lower and upper range of the output unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. - :param int npt_integrated: number of points to be integrated along integrated_unit - :param pyFAI.units.UnitFiber integrated_unit: unit to be integrated along integrated_unit_range - :param integrated_unit_range: The lower and upper range to be integrated. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. 
- :param incident_angle: tilting of the sample towards the beam (analog to rot2): in radians - :param tilt_angle: tilting of the sample orthogonal to the beam direction (analog to rot3): in radians - :param sample_orientation: 1-4, four different orientation of the fiber axis regarding the detector main axis, from 1 to 4 is +90º - :param str filename: output filename in 2/3 column ascii format - :param bool correctSolidAngle: correct for solid angle of each pixel if True - :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :param float dummy: value for dead/masked pixels - :param float delta_dummy: precision for dummy value - :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). - * 0 for circular polarization or random, - * None for no correction, - * True for using the former correction - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param float normalization_factor: Value of a normalization monitor - :return: chi bins center positions and regrouped intensity - :rtype: Integrate1dResult - """ - reset = False - - if isinstance(integrated_unit, units.UnitFiber): - incident_angle = incident_angle or integrated_unit.incident_angle - else: - incident_angle = incident_angle or 0.0 - - if isinstance(integrated_unit, units.UnitFiber): - if integrated_unit.incident_angle != incident_angle: - integrated_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - integrated_unit = units.to_unit(integrated_unit) - integrated_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(output_unit, units.UnitFiber): - if output_unit.incident_angle != incident_angle: - output_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - output_unit = units.to_unit(output_unit) - output_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(integrated_unit, units.UnitFiber): - tilt_angle = tilt_angle or integrated_unit.tilt_angle - else: - tilt_angle = tilt_angle or 0.0 - - if isinstance(integrated_unit, units.UnitFiber): - if integrated_unit.tilt_angle != tilt_angle: - integrated_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - integrated_unit = units.to_unit(integrated_unit) - integrated_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(output_unit, units.UnitFiber): - if output_unit.tilt_angle != tilt_angle: - output_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {output_unit}. 
AzimuthalIntegrator will be reset.") - reset = True - else: - output_unit = units.to_unit(output_unit) - output_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(integrated_unit, units.UnitFiber): - sample_orientation = sample_orientation or integrated_unit.sample_orientation - else: - sample_orientation = sample_orientation or 1 - - if isinstance(integrated_unit, units.UnitFiber): - if integrated_unit.sample_orientation != sample_orientation: - integrated_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - integrated_unit = units.to_unit(integrated_unit) - integrated_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(output_unit, units.UnitFiber): - if output_unit.sample_orientation != sample_orientation: - output_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - output_unit = units.to_unit(output_unit) - output_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if reset: - self.reset() - logger.info(f"AzimuthalIntegrator was reset. Current grazing parameters: incident_angle: {incident_angle}, tilt_angle: {tilt_angle}, sample_orientation: {sample_orientation}.") - - return self.integrate_fiber(data=data, - npt_output=npt_output, output_unit=output_unit, output_unit_range=output_unit_range, - npt_integrated=npt_integrated, integrated_unit=integrated_unit, integrated_unit_range=integrated_unit_range, - sample_orientation=sample_orientation, - filename=filename, - correctSolidAngle=correctSolidAngle, - mask=mask, dummy=dummy, delta_dummy=delta_dummy, - polarization_factor=polarization_factor, dark=dark, flat=flat, - method=method, - normalization_factor=normalization_factor) - - def integrate2d_fiber(self, data, - npt_horizontal, horizontal_unit=units.Q_IP, horizontal_unit_range=None, - npt_vertical=1000, vertical_unit=units.Q_OOP, vertical_unit_range=None, - sample_orientation=None, - filename=None, - correctSolidAngle=True, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("no", "histogram", "cython"), - normalization_factor=1.0): - if isinstance(vertical_unit, units.UnitFiber): - sample_orientation = sample_orientation or vertical_unit.sample_orientation - else: - sample_orientation = sample_orientation or 1 - - reset = False - if isinstance(vertical_unit, units.UnitFiber): - if vertical_unit.sample_orientation != sample_orientation: - vertical_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation was set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - vertical_unit = units.to_unit(vertical_unit) - vertical_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation was set to {sample_orientation} for unit {vertical_unit}. 
AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(horizontal_unit, units.UnitFiber): - if horizontal_unit.sample_orientation != sample_orientation: - horizontal_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation was set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - horizontal_unit = units.to_unit(horizontal_unit) - horizontal_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation was set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if reset: - self.reset() - logger.info(f"AzimuthalIntegrator was reset. Current fiber parameters: sample_orientation: {sample_orientation}.") - - - if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): - logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! The results could be wrong!") - - return self.integrate2d_ng(data, npt_rad=npt_horizontal, npt_azim=npt_vertical, - correctSolidAngle=correctSolidAngle, - mask=mask, dummy=dummy, delta_dummy=delta_dummy, - polarization_factor=polarization_factor, - dark=dark, flat=flat, method=method, - normalization_factor=normalization_factor, - radial_range=horizontal_unit_range, - azimuth_range=vertical_unit_range, - unit=(horizontal_unit, vertical_unit), - filename=filename) - - def integrate2d_grazing_incidence(self, data, - npt_horizontal, horizontal_unit=units.Q_IP, horizontal_unit_range=None, - npt_vertical=1000, vertical_unit=units.Q_OOP, vertical_unit_range=None, - incident_angle=None, tilt_angle=None, sample_orientation=None, - filename=None, - correctSolidAngle=True, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("no", "histogram", "cython"), - normalization_factor=1.0): - - reset = False - - if isinstance(vertical_unit, units.UnitFiber): - incident_angle = incident_angle or vertical_unit.incident_angle - else: - incident_angle = incident_angle or 0.0 - - if isinstance(vertical_unit, units.UnitFiber): - if vertical_unit.incident_angle != incident_angle: - vertical_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - vertical_unit = units.to_unit(vertical_unit) - vertical_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(horizontal_unit, units.UnitFiber): - if horizontal_unit.incident_angle != incident_angle: - horizontal_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - horizontal_unit = units.to_unit(horizontal_unit) - horizontal_unit.set_incident_angle(incident_angle) - logger.info(f"Incident angle set to {incident_angle} for unit {horizontal_unit}. 
AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(vertical_unit, units.UnitFiber): - tilt_angle = tilt_angle or vertical_unit.tilt_angle - else: - tilt_angle = tilt_angle or 0.0 - - if isinstance(vertical_unit, units.UnitFiber): - if vertical_unit.tilt_angle != tilt_angle: - vertical_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - vertical_unit = units.to_unit(vertical_unit) - vertical_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(horizontal_unit, units.UnitFiber): - if horizontal_unit.tilt_angle != tilt_angle: - horizontal_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - horizontal_unit = units.to_unit(horizontal_unit) - horizontal_unit.set_tilt_angle(tilt_angle) - logger.info(f"Tilt angle set to {tilt_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(vertical_unit, units.UnitFiber): - sample_orientation = sample_orientation or vertical_unit.sample_orientation - else: - sample_orientation = sample_orientation or 1 - - if isinstance(vertical_unit, units.UnitFiber): - if vertical_unit.sample_orientation != sample_orientation: - vertical_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - vertical_unit = units.to_unit(vertical_unit) - vertical_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if isinstance(horizontal_unit, units.UnitFiber): - if horizontal_unit.sample_orientation != sample_orientation: - horizontal_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - else: - horizontal_unit = units.to_unit(horizontal_unit) - horizontal_unit.set_sample_orientation(sample_orientation) - logger.info(f"Sample orientation set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") - reset = True - - if reset: - self.reset() - logger.info(f"AzimuthalIntegrator was reset. Current grazing parameters: incident_angle: {incident_angle}, tilt_angle: {tilt_angle}, sample_orientation: {sample_orientation}.") - - if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): - logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! 
The results could be wrong!") - - return self.integrate2d_fiber(data=data, npt_horizontal=npt_horizontal, npt_vertical=npt_vertical, - horizontal_unit=horizontal_unit, vertical_unit=vertical_unit, - horizontal_unit_range=horizontal_unit_range, - vertical_unit_range=vertical_unit_range, - sample_orientation=sample_orientation, - filename=filename, - correctSolidAngle=correctSolidAngle, - mask=mask, dummy=dummy, delta_dummy=delta_dummy, - polarization_factor=polarization_factor, dark=dark, flat=flat, - method=method, - normalization_factor=normalization_factor, - ) - - @deprecated(since_version="0.21", only_once=True, deprecated_since="0.21.0") - def integrate2d_legacy(self, data, npt_rad, npt_azim=360, - filename=None, correctSolidAngle=True, variance=None, - error_model=None, radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=None, unit=units.Q, safe=True, - normalization_factor=1.0, metadata=None): - """ - Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default - - Multi algorithm implementation (tries to be bullet proof) - - :param data: 2D array from the Detector/CCD camera - :type data: ndarray - :param npt_rad: number of points in the radial direction - :type npt_rad: int - :param npt_azim: number of points in the azimuthal direction - :type npt_azim: int - :param filename: output image (as edf format) - :type filename: str - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param variance: array containing the variance of the data. If not available, no error propagation is done - :type variance: ndarray - :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :type error_model: str - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :type mask: ndarray - :param dummy: value for dead/masked pixels - :type dummy: float - :param delta_dummy: precision for dummy value - :type delta_dummy: float - :param polarization_factor: polarization factor between -1 (vertical) - and +1 (horizontal). 0 for circular polarization or random, - None for no correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now - :type unit: pyFAI.units.Unit - :param safe: Do some extra checks to ensure LUT is still valid. False is faster. - :type safe: bool - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param all: if true, return many more intermediate results as a dict (deprecated, please refer to the documentation of Integrate2dResult). - :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
- :type all: bool - :return: azimuthaly regrouped intensity, q/2theta/r pos. and chi pos. - :rtype: Integrate2dResult, dict - """ - method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) - assert method.dimension == 2 - npt = (npt_rad, npt_azim) - unit = units.to_unit(unit) - pos0_scale = unit.scale - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - shape = data.shape - - if radial_range: - radial_range = tuple([i / pos0_scale for i in radial_range]) - - if variance is not None: - assert variance.size == data.size - elif error_model: - error_model = error_model.lower() - if error_model == "poisson": - variance = numpy.ascontiguousarray(data, numpy.float32) - - if azimuth_range is not None: - azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1)) - if azimuth_range[1] <= azimuth_range[0]: - azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi) - self.check_chi_disc(azimuth_range) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - I = None - sigma = None - sum_ = None - count = None - - if method.algo_lower == "lut": - if EXT_LUT_ENGINE not in self.engines: - engine = self.engines[EXT_LUT_ENGINE] = Engine() - else: - engine = self.engines[EXT_LUT_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and (not integr.check_mask): - reset = "mask but LUT was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but LUT has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if radial_range != integr.pos0_range: - reset = "radial_range changed" - if azimuth_range != integr.pos1_range: - reset = "azimuth_range changed" - error = False - if reset: - logger.info("ai.integrate2d: Resetting integrator because %s", reset) - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, algo="LUT", unit=unit, scale=False) - except MemoryError: - # LUT method is hungry im memory... 
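# Note: the LUT pads every bin to the largest number of contributing pixels and
# stores an (index, coefficient) pair per entry, so its footprint grows roughly as
# bins * max_pixels_per_bin * (a few bytes per entry), which can exhaust memory on
# large detectors; a MemoryError is therefore handled by dropping the cached engines
# and falling back to the default 2D method below.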
- logger.warning("MemoryError: falling back on forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - engine.set_engine(integr) - if not error: - if method.impl_lower == "opencl": - if OCL_LUT_ENGINE in self.engines: - ocl_engine = self.engines[OCL_LUT_ENGINE] - else: - ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() - with ocl_engine.lock: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["lut"] != integr.lut_checksum): - ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - - if (not error) and (ocl_integr is not None): - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor, - safe=safe) - I.shape = npt - I = I.T - bins_rad = integr.bin_centers0 # this will be copied later - bins_azim = integr.bin_centers1 - else: - I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor - ) - - if method.algo_lower == "csr": - if EXT_CSR_ENGINE not in self.engines: - engine = self.engines[EXT_CSR_ENGINE] = Engine() - else: - engine = self.engines[EXT_CSR_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and (integr.pos0_range is not None): - reset = "radial_range was defined in CSR" - elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): - reset = "radial_range is defined but differs in CSR" - if (azimuth_range is None) and (integr.pos1_range is not None): - reset = "azimuth_range not defined and CSR had azimuth_range defined" - elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): - reset = "azimuth_range requested and CSR's azimuth_range don't match" - error = False - if reset: - logger.info("AI.integrate2d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="CSR", - scale=False) - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - engine.set_engine(integr) - if not error: - if method.impl_lower == "opencl": - if OCL_CSR_ENGINE in self.engines: - 
ocl_engine = self.engines[OCL_CSR_ENGINE] - else: - ocl_engine = self.engines[OCL_CSR_ENGINE] = Engine() - with ocl_engine.lock: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or (ocl_integr.on_device["data"] != integr.lut_checksum): - ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - if (not error) and (ocl_integr is not None): - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - safe=safe, - normalization_factor=normalization_factor) - I.shape = npt - I = I.T - bins_rad = integr.bin_centers0 # this will be copied later - bins_azim = integr.bin_centers1 - else: - I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if method.method[1:4] in (("pseudo", "histogram", "cython"), ("full", "histogram", "cython")): - logger.debug("integrate2d uses SplitPixel implementation") - pos = self.array_from_unit(shape, "corner", unit, scale=False) - I, bins_rad, bins_azim, sum_, count = splitPixel.fullSplit2D(pos=pos, - weights=data, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=dummy if dummy is not None else self._empty) - if method.method[1:4] == ("bbox", "histogram", "cython"): - logger.debug("integrate2d uses BBox implementation") - chi = self.chiArray(shape) - dchi = self.deltaChi(shape) - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) - I, bins_rad, bins_azim, sum_, count = splitBBox.histoBBox2d(weights=data, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=dummy if dummy is not None else self._empty) - - if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": - logger.debug("integrate2d uses numpy or cython implementation") - data = data.astype(numpy.float32) # it is important to make a copy see issue #88 - mask = self.create_mask(data, mask, dummy, delta_dummy, - unit=unit, - radial_range=radial_range, - azimuth_range=azimuth_range, - mode="where") - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - pos1 = self.chiArray(shape) - - if radial_range is None: - radial_range = [pos0.min(), pos0.max() * EPS32] - - if azimuth_range is None: - azimuth_range = [pos1.min(), pos1.max() * EPS32] - - if variance is not None: - variance = variance[mask] - - if dark is not None: - data -= dark - - if flat is not None: - data /= flat - - if polarization is not None: - data /= polarization - - if solidangle is not None: - data /= solidangle - - data = data[mask] - 
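# Note: once pos0, pos1 and data are reduced to the valid pixels (the masking
# continues just below), this no-split branch boils down to two 2-D histograms --
# an unweighted one giving the pixel count per (azimuthal, radial) bin and one
# weighted by the corrected intensities -- and the regrouped image is their ratio
# divided by the normalization factor, with empty bins set to the dummy value.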
pos0 = pos0[mask] - pos1 = pos1[mask] - if method.impl_lower == "cython": - I, bins_azim, bins_rad, sum_, count = histogram.histogram2d(pos0=pos1, - pos1=pos0, - weights=data, - bins=(npt_azim, npt_rad), - split=False, - empty=dummy if dummy is not None else self._empty, - normalization_factor=normalization_factor) - elif method.impl_lower == "python": - logger.debug("integrate2d uses Numpy implementation") - count, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), range=[azimuth_range, radial_range]) - bins_azim = (b[1:] + b[:-1]) / 2.0 - bins_rad = (c[1:] + c[:-1]) / 2.0 - count1 = numpy.maximum(1, count) - sum_, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), - weights=data, range=[azimuth_range, radial_range]) - I = sum_ / count1 / normalization_factor - I[count == 0] = dummy if dummy is not None else self._empty - # I know I make copies .... - bins_rad = bins_rad * pos0_scale - bins_azim = bins_azim * 180.0 / pi - - result = Integrate2dResult(I, bins_rad, bins_azim, sigma) - result._set_method_called("integrate2d") - result._set_compute_engine(str(method)) - result._set_unit(unit) - result._set_count(count) - result._set_sum(sum_) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_metadata(metadata) - - if filename is not None: - save_integrate_result(filename, result) - - return result - - _integrate2d_legacy = integrate2d_legacy - - def integrate2d_ng(self, data, npt_rad, npt_azim=360, - filename=None, correctSolidAngle=True, variance=None, - error_model=None, radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=("bbox", "csr", "cython"), unit=units.Q, - safe=True, normalization_factor=1.0, metadata=None): - """ - Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default - - Multi algorithm implementation (tries to be bullet proof) - - :param data: 2D array from the Detector/CCD camera - :type data: ndarray - :param npt_rad: number of points in the radial direction - :type npt_rad: int - :param npt_azim: number of points in the azimuthal direction - :type npt_azim: int - :param filename: output image (as edf format) - :type filename: str - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param variance: array containing the variance of the data. If not available, no error propagation is done - :type variance: ndarray - :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :type error_model: str - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
- :type azimuth_range: (float, float), optional - :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :type mask: ndarray - :param dummy: value for dead/masked pixels - :type dummy: float - :param delta_dummy: precision for dummy value - :type delta_dummy: float - :param polarization_factor: polarization factor between -1 (vertical) - and +1 (horizontal). 0 for circular polarization or random, - None for no correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :type method: str - :param pyFAI.units.Unit unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for anything defined as pyFAI.units.RADIAL_UNITS - can also be a 2-tuple of (RADIAL_UNITS, AZIMUTHAL_UNITS) (advanced usage) - :param safe: Do some extra checks to ensure LUT is still valid. False is faster. - :type safe: bool - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param metadata: JSON serializable object containing the metadata, usually a dictionary. - :return: azimuthaly regrouped intensity, q/2theta/r pos. and chi pos. - :rtype: Integrate2dResult, dict - """ - method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) - assert method.dimension == 2 - npt = (npt_rad, npt_azim) - if isinstance(unit, (tuple, list)) and len(unit) == 2: - radial_unit, azimuth_unit = unit - else: - radial_unit = unit - azimuth_unit = units.CHI_DEG - radial_unit = units.to_unit(radial_unit, units.RADIAL_UNITS) - azimuth_unit = units.to_unit(azimuth_unit, units.AZIMUTHAL_UNITS) - unit = (radial_unit, azimuth_unit) - space = (radial_unit.space, azimuth_unit.space) - pos0_scale = radial_unit.scale - pos1_scale = azimuth_unit.scale - empty = dummy if dummy is not None else self._empty - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - shape = data.shape - - if radial_range: - radial_range = tuple([i / pos0_scale for i in radial_range]) - - error_model = ErrorModel.parse(error_model) - if variance is not None: - assert variance.size == data.size - error_model = ErrorModel.VARIANCE - if error_model.poissonian and not method.manage_variance: - error_model = ErrorModel.VARIANCE - if dark is None: - variance = numpy.maximum(data, 1.0).astype(numpy.float32) - else: - variance = (numpy.maximum(data, 1.0) + numpy.maximum(dark, 0.0)).astype(numpy.float32) - - if azimuth_range is not None and azimuth_unit.period: - azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1)) - if azimuth_range[1] <= azimuth_range[0]: - azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi) - self.check_chi_disc(azimuth_range) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if 
flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - if method.algo_is_sparse: - intpl = None - cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] - if cython_method not in self.engines: - cython_engine = self.engines[cython_method] = Engine() - else: - cython_engine = self.engines[cython_method] - with cython_engine.lock: - cython_integr = cython_engine.engine - cython_reset = None - - if cython_integr is None: - cython_reset = "of first initialization" - if (not cython_reset) and safe: - if cython_integr.space != space: - cython_reset = f"unit {cython_integr.unit} incompatible with requested {unit}" - if cython_integr.bins != npt: - cython_reset = f"number of points {cython_integr.bins} incompatible with requested {npt}" - if cython_integr.size != data.size: - cython_reset = f"input image size {cython_integr.size} incompatible with requested {data.size}" - if cython_integr.empty != empty: - cython_reset = f"empty value {cython_integr.empty} incompatible with requested {empty}" - if (mask is not None) and (not cython_integr.check_mask): - cython_reset = f"mask but {method.algo_lower.upper()} was without mask" - elif (mask is None) and (cython_integr.cmask is not None): - cython_reset = f"no mask but { method.algo_lower.upper()} has mask" - elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): - cython_reset = "mask changed" - if (radial_range is None) and (cython_integr.pos0_range is not None): - cython_reset = f"radial_range was defined in { method.algo_lower.upper()}" - elif (radial_range is not None) and (cython_integr.pos0_range != radial_range): - cython_reset = f"radial_range is defined but differs in {method.algo_lower.upper()}" - if (azimuth_range is None) and (cython_integr.pos1_range is not None): - cython_reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" - elif (azimuth_range is not None) and (cython_integr.pos1_range != azimuth_range): - cython_reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" - if cython_reset: - logger.info("AI.integrate2d_ng: Resetting Cython integrator because %s", cython_reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - cython_integr = self.setup_sparse_integrator(shape, npt, mask, - radial_range, azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo=method.algo_lower, - empty=empty, scale=False) - except MemoryError: # sparse method are hungry... - logger.warning("MemoryError: falling back on forward implementation") - cython_integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - cython_engine.set_engine(cython_integr) - # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
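# Note: whichever implementation is selected below (Cython, OpenCL or Python), the
# sparse engines all evaluate the same quantity -- two sparse matrix-vector products
# with the pixel-to-bin coefficient matrix followed by a ratio.  A rough sketch,
# assuming `csr` is such a scipy.sparse matrix and `signal`/`norm` are the corrected
# image and normalization arrays (illustrative names only, not pyFAI API):
#
#     import numpy
#     sum_signal = csr.dot(numpy.ravel(signal))
#     sum_norm = csr.dot(numpy.ravel(norm))
#     intensity = numpy.full_like(sum_norm, empty)
#     good = sum_norm > 0
#     intensity[good] = sum_signal[good] / sum_norm[good]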
- if method.impl_lower != "cython": - # method.impl_lower in ("opencl", "python"): - if method not in self.engines: - # instanciated the engine - engine = self.engines[method] = Engine() - else: - engine = self.engines[method] - with engine.lock: - # Validate that the engine used is the proper one - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.space != space: - reset = f"unit {integr.unit} incompatible with requested {unit}" - if numpy.prod(integr.bins) != numpy.prod(npt): - reset = f"number of points {integr.bins} incompatible with requested {npt}" - if integr.size != data.size: - reset = f"input image size {integr.size} incompatible with requested {data.size}" - if integr.empty != empty: - reset = f"empty value {integr.empty} incompatible with requested {empty}" - if (mask is not None) and (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and (integr.pos0_range is not None): - reset = "radial_range was defined in CSR" - elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): - reset = "radial_range is defined but differs in CSR" - if (azimuth_range is None) and (integr.pos1_range is not None): - reset = "azimuth_range not defined and CSR had azimuth_range defined" - elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): - reset = "azimuth_range requested and CSR's azimuth_range don't match" - error = False - if reset: - logger.info("AI.integrate2d: Resetting integrator because %s", reset) - split = method.split_lower - try: - cython_integr = self.setup_sparse_integrator(shape, npt, mask, - radial_range, azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo=method.algo_lower, - empty=empty, scale=False) - except MemoryError: - logger.warning("MemoryError: falling back on default implementation") - cython_integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - cython_engine.set_engine(cython_integr) - if not error: - if method in self.engines: - ocl_py_engine = self.engines[method] - else: - ocl_py_engine = self.engines[method] = Engine() - integr = ocl_py_engine.engine - if integr is None or integr.checksum != cython_integr.lut_checksum: - if (method.impl_lower == "opencl"): - with ocl_py_engine.lock: - integr = method.class_funct_ng.klass(cython_integr.lut, - cython_integr.size, - bin_centers=cython_integr.bin_centers0, - azim_centers=cython_integr.bin_centers1, - platformid=method.target[0], - deviceid=method.target[1], - checksum=cython_integr.lut_checksum, - unit=unit, empty=empty, - mask_checksum=mask_crc) - - elif (method.impl_lower == "python"): - with ocl_py_engine.lock: - integr = method.class_funct_ng.klass(cython_integr.size, - cython_integr.lut, - bin_centers0=cython_integr.bin_centers0, - bin_centers1=cython_integr.bin_centers1, - checksum=cython_integr.lut_checksum, - unit=unit, empty=empty, - mask_checksum=mask_crc) - integr.pos0_range = cython_integr.pos0_range - integr.pos1_range = cython_integr.pos1_range - ocl_py_engine.set_engine(integr) - - if (integr is not None): - intpl = integr.integrate_ng(data, - variance=variance, - error_model=error_model, - dark=dark, flat=flat, - solidangle=solidangle, - 
solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - safe=safe, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average,) - if intpl is None: # fallback if OpenCL failed or default cython - # The integrator has already been initialized previously - intpl = cython_integr.integrate_ng(data, - variance=variance, - error_model=error_model, - dummy=dummy, - delta_dummy=delta_dummy, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - weighted_average=method.weighted_average,) - - elif method.algo_lower == "histogram": - if method.split_lower in ("pseudo", "full"): - logger.debug("integrate2d uses (full, histogram, cython) implementation") - pos = self.array_from_unit(shape, "corner", unit, scale=False) - integrator = method.class_funct_ng.function - intpl = integrator(pos=pos, - weights=data, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=empty, - variance=variance, - error_model=error_model, - allow_pos0_neg=not radial_unit.positive, - weighted_average=method.weighted_average,) - - elif method.split_lower == "bbox": - logger.debug("integrate2d uses BBox implementation") - pos0 = self.array_from_unit(shape, "center", radial_unit, scale=False) - dpos0 = self.array_from_unit(shape, "delta", radial_unit, scale=False) - pos1 = self.array_from_unit(shape, "center", azimuth_unit, scale=False) - dpos1 = self.array_from_unit(shape, "delta", azimuth_unit, scale=False) - integrator = method.class_funct_ng.function - intpl = integrator(weights=data, - pos0=pos0, - delta_pos0=dpos0, - pos1=pos1, - delta_pos1=dpos1, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=empty, - variance=variance, - error_model=error_model, - allow_pos0_neg=not radial_unit.positive, - clip_pos1=bool(azimuth_unit.period), - weighted_average=method.weighted_average,) - elif method.split_lower == "no": - if method.impl_lower == "opencl": - logger.debug("integrate2d uses OpenCL histogram implementation") - if method not in self.engines: - # instanciated the engine - engine = self.engines[method] = Engine() - else: - engine = self.engines[method] - with engine.lock: - # Validate that the engine used is the proper one #TODO!!!! 
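# Note: the checks below follow the same caching pattern as the other engines kept in
# self.engines -- the OpenCL histogrammer is reused as long as its unit space, number
# of bins, image size, mask checksum and the checksums of the cached radial/azimuthal
# arrays still match the request; any mismatch records a human-readable reset reason
# and the engine is rebuilt.  Radial/azimuthal ranges are applied at integration time
# rather than baked into the engine.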
- integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.space != space: - reset = f"unit {integr.unit} incompatible with requested {unit}" - if (integr.bins_radial, integr.bins_azimuthal) != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and (integr.on_device.get("mask") != mask_crc): - reset = "mask changed" - if self._cached_array[f"{radial_unit.space}_crc"] != integr.on_device.get("radial"): - reset = "radial array changed" - if self._cached_array[f"{azimuth_unit.space}_crc"] != integr.on_device.get("azimuthal"): - reset = "azimuthal array changed" - # Nota: Ranges are enforced at runtime, not initialization - error = False - if reset: - logger.info("AI.integrate2d: Resetting OCL_Histogram2d integrator because %s", reset) - rad = self.array_from_unit(shape, typ="center", unit=radial_unit, scale=False) - rad_crc = self._cached_array[f"{radial_unit.space}_crc"] = crc32(rad) - azi = self.array_from_unit(shape, typ="center", unit=azimuth_unit, scale=False) - azi_crc = self._cached_array[f"{azimuth_unit.space}_crc"] = crc32(azi) - try: - integr = method.class_funct_ng.klass(rad, - azi, - *npt, - radial_checksum=rad_crc, - azimuthal_checksum=azi_crc, - empty=empty, unit=unit, - mask=mask, mask_checksum=mask_crc, - platformid=method.target[0], - deviceid=method.target[1] - ) - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - engine.set_engine(integr) - if not error: - intpl = integr.integrate(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - safe=safe, - normalization_factor=normalization_factor, - radial_range=radial_range, - azimuthal_range=azimuth_range, - error_model=error_model, - weighted_average=method.weighted_average,) -#################### - else: # if method.impl_lower in ["python", "cython"]: - logger.debug("integrate2d uses [CP]ython histogram implementation") - radial = self.array_from_unit(shape, "center", radial_unit, scale=False) - azim = self.array_from_unit(shape, "center", azimuth_unit, scale=False) - if method.impl_lower == "python": - data = data.astype(numpy.float32) # it is important to make a copy see issue #88 - mask = self.create_mask(data, mask, dummy, delta_dummy, - unit=unit, - radial_range=radial_range, - azimuth_range=azimuth_range, - mode="normal").ravel() - histogrammer = method.class_funct_ng.function - intpl = histogrammer(radial=radial, - azimuthal=azim, - bins=(npt_rad, npt_azim), - raw=data, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - absorption=None, - mask=mask, - dummy=dummy, - delta_dummy=delta_dummy, - normalization_factor=normalization_factor, - empty=self._empty, - variance=variance, - dark_variance=None, - error_model=error_model, - radial_range=radial_range, - azimuth_range=azimuth_range, - allow_radial_neg=not radial_unit.positive, - clip_pos1=bool(azimuth_unit.period), - weighted_average=method.weighted_average,) - - I = intpl.intensity - bins_azim = 
intpl.azimuthal - bins_rad = intpl.radial - signal2d = intpl.signal - norm2d = intpl.normalization - count = intpl.count - if error_model.do_variance: - std = intpl.std - sem = intpl.sem - var2d = intpl.variance - norm2d_sq = intpl.norm_sq - else: - std = sem = var2d = norm2d_sq = None - - # Duplicate arrays on purpose .... - bins_rad = bins_rad * pos0_scale - bins_azim = bins_azim * pos1_scale - - result = Integrate2dResult(I, bins_rad, bins_azim, sem) - result._set_method_called("integrate2d") - result._set_compute_engine(str(method)) - result._set_method(method) - result._set_radial_unit(radial_unit) - result._set_azimuthal_unit(azimuth_unit) - result._set_count(count) - # result._set_sum(sum_) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_metadata(metadata) - - result._set_sum_signal(signal2d) - result._set_sum_normalization(norm2d) - if error_model.do_variance: - result._set_sum_normalization2(norm2d_sq) - result._set_sum_variance(var2d) - result._set_std(std) - result._set_std(sem) - - if filename is not None: - save_integrate_result(filename, result) - - return result - - integrate2d = _integrate2d_ng = integrate2d_ng - - @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") - def save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): - """This method save the result of a 1D integration. - - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 1D integration - :type filename: str - :param dim1: the x coordinates of the integrated curve - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - self.__save1D(filename=filename, - dim1=dim1, - I=I, - error=error, - dim1_unit=dim1_unit, - has_dark=has_dark, - has_flat=has_flat, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor) - - def __save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): - """This method save the result of a 1D integration. 
- - :param filename: the filename used to save the 1D integration - :type filename: str - :param dim1: the x coordinates of the integrated curve - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - if not filename: - return - writer = DefaultAiWriter(None, self) - writer.save1D(filename, dim1, I, error, dim1_unit, has_dark, has_flat, - polarization_factor, normalization_factor) - - @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") - def save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, - polarization_factor=None, normalization_factor=None): - """This method save the result of a 2D integration. - - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 2D histogram - :type filename: str - :param dim1: the 1st coordinates of the histogram - :type dim1: numpy.ndarray - :param dim1: the 2nd coordinates of the histogram - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - self.__save2D(filename=filename, - I=I, - dim1=dim1, - dim2=dim2, - error=error, - dim1_unit=dim1_unit, - has_dark=has_dark, - has_flat=has_flat, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor) - - def __save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, - polarization_factor=None, normalization_factor=None): - """This method save the result of a 2D integration. 
- - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 2D histogram - :type filename: str - :param dim1: the 1st coordinates of the histogram - :type dim1: numpy.ndarray - :param dim1: the 2nd coordinates of the histogram - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - if not filename: - return - writer = DefaultAiWriter(None, self) - writer.save2D(filename, I, dim1, dim2, error, dim1_unit, has_dark, has_flat, - polarization_factor, normalization_factor) - - def medfilt1d(self, data, npt_rad=1024, npt_azim=512, - correctSolidAngle=True, - radial_range=None, azimuth_range=None, - polarization_factor=None, dark=None, flat=None, - method="splitpixel", unit=units.Q, - percentile=50, dummy=None, delta_dummy=None, - mask=None, normalization_factor=1.0, metadata=None): - """Perform the 2D integration and filter along each row using a median - filter - - :param data: input image as numpy array - :param npt_rad: number of radial points - :param npt_azim: number of azimuthal points - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - - :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). 
- 0 for circular polarization or random, - None for no correction, - True for using the former correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param unit: unit to be used for integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param percentile: which percentile use for cutting out - percentil can be a 2-tuple to specify a region to - average out - :param mask: masked out pixels array - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param metadata: any other metadata, - :type metadata: JSON serializable dict - :return: Integrate1D like result like - """ - if dummy is None: - dummy = numpy.finfo(numpy.float32).min - delta_dummy = None - unit = units.to_unit(unit) - method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) - if (method.impl_lower == "opencl") and npt_azim and (npt_azim > 1): - old = npt_azim - npt_azim = 1 << int(round(log(npt_azim, 2))) # power of two above - if npt_azim != old: - logger.warning("Change number of azimuthal bins to nearest power of two: %s->%s", - old, npt_azim) - res2d = self.integrate2d(data, npt_rad, npt_azim, mask=mask, - flat=flat, dark=dark, - radial_range=radial_range, - azimuth_range=azimuth_range, - unit=unit, method=method.method, - dummy=dummy, delta_dummy=delta_dummy, - correctSolidAngle=correctSolidAngle, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor) - integ2d = res2d.intensity - if (method.impl_lower == "opencl"): - ctx = self.engines[res2d.method].engine.ctx - if numpy.isfortran(integ2d) and integ2d.dtype == numpy.float32: - rdata = integ2d.T - horizontal = True - else: - rdata = numpy.ascontiguousarray(integ2d, dtype=numpy.float32) - horizontal = False - - if OCL_SORT_ENGINE not in self.engines: - with self._lock: - if OCL_SORT_ENGINE not in self.engines: - self.engines[OCL_SORT_ENGINE] = Engine() - engine = self.engines[OCL_SORT_ENGINE] - with engine.lock: - sorter = engine.engine - if (sorter is None) or \ - (sorter.npt_width != rdata.shape[1]) or\ - (sorter.npt_height != rdata.shape[0]): - logger.info("reset opencl sorter") - sorter = ocl_sort.Separator(npt_height=rdata.shape[0], npt_width=rdata.shape[1], ctx=ctx) - engine.set_engine(sorter) - if "__len__" in dir(percentile): - if horizontal: - spectrum = sorter.trimmed_mean_horizontal(rdata, dummy, [(i / 100.0) for i in percentile]).get() - else: - spectrum = sorter.trimmed_mean_vertical(rdata, dummy, [(i / 100.0) for i in percentile]).get() - else: - if horizontal: - spectrum = sorter.filter_horizontal(rdata, dummy, percentile / 100.0).get() - else: - spectrum = sorter.filter_vertical(rdata, dummy, percentile / 100.0).get() - else: - dummies = (integ2d == dummy).sum(axis=0) - # add a line of zeros at the end (along npt_azim) so that the value for no valid pixel is 0 - sorted_ = numpy.zeros((npt_azim + 1, npt_rad)) - sorted_[:npt_azim,:] = numpy.sort(integ2d, axis=0) - - if "__len__" in dir(percentile): - # mean over the valid value - lower = dummies + (numpy.floor(min(percentile) * (npt_azim - dummies) / 100.)).astype(int) - upper = dummies + (numpy.ceil(max(percentile) * (npt_azim - dummies) / 100.)).astype(int) - bounds = numpy.zeros(sorted_.shape, dtype=int) - assert (lower >= 0).all() - assert (upper <= npt_azim).all() - - rng = numpy.arange(npt_rad) - bounds[lower, rng] = 1 - 
bounds[upper, rng] = 1 - valid = (numpy.cumsum(bounds, axis=0) % 2) - invalid = numpy.logical_not(valid) - sorted_[invalid] = numpy.nan - spectrum = numpy.nanmean(sorted_, axis=0) - else: - # read only the valid value - dummies = (integ2d == dummy).sum(axis=0) - pos = dummies + (numpy.round(percentile * (npt_azim - dummies) / 100.)).astype(int) - assert (pos >= 0).all() - assert (pos <= npt_azim).all() - spectrum = sorted_[(pos, numpy.arange(npt_rad))] - - result = Integrate1dResult(res2d.radial, spectrum) - result._set_method_called("medfilt1d") - result._set_compute_engine(str(method)) - result._set_percentile(percentile) - result._set_npt_azim(npt_azim) - result._set_unit(unit) - result._set_has_mask_applied(res2d.has_mask_applied) - result._set_metadata(metadata) - result._set_has_dark_correction(res2d.has_dark_correction) - result._set_has_flat_correction(res2d.has_flat_correction) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - return result - - def sigma_clip_legacy(self, data, npt_rad=1024, npt_azim=512, - correctSolidAngle=True, polarization_factor=None, - radial_range=None, azimuth_range=None, - dark=None, flat=None, - method=("full", "histogram", "cython"), unit=units.Q, - thres=3, max_iter=5, dummy=None, delta_dummy=None, - mask=None, normalization_factor=1.0, metadata=None, - safe=True, **kwargs): - """Perform first a 2D integration and then an iterative sigma-clipping - filter along each row. See the doc of scipy.stats.sigmaclip for the - options `thres` and `max_iter`. - - :param data: input image as numpy array - :param npt_rad: number of radial points (alias: npt) - :param npt_azim: number of azimuthal points - :param bool correctSolidAngle: correct for solid angle of each pixel when set - :param float polarization_factor: polarization factor between -1 (vertical) - and +1 (horizontal). - - - 0 for circular polarization or random, - - None for no correction, - - True for using the former correction - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param unit: unit to be used for integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param thres: cut-off for n*sigma: discard any values with `|I-| > thres*σ`. - The threshold can be a 2-tuple with sigma_low and sigma_high. - :param max_iter: maximum number of iterations - :param mask: masked out pixels array - :param float normalization_factor: Value of a normalization monitor - :param metadata: any other metadata, - :type metadata: JSON serializable dict - :param safe: unset to save some checks on sparse matrix shape/content. - :kwargs: unused, just for signature compatibility when used within Worker. 
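The medfilt1d method above also accepts a 2-tuple percentile, which turns the median filter into a trimmed mean over the azimuthal bins (the code path with the cumsum bounds shown above). A minimal sketch, with `calib.poni` and `img` as hypothetical inputs:

    import pyFAI
    ai = pyFAI.load("calib.poni")                 # hypothetical calibration file
    res = ai.medfilt1d(img, npt_rad=1024, npt_azim=512,
                       unit="q_nm^-1", percentile=(20, 80))
    q, filtered = res.radial, res.intensity      # Integrate1dResult fields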
- :return: Integrate1D like result like - - Nota: The initial 2D-integration requires pixel splitting - """ - # compatibility layer with sigma_clip_ng - if "npt" in kwargs: - npt_rad = kwargs["npt"] - # We use NaN as dummies - if dummy is None: - dummy = numpy.nan - delta_dummy = None - unit = units.to_unit(unit) - method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) - if "__len__" in dir(thres) and len(thres) > 0: - sigma_lo = thres[0] - sigma_hi = thres[-1] - else: - sigma_lo = sigma_hi = thres - - if (method.impl_lower == "opencl") and npt_azim and (npt_azim > 1): - old = npt_azim - npt_azim = 1 << int(round(log(npt_azim, 2))) # power of two above - if npt_azim != old: - logger.warning("Change number of azimuthal bins to nearest power of two: %s->%s", - old, npt_azim) - - res2d = self.integrate2d(data, npt_rad, npt_azim, mask=mask, - azimuth_range=azimuth_range, - radial_range=radial_range, - flat=flat, dark=dark, - unit=unit, method=method, - dummy=dummy, delta_dummy=delta_dummy, - correctSolidAngle=correctSolidAngle, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor, - safe=safe) - image = res2d.intensity - if (method.impl_lower == "opencl"): - if (method.algo_lower == "csr") and \ - (OCL_CSR_ENGINE in self.engines) and \ - (self.engines[OCL_CSR_ENGINE].engine is not None): - ctx = self.engines[OCL_CSR_ENGINE].engine.ctx - elif (method.algo_lower == "csr") and \ - (OCL_LUT_ENGINE in self.engines) and \ - (self.engines[OCL_LUT_ENGINE].engine is not None): - ctx = self.engines[OCL_LUT_ENGINE].engine.ctx - else: - ctx = None - - if numpy.isfortran(image) and image.dtype == numpy.float32: - rdata = image.T - horizontal = True - else: - rdata = numpy.ascontiguousarray(image, dtype=numpy.float32) - horizontal = False - - if OCL_SORT_ENGINE not in self.engines: - with self._lock: - if OCL_SORT_ENGINE not in self.engines: - self.engines[OCL_SORT_ENGINE] = Engine() - engine = self.engines[OCL_SORT_ENGINE] - with engine.lock: - sorter = engine.engine - if (sorter is None) or \ - (sorter.npt_width != rdata.shape[1]) or\ - (sorter.npt_height != rdata.shape[0]): - logger.info("reset opencl sorter") - sorter = ocl_sort.Separator(npt_height=rdata.shape[0], npt_width=rdata.shape[1], ctx=ctx) - engine.set_engine(sorter) - - if horizontal: - res = sorter.sigma_clip_horizontal(rdata, dummy=dummy, - sigma_lo=sigma_lo, - sigma_hi=sigma_hi, - max_iter=max_iter) - else: - res = sorter.sigma_clip_vertical(rdata, dummy=dummy, - sigma_lo=sigma_lo, - sigma_hi=sigma_hi, - max_iter=max_iter) - mean = res[0].get() - std = res[1].get() - else: - as_strided = numpy.lib.stride_tricks.as_strided - mask = numpy.logical_not(numpy.isfinite(image)) - dummies = mask.sum() - image[mask] = numpy.nan - mean = numpy.nanmean(image, axis=0) - std = numpy.nanstd(image, axis=0) - for _ in range(max_iter): - mean2d = as_strided(mean, image.shape, (0, mean.strides[0])) - std2d = as_strided(std, image.shape, (0, std.strides[0])) - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - delta = (image - mean2d) / std2d - mask = numpy.logical_or(delta > sigma_hi, - delta < -sigma_lo) - dummies = mask.sum() - if dummies == 0: - break - image[mask] = numpy.nan - mean = numpy.nanmean(image, axis=0) - std = numpy.nanstd(image, axis=0) - - result = Integrate1dResult(res2d.radial, mean, std) - result._set_method_called("sigma_clip") - result._set_compute_engine(str(method)) - result._set_percentile(thres) - result._set_npt_azim(npt_azim) - result._set_unit(unit) - 
result._set_has_mask_applied(res2d.has_mask_applied) - result._set_metadata(metadata) - result._set_has_dark_correction(res2d.has_dark_correction) - result._set_has_flat_correction(res2d.has_flat_correction) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - return result - - _sigma_clip_legacy = sigma_clip_legacy - - def sigma_clip_ng(self, data, - npt=1024, - correctSolidAngle=True, - polarization_factor=None, - variance=None, - error_model=ErrorModel.NO, - radial_range=None, - azimuth_range=None, - dark=None, - flat=None, - absorption=None, - method=("no", "csr", "cython"), - unit=units.Q, - thres=5.0, - max_iter=5, - dummy=None, - delta_dummy=None, - mask=None, - normalization_factor=1.0, - metadata=None, - safe=True, - **kwargs): - """Performs iteratively the 1D integration with variance propagation - and performs a sigm-clipping at each iteration, i.e. - all pixel which intensity differs more than thres*std is - discarded for next iteration. - - Keep only pixels with intensty: - - ``|I - | < thres * σ(I)`` - - This enforces a symmetric, bell-shaped distibution (i.e. gaussian-like) - and is very good at extracting background or amorphous isotropic scattering - out of Bragg peaks. - - :param data: input image as numpy array - :param npt_rad: number of radial points - :param bool correctSolidAngle: correct for solid angle of each pixel if True - :param float polarization_factor: polarization factor between: - -1 (vertical) - +1 (horizontal). - - 0 for circular polarization or random, - - None for no correction, - - True for using the former correction - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - - :param ndarray dark: dark noise image - :param ndarray flat: flat field image - :param ndarray absorption: Detector absorption (image) - :param ndarray variance: the variance of the signal - :param str error_model: can be "poisson" to assume a poissonian detector (variance=I) or "azimuthal" to take the std² in each ring (better, more expenive) - :param unit: unit to be used for integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param thres: cut-off for n*sigma: discard any values with (I-)/sigma > thres. - :param max_iter: maximum number of iterations - :param mask: masked out pixels array - :param float normalization_factor: Value of a normalization monitor - :param metadata: any other metadata, - :type metadata: JSON serializable dict - :param safe: set to False to skip some tests - :return: Integrate1D like result like - - The difference with the previous `sigma_clip_legacy` implementation is that there is no 2D regrouping. - Pixel splitting should be avoided with this implementation. - The standard deviation is usually smaller than previously and the signal cleaner. - It is also slightly faster. - - The case neither `error_model`, nor `variance` is provided, fall-back on a poissonian model. 
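The NumPy fall-back of sigma_clip_legacy shown above boils down to an iterative, row-wise rejection of outliers. A simplified, self-contained sketch of that loop (an illustration of the technique, not the pyFAI implementation itself):

    import numpy

    def sigma_clip_rows(cake, sigma_lo=3.0, sigma_hi=3.0, max_iter=5):
        """Clip along axis 0 (azimuth) and return mean/std per radial bin."""
        img = cake.astype(float).copy()
        for _ in range(max_iter):
            with numpy.errstate(invalid="ignore", divide="ignore"):
                mean = numpy.nanmean(img, axis=0)
                std = numpy.nanstd(img, axis=0)
                delta = (img - mean) / std
            outliers = (delta > sigma_hi) | (delta < -sigma_lo)
            if not outliers.any():
                break
            img[outliers] = numpy.nan        # discard clipped pixels
        return numpy.nanmean(img, axis=0), numpy.nanstd(img, axis=0)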
- - """ - for k in kwargs: - if k == "npt_azim": - logger.warning("'npt_azim' argument is not used in sigma_clip_ng as not 2D intergration is performed anymore") - else: - logger.warning("Got unknown argument %s %s", k, kwargs[k]) - - error_model = ErrorModel.parse(error_model) - if variance is not None: - assert variance.size == data.size - error_model = ErrorModel.VARIANCE - - unit = units.to_unit(unit) - if radial_range: - radial_range = tuple(radial_range[i] / unit.scale for i in (0, -1)) - if azimuth_range is not None: - azimuth_range = self.normalize_azimuth_range(azimuth_range) - - method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) - if method.split != "no": - logger.warning("Method %s is using a pixel-splitting scheme. sigma_clip_ng should be use WITHOUT PIXEL-SPLITTING! Your results are likely to be wrong!", - method) - - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "user provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - if correctSolidAngle: - solidangle = self.solidAngleArray(data.shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(data.shape, polarization_factor, with_checksum=True) - - if (method.algo_lower == "csr"): - "This is the only method implemented for now ..." - # Prepare LUT if needed! - # initialize the CSR integrator in Cython as it may be needed later on. 
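As the block above shows, dark, flat and mask fall back to the detector object when they are not passed explicitly. A sketch of setting them once so that every subsequent call picks them up (all array names are hypothetical, and assigning `detector.mask` assumes the detector exposes a writable mask as the properties below suggest):

    import pyFAI
    ai = pyFAI.load("calib.poni")        # hypothetical calibration file
    ai.set_darkcurrent(dark_frame)       # hypothetical dark image
    ai.set_flatfield(flat_frame)         # hypothetical flat-field image
    ai.detector.mask = bad_pixels        # hypothetical mask, non-zero = masked
    res = ai.sigma_clip_ng(img, npt=1000, error_model="poisson")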
- cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] - if cython_method not in self.engines: - cython_engine = self.engines[cython_method] = Engine() - else: - cython_engine = self.engines[cython_method] - with cython_engine.lock: - # Validate that the engine used is the proper one - cython_integr = cython_engine.engine - cython_reset = None - if cython_integr is None: - cython_reset = "of first initialization" - if (not cython_reset) and safe: - if cython_integr.unit != unit: - cython_reset = "unit was changed" - if cython_integr.bins != npt: - cython_reset = "number of points changed" - if cython_integr.size != data.size: - cython_reset = "input image size changed" - if cython_integr.empty != self._empty: - cython_reset = "empty value changed " - if (mask is not None) and (not cython_integr.check_mask): - cython_reset = "mask but CSR was without mask" - elif (mask is None) and (cython_integr.check_mask): - cython_reset = "no mask but CSR has mask" - elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): - cython_reset = "mask changed" - if (radial_range is None) and (cython_integr.pos0_range is not None): - cython_reset = "radial_range was defined in CSR" - elif (radial_range is not None) and cython_integr.pos0_range != (min(radial_range), max(radial_range)): - cython_reset = "radial_range is defined but not the same as in CSR" - if (azimuth_range is None) and (cython_integr.pos1_range is not None): - cython_reset = "azimuth_range not defined and CSR had azimuth_range defined" - elif (azimuth_range is not None) and cython_integr.pos1_range != (min(azimuth_range), max(azimuth_range)): - cython_reset = "azimuth_range requested and CSR's azimuth_range don't match" - if cython_reset: - logger.info("AI.sigma_clip_ng: Resetting Cython integrator because %s", cython_reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - cython_integr = self.setup_sparse_integrator(data.shape, npt, mask=mask, - mask_checksum=mask_crc, - unit=unit, split=split, algo="CSR", - pos0_range=radial_range, - pos1_range=azimuth_range, - empty=self._empty, - scale=False) - except MemoryError: # CSR method is hungry... - logger.warning("MemoryError: falling back on forward implementation") - cython_integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - cython_engine.set_engine(cython_integr) - if method not in self.engines: - # instanciated the engine - engine = self.engines[method] = Engine() - else: - engine = self.engines[method] - with engine.lock: - # Validate that the engine used is the proper one - integr = engine.engine - reset = None - # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
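The reset bookkeeping above exists so that the costly CSR matrix is built only once and then reused. A sketch of the intended pattern when processing a stack of frames, assuming the `ai` integrator from the earlier sketches and a hypothetical `frames` iterable of detector images:

    results = []
    for frame in frames:                          # hypothetical image stack
        # same npt, unit, mask and ranges: the cached CSR engine is reused
        results.append(ai.sigma_clip_ng(frame, npt=1000,
                                        method=("no", "csr", "cython")))
    ai.reset_engines()                            # free the cached engines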
- - # Validate that the engine used is the proper one - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit was changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if integr.empty != self._empty: - reset = "empty value changed " - if (mask is not None) and (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - # TODO - if (radial_range is None) and (integr.pos0_range is not None): - reset = "radial_range was defined in CSR" - elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): - reset = "radial_range is defined but not the same as in CSR" - if (azimuth_range is None) and (integr.pos1_range is not None): - reset = "azimuth_range not defined and CSR had azimuth_range defined" - elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): - reset = "azimuth_range requested and CSR's azimuth_range don't match" - - if reset: - logger.info("ai.sigma_clip_ng: Resetting ocl_csr integrator because %s", reset) - csr_integr = self.engines[cython_method].engine - if method.impl_lower == "opencl": - try: - integr = method.class_funct_ng.klass(csr_integr.lut, - image_size=data.size, - checksum=csr_integr.lut_checksum, - empty=self._empty, - unit=unit, - mask_checksum=csr_integr.mask_checksum, - bin_centers=csr_integr.bin_centers, - platformid=method.target[0], - deviceid=method.target[1]) - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - # Copy some properties from the cython integrator - integr.pos0_range = csr_integr.pos0_range - integr.pos1_range = csr_integr.pos1_range - engine.set_engine(integr) - elif method.impl_lower in ("python", "cython"): - integr = method.class_funct_ng.klass(lut=csr_integr.lut, - image_size=data.size, - empty=self._empty, - unit=unit, - mask_checksum=csr_integr.mask_checksum, - bin_centers=csr_integr.bin_centers) - # Copy some properties from the cython integrator - integr.pos0_range = csr_integr.pos0_range - integr.pos1_range = csr_integr.pos1_range - engine.set_engine(integr) - else: - logger.error(f"Implementation {method.impl_lower} not supported") - else: - integr = self.engines[method].engine - kwargs = {"dark":dark, "dummy":dummy, "delta_dummy":delta_dummy, - "variance":variance, "dark_variance":None, - "flat":flat, "solidangle":solidangle, "polarization":polarization, "absorption":absorption, - "error_model":error_model, "normalization_factor":normalization_factor, - "cutoff":thres, "cycle":max_iter} - - intpl = integr.sigma_clip(data, **kwargs) - else: - raise RuntimeError("Not yet implemented. 
Sorry") - result = Integrate1dResult(intpl.position * unit.scale, intpl.intensity, intpl.sem) - result._set_method_called("sigma_clip_ng") - result._set_method(method) - result._set_compute_engine(str(method)) - result._set_percentile(thres) - result._set_unit(unit) - result._set_has_mask_applied(has_mask) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_metadata(metadata) - result._set_sum_signal(intpl.signal) - result._set_sum_normalization(intpl.normalization) - result._set_sum_normalization2(intpl.norm_sq) - result._set_std(intpl.std) - result._set_sem(intpl.sem) - result._set_sum_variance(intpl.variance) - result._set_count(intpl.count) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_error_model(error_model) - return result - - sigma_clip = sigma_clip_ng - - def separate(self, data, npt_rad=1024, npt_azim=512, unit="2th_deg", method="splitpixel", - percentile=50, mask=None, restore_mask=True): - """ - Separate bragg signal from powder/amorphous signal using azimuthal integration, - median filering and projected back before subtraction. - - :param data: input image as numpy array - :param npt_rad: number of radial points - :param npt_azim: number of azimuthal points - :param unit: unit to be used for integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param percentile: which percentile use for cutting out - :param mask: masked out pixels array - :param restore_mask: masked pixels have the same value as input data provided - :return: SeparateResult which the bragg & amorphous signal - - Note: the filtered 1D spectrum can be retrieved from - SeparateResult.radial and SeparateResult.intensity - """ - - filter_result = self.medfilt1d(data, npt_rad=npt_rad, npt_azim=npt_azim, - unit=unit, method=method, - percentile=percentile, mask=mask) - # This takes 100ms and is the next to be optimized. 
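A minimal end-to-end sketch of the sigma-clipped 1D integration assembled above, typically used to extract the isotropic background under Bragg peaks; the file names are hypothetical:

    import fabio, pyFAI
    ai = pyFAI.load("calib.poni")                 # hypothetical calibration file
    img = fabio.open("frame_0001.edf").data       # hypothetical detector image
    res = ai.sigma_clip_ng(img, npt=1000, unit="q_nm^-1",
                           error_model="azimuthal", thres=4.0, max_iter=5)
    q, background, sigma = res.radial, res.intensity, res.sigma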
- amorphous = self.calcfrom1d(filter_result.radial, filter_result.intensity, - data.shape, mask=None, - dim1_unit=unit, - correctSolidAngle=True) - bragg = data - amorphous - if restore_mask: - wmask = numpy.where(mask) - maskdata = data[wmask] - bragg[wmask] = maskdata - amorphous[wmask] = maskdata - - result = SeparateResult(bragg, amorphous) - result._radial = filter_result.radial - result._intensity = filter_result.intensity - result._sigma = filter_result.sigma - - result._set_sum_signal(filter_result.sum_signal) - result._set_sum_variance(filter_result.sum_variance) - result._set_sum_normalization(filter_result.sum_normalization) - result._set_count(filter_result.count) - - result._set_method_called("medfilt1d") - result._set_compute_engine(str(method)) - result._set_percentile(percentile) - result._set_npt_azim(npt_azim) - result._set_unit(unit) - result._set_has_mask_applied(filter_result.has_mask_applied) - result._set_metadata(filter_result.metadata) - result._set_has_dark_correction(filter_result.has_dark_correction) - result._set_has_flat_correction(filter_result.has_flat_correction) - - # TODO when switching to sigma-clipped filtering - # result._set_polarization_factor(polarization_factor) - # result._set_normalization_factor(normalization_factor) - - return result - - def inpainting(self, data, mask, npt_rad=1024, npt_azim=512, - unit="r_m", method="splitpixel", poissonian=False, - grow_mask=3): - """Re-invent the values of masked pixels - - :param data: input image as 2d numpy array - :param mask: masked out pixels array - :param npt_rad: number of radial points - :param npt_azim: number of azimuthal points - :param unit: unit to be used for integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param poissonian: If True, add some poisonian noise to the data to make - then more realistic - :param grow_mask: grow mask in polar coordinated to accomodate pixel - splitting algoritm - :return: inpainting object which contains the restored image as .data - """ - from .ext import inpainting - dummy = -1 - delta_dummy = 0.9 - method = IntegrationMethod.select_one_available(method, dim=2, - default=self.DEFAULT_METHOD_2D) - - assert mask.shape == self.detector.shape - mask = numpy.ascontiguousarray(mask, numpy.int8) - blank_data = numpy.zeros(mask.shape, dtype=numpy.float32) - ones_data = numpy.ones(mask.shape, dtype=numpy.float32) - - to_mask = numpy.where(mask) - - blank_mask = numpy.zeros_like(mask) - masked = numpy.zeros(mask.shape, dtype=numpy.float32) - masked[to_mask] = dummy - - masked_data = data.astype(numpy.float32) # explicit copy - masked_data[to_mask] = dummy - - if self.chiDiscAtPi: - azimuth_range = (-180, 180) - else: - azimuth_range = (0, 360) - r = self.array_from_unit(typ="corner", unit=unit, scale=True) - rmax = (1.0 + numpy.finfo(numpy.float32).eps) * r[..., 0].max() - kwargs = {"npt_rad": npt_rad, - "npt_azim": npt_azim, - "unit": unit, - "dummy": dummy, - "delta_dummy": delta_dummy, - "method": method, - "correctSolidAngle": False, - "azimuth_range": azimuth_range, - "radial_range": (0, rmax), - "polarization_factor": None, - # Nullify the masks to avoid to use the detector once - "dark": blank_mask, - "mask": blank_mask, - "flat": ones_data} - - imgb = self.integrate2d(blank_data, **kwargs) - imgp = self.integrate2d(masked, **kwargs) - imgd = self.integrate2d(masked_data, **kwargs) - omask = numpy.ascontiguousarray(numpy.round(imgb.intensity / dummy), numpy.int8) - imask = 
numpy.ascontiguousarray(numpy.round(imgp.intensity / dummy), numpy.int8) - to_paint = (imask - omask) - - if grow_mask: - # inpaint a bit more than needed to avoid "side" effects. - from scipy.ndimage import binary_dilation - structure = [[1], [1], [1]] - to_paint = binary_dilation(to_paint, structure=structure, iterations=grow_mask) - to_paint = to_paint.astype(numpy.int8) - - polar_inpainted = inpainting.polar_inpaint(imgd.intensity, - to_paint, omask, 0) - r = self.array_from_unit(typ="center", unit=unit, scale=True) - chi = numpy.rad2deg(self.chiArray()) - cart_inpatined = inpainting.polar_interpolate(data, mask, - r, - chi, - polar_inpainted, - imgd.radial, imgd.azimuthal) - - if poissonian: - res = data.copy() - res[to_mask] = numpy.random.poisson(cart_inpatined[to_mask]) - else: - res = cart_inpatined - return res - - def guess_max_bins(self, redundancy=1, search_range=None, unit="q_nm^-1", radial_range=None, azimuth_range=None): - """ - Guess the maximum number of bins, considering the excpected minimum redundancy: - - :param redundancy: minimum number of pixel per bin - :param search_range: the minimum and maximun number of bins to be considered - :param unit: the unit to be considered like "2th_deg" or "q_nm^-1" - :param radial_range: radial range to be considered, depends on unit ! - :param azimuth_range: azimuthal range to be considered - :return: the minimum bin number providing the provided redundancy - """ - img = numpy.empty(self.detector.shape, dtype=numpy.float32) - dia = int(numpy.sqrt(img.shape[0] ** 2 + img.shape[1] ** 2)) - method = self._normalize_method(("no", "histogram", "cython"), dim=1, default=self.DEFAULT_METHOD_1D) - unit = units.to_unit(unit) - if search_range is None: - ref = self.integrate1d(img, dia, method=method, unit=unit, - azimuth_range=azimuth_range, radial_range=radial_range).count.min() - if ref >= redundancy: - search_range = (dia, 4 * dia) - else: - search_range = (2, dia) - - for i in range(*search_range): - mini = self.integrate1d(img, i, method=method, unit=unit, - azimuth_range=azimuth_range, radial_range=radial_range).count.min() - if mini < redundancy: - return i - 1 - - def guess_polarization(self, img, npt_rad=None, npt_azim=360, unit="2th_deg", - method=("no", "csr", "cython"), target_rad=None): - """Guess the polarization factor for the given image - - For this one performs several integration with different polarization factors - and take the one with the lowest std along the outer-most ring. - - :param img: diffraction image, preferable with beam-stop centered. - :param npt_rad: number of point in the radial dimension, can be guessed, better avoid oversampling. - :param npt_azim: number of point in the azimuthal dimension, 1 per degree is usually OK - :param unit: radial unit for the integration - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation). The default one is pretty optimal: no splitting, CSR for the speed of the integration - :param target_rad: position of the outer-most complete ring, can be guessed. 
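A sketch of the inpainting entry point defined above, which re-invents the values under a mask by interpolating in polar coordinates; the image and the mask region are hypothetical and the mask must match the detector shape:

    import numpy, pyFAI
    ai = pyFAI.load("calib.poni")                 # hypothetical calibration file
    gap = numpy.zeros(ai.detector.shape, dtype=numpy.int8)
    gap[10:20, :] = 1                             # pretend a stripe of pixels is dead
    restored = ai.inpainting(img, gap, npt_rad=1024, npt_azim=512,
                             poissonian=True, grow_mask=3)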
- :return: polarization factor (#, polarization angle) - """ - if npt_rad is None: - if self.detector.shape is None: - self.detector.shape = img.shape - npt_rad = self.guess_npt_rad() - - res = self.integrate2d_ng(img, npt_rad, npt_azim, unit=unit, method=method) - - if target_rad is None: - azimuthal_range = (res.count > 0).sum(axis=0) - azim_min = azimuthal_range.max() * 0.95 - valid_rings = numpy.where(azimuthal_range > azim_min)[0] - nbpix = res.count.sum(axis=0)[valid_rings] - bin_idx = valid_rings[numpy.where(nbpix.max() == nbpix)[0][-1]] - else: - bin_idx = numpy.argmin(abs(res.radial - target_rad)) - - from scipy.optimize import minimize_scalar - sfun = lambda p:\ - self.integrate2d_ng(img, npt_rad, npt_azim, unit=unit, method=method, - polarization_factor=p).intensity[:, bin_idx].std() - opt = minimize_scalar(sfun, bounds=[-1, 1]) - logger.info(str(opt)) - return opt.x - -################################################################################ -# Some properties -################################################################################ - - def set_darkcurrent(self, dark): - self.detector.set_darkcurrent(dark) - - def get_darkcurrent(self): - return self.detector.get_darkcurrent() - - darkcurrent = property(get_darkcurrent, set_darkcurrent) - - def set_flatfield(self, flat): - self.detector.set_flatfield(flat) - - def get_flatfield(self): - return self.detector.get_flatfield() - - flatfield = property(get_flatfield, set_flatfield) - - @deprecated(reason="Not maintained", since_version="0.17") - def set_darkfiles(self, files=None, method="mean"): - """Set the dark current from one or mutliple files, avaraged - according to the method provided. - - Moved to Detector. - - :param files: file(s) used to compute the dark. - :type files: str or list(str) or None - :param method: method used to compute the dark, "mean" or "median" - :type method: str - """ - self.detector.set_darkfiles(files, method) - - @property - @deprecated(reason="Not maintained", since_version="0.17") - def darkfiles(self): - return self.detector.darkfiles - - @deprecated(reason="Not maintained", since_version="0.17") - def set_flatfiles(self, files, method="mean"): - """Set the flat field from one or mutliple files, averaged - according to the method provided. - - Moved to Detector. - - :param files: file(s) used to compute the flat-field. 
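The polarization guess defined above minimises the azimuthal spread along the outer-most complete ring. A sketch of feeding its estimate back into the integration (file names hypothetical):

    import fabio, pyFAI
    ai = pyFAI.load("calib.poni")                 # hypothetical calibration file
    img = fabio.open("powder.edf").data           # hypothetical powder image
    pf = ai.guess_polarization(img, npt_azim=360, unit="2th_deg")
    res = ai.integrate1d(img, 2000, unit="q_nm^-1", polarization_factor=pf)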
- :type files: str or list(str) or None - :param method: method used to compute the dark, "mean" or "median" - :type method: str - """ - self.detector.set_flatfiles(files, method) - - @property - @deprecated(reason="Not maintained", since_version="0.17") - def flatfiles(self): - return self.detector.flatfiles - - def get_empty(self): - return self._empty - - def set_empty(self, value): - self._empty = float(value) - # propagate empty values to integrators - for engine in self.engines.values(): - with engine.lock: - if engine.engine is not None: - try: - engine.engine.empty = self._empty - except Exception as exeption: - logger.error(exeption) - - empty = property(get_empty, set_empty) - - def __getnewargs_ex__(self): - "Helper function for pickling ai" - return (self.dist, self.poni1, self.poni2, - self.rot1, self.rot2, self.rot3, - self.pixel1, self.pixel2, - self.splineFile, self.detector, self.wavelength), {} - - def __getstate__(self): - """Helper function for pickling ai - - :return: the state of the object - """ - - state_blacklist = ('_lock', "engines") - state = Geometry.__getstate__(self) - for key in state_blacklist: - if key in state: - del state[key] - return state - - def __setstate__(self, state): - """Helper function for unpickling ai - :param state: the state of the object - """ - for statekey, statevalue in state.items(): - setattr(self, statekey, statevalue) - self._sem = threading.Semaphore() - self._lock = threading.Semaphore() - self.engines = {} +from .integrator.azimuthal import AzimuthalIntegrator, logger diff --git a/src/pyFAI/diffmap.py b/src/pyFAI/diffmap.py index c0df58a6f..7016ec529 100644 --- a/src/pyFAI/diffmap.py +++ b/src/pyFAI/diffmap.py @@ -31,7 +31,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "27/09/2024" +__date__ = "09/10/2024" __status__ = "development" __docformat__ = 'restructuredtext' @@ -48,7 +48,8 @@ import json import __main__ as main from .opencl import ocl -from . import version as PyFAI_VERSION, date as PyFAI_DATE, load, load_integrators +from . 
import version as PyFAI_VERSION, date as PyFAI_DATE, load +from .integrator.load import PREFERED_METHODS_2D, PREFERED_METHODS_1D from .io import Nexus, get_isotime, h5py from .worker import Worker, _reduce_images from .method_registry import Method, IntegrationMethod @@ -267,9 +268,9 @@ def parse(self, sysargv=None, with_config=False): ai["opencl_device"] = ocl.select_device(type="gpu") ndim = ai.get("do_2D", 1) if ndim==2: - default = load_integrators.PREFERED_METHODS_2D[0].method[1:-1] + default = PREFERED_METHODS_2D[0].method[1:-1] else: - default = load_integrators.PREFERED_METHODS_1D[0].method[1:-1] + default = PREFERED_METHODS_1D[0].method[1:-1] method = list(ai.get("method", default)) if len(method) == 3: # (split, algo, impl) method[2] = "opencl" diff --git a/src/pyFAI/integrator/__init__.py b/src/pyFAI/integrator/__init__.py new file mode 100644 index 000000000..d26c923a5 --- /dev/null +++ b/src/pyFAI/integrator/__init__.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Project: Azimuthal integration +# https://github.com/silx-kit/pyFAI +# +# Copyright (C) 2024-2024 European Synchrotron Radiation Facility, Grenoble, France +# +# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# . +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# . +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +__author__ = "Jérôme Kieffer" +__contact__ = "Jerome.Kieffer@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "09/10/2024" +__status__ = "stable" +__docformat__ = 'restructuredtext' + +""" +This sub-module contains all types of integrator: +* azimuthal +* fiber +* ... 
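The net effect of the reorganisation for users: the former module is kept as a thin compatibility shim, so both import paths resolve to the same class:

    from pyFAI.azimuthalIntegrator import AzimuthalIntegrator as OldAI
    from pyFAI.integrator.azimuthal import AzimuthalIntegrator as NewAI
    assert OldAI is NewAI    # the old module re-imports from the new location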
+""" +from .load import * +# from .azimuthal import AzimuthalIntegrator +# from .fiber import FiberIntegrator diff --git a/src/pyFAI/integrator/azimuthal.py b/src/pyFAI/integrator/azimuthal.py new file mode 100644 index 000000000..2a4588c20 --- /dev/null +++ b/src/pyFAI/integrator/azimuthal.py @@ -0,0 +1,3641 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Project: Azimuthal integration +# https://github.com/silx-kit/pyFAI +# +# Copyright (C) 2012-2024 European Synchrotron Radiation Facility, Grenoble, France +# +# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# . +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# . +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +__author__ = "Jérôme Kieffer" +__contact__ = "Jerome.Kieffer@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "09/10/2024" +__status__ = "stable" +__docformat__ = 'restructuredtext' + +import logging +logger = logging.getLogger(__name__) +import warnings +import threading +import gc +from math import pi, log +import numpy +from ..geometry import Geometry +from .. import units +from ..utils import EPS32, deg2rad, crc32 +from ..utils.decorators import deprecated, deprecated_warning +from ..containers import Integrate1dResult, Integrate2dResult, SeparateResult, ErrorModel +from ..io import DefaultAiWriter, save_integrate_result +from ..io.ponifile import PoniFile +error = None +from ..method_registry import IntegrationMethod + +from .load import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \ + splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ + histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ + PREFERED_METHODS_1D, PREFERED_METHODS_2D + +from ..engines import Engine + +# Few constants for engine names: +OCL_CSR_ENGINE = "ocl_csr_integr" +OCL_LUT_ENGINE = "ocl_lut_integr" +OCL_HIST_ENGINE = "ocl_histogram" +OCL_SORT_ENGINE = "ocl_sorter" +EXT_LUT_ENGINE = "lut_integrator" +EXT_CSR_ENGINE = "csr_integrator" + + +class AzimuthalIntegrator(Geometry): + """ + This class is an azimuthal integrator based on P. Boesecke's + geometry and histogram algorithm by Manolo S. 
del Rio and V.A Sole + + All geometry calculation are done in the Geometry class + + main methods are: + + >>> tth, I = ai.integrate1d(data, npt, unit="2th_deg") + >>> q, I, sigma = ai.integrate1d(data, npt, unit="q_nm^-1", error_model="poisson") + >>> regrouped = ai.integrate2d(data, npt_rad, npt_azim, unit="q_nm^-1")[0] + """ + + DEFAULT_METHOD_1D = PREFERED_METHODS_1D[0] + DEFAULT_METHOD_2D = PREFERED_METHODS_2D[0] + "Fail-safe low-memory integrator" + + USE_LEGACY_MASK_NORMALIZATION = True + """If true, the Python engine integrator will normalize the mask to use the + most frequent value of the mask as the non-masking value. + + This behaviour is not consistant with other engines and is now deprecated. + This flag will be turned off in the comming releases. + + Turning off this flag force the user to provide a mask with 0 as non-masking + value. And any non-zero as masking value (negative or positive value). A + boolean mask is also accepted (`True` is the masking value). + """ + + def __init__(self, dist=1, poni1=0, poni2=0, + rot1=0, rot2=0, rot3=0, + pixel1=None, pixel2=None, + splineFile=None, detector=None, wavelength=None, orientation=0): + """ + :param dist: distance sample - detector plan (orthogonal distance, not along the beam), in meter. + :type dist: float + :param poni1: coordinate of the point of normal incidence along the detector's first dimension, in meter + :type poni1: float + :param poni2: coordinate of the point of normal incidence along the detector's second dimension, in meter + :type poni2: float + :param rot1: first rotation from sample ref to detector's ref, in radians + :type rot1: float + :param rot2: second rotation from sample ref to detector's ref, in radians + :type rot2: float + :param rot3: third rotation from sample ref to detector's ref, in radians + :type rot3: float + :param pixel1: Deprecated. Pixel size of the fist dimension of the detector, in meter. + If both pixel1 and pixel2 are not None, detector pixel size is overwritten. + Prefer defining the detector pixel size on the provided detector object. + Prefer defining the detector pixel size on the provided detector + object (``detector.pixel1 = 5e-6``). + :type pixel1: float + :param pixel2: Deprecated. Pixel size of the second dimension of the detector, in meter. + If both pixel1 and pixel2 are not None, detector pixel size is overwritten. + Prefer defining the detector pixel size on the provided detector + object (``detector.pixel2 = 5e-6``). + :type pixel2: float + :param splineFile: Deprecated. File containing the geometric distortion of the detector. + If not None, pixel1 and pixel2 are ignored and detector spline is overwritten. + Prefer defining the detector spline manually + (``detector.splineFile = "file.spline"``). + :type splineFile: str + :param detector: name of the detector or Detector instance. String + description is deprecated. 
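Since passing a detector name as a string is deprecated (see the constructor docstring above), a sketch of instantiating the class from its new location with a detector built by the factory; the geometry values are placeholders:

    import pyFAI
    from pyFAI.integrator.azimuthal import AzimuthalIntegrator

    det = pyFAI.detector_factory("eiger4m")
    ai = AzimuthalIntegrator(dist=0.1, poni1=0.05, poni2=0.05,
                             detector=det, wavelength=1e-10)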
Prefer using the result of the detector + factory: ``pyFAI.detector_factory("eiger4m")`` + :type detector: str or pyFAI.Detector + :param float wavelength: Wave length used in meter + :param int orientation: orientation of the detector, see pyFAI.detectors.orientation.Orientation + """ + Geometry.__init__(self, dist, poni1, poni2, + rot1, rot2, rot3, + pixel1, pixel2, splineFile, detector, wavelength, orientation) + + # mask, maskfile, darkcurrent and flatfield are properties pointing to + # self.detector now (16/06/2017) + + self._lock = threading.Semaphore() + self.engines = {} # key: name of the engine, + + self._empty = 0.0 + + def reset(self, collect_garbage=True): + """Reset azimuthal integrator in addition to other arrays. + + :param collect_garbage: set to False to prevent garbage collection, faster + """ + Geometry.reset(self, collect_garbage=False) + self.reset_engines(collect_garbage) + + def reset_engines(self, collect_garbage=True): + """Urgently free memory by deleting all regrid-engines + + :param collect_garbage: set to False to prevent garbage collection, faster + """ + with self._lock: + for key in list(self.engines.keys()): # explicit copy + self.engines.pop(key).reset() + if collect_garbage: + gc.collect() + + def create_mask(self, data, mask=None, + dummy=None, delta_dummy=None, + unit=None, radial_range=None, + azimuth_range=None, + mode="normal"): + """ + Combines various masks into another one. + + :param data: input array of data + :type data: ndarray + :param mask: input mask (if none, self.mask is used) + :type mask: ndarray + :param dummy: value of dead pixels + :type dummy: float + :param delta_dumy: precision of dummy pixels + :type delta_dummy: float + :param mode: can be "normal" or "numpy" (inverted) or "where" applied to the mask + :type mode: str + + :return: the new mask + :rtype: ndarray of bool + + This method combine two masks (dynamic mask from *data & + dummy* and *mask*) to generate a new one with the 'or' binary + operation. One can adjust the level, with the *dummy* and + the *delta_dummy* parameter, when you consider the *data* + values needs to be masked out. + + This method can work in two different *mode*: + + * "normal": False for valid pixels, True for bad pixels + * "numpy": True for valid pixels, false for others + * "where": does a numpy.where on the "numpy" output + + This method tries to accomodate various types of masks (like + valid=0 & masked=-1, ...) + + Note for the developper: we use a lot of numpy.logical_or in this method, + the out= argument allows to recycle buffers and save considerable time in + allocating temporary arrays. + """ + logical_or = numpy.logical_or + shape = data.shape + # ^^^^ this is why data is mandatory ! + if mask is None: + mask = self.mask + if mask is None: + mask = numpy.zeros(shape, dtype=bool) + else: + mask = mask.astype(bool) + if self.USE_LEGACY_MASK_NORMALIZATION: + if mask.sum(dtype=int) > mask.size // 2: + reason = "The provided mask is not complient with other engines. "\ + "The feature which automatically invert it will be removed soon. 
"\ + "For more information see https://github.com/silx-kit/pyFAI/pull/868" + deprecated_warning(__name__, name="provided mask content", reason=reason) + numpy.logical_not(mask, mask) + if (mask.shape != shape): + try: + mask = mask[:shape[0],:shape[1]] + except Exception as error: # IGNORE:W0703 + logger.error("Mask provided has wrong shape:" + " expected: %s, got %s, error: %s", + shape, mask.shape, error) + mask = numpy.zeros(shape, dtype=bool) + if dummy is not None: + if delta_dummy is None: + logical_or(mask, (data == dummy), out=mask) + else: + logical_or(mask, abs(data - dummy) <= delta_dummy, out=mask) + + if radial_range is not None: + assert unit, "unit is needed when building a mask based on radial_range" + if isinstance(unit, (tuple, list)) and len(unit) == 2: + radial_unit = units.to_unit(unit[0]) + else: + radial_unit = units.to_unit(unit) + rad = self.array_from_unit(shape, "center", radial_unit, scale=False) + logical_or(mask, rad < radial_range[0], out=mask) + logical_or(mask, rad > radial_range[1], out=mask) + if azimuth_range is not None: + if isinstance(unit, (tuple, list)) and len(unit) == 2: + azimuth_unit = units.to_unit(unit[1]) + chi = self.array_from_unit(shape, "center", azimuth_unit, scale=False) + logical_or(mask, chi < azimuth_range[0], out=mask) + logical_or(mask, chi > azimuth_range[1], out=mask) + + # Prepare alternative representation for output: + if mode == "numpy": + numpy.logical_not(mask, mask) + elif mode == "where": + mask = numpy.where(numpy.logical_not(mask)) + return mask + + def dark_correction(self, data, dark=None): + """ + Correct for Dark-current effects. + If dark is not defined, correct for a dark set by "set_darkfiles" + + :param data: input ndarray with the image + :param dark: ndarray with dark noise or None + :return: 2tuple: corrected_data, dark_actually used (or None) + """ + dark = dark if dark is not None else self.detector.darkcurrent + if dark is not None: + return data - dark, dark + else: + return data, None + + def flat_correction(self, data, flat=None): + """ + Correct for flat field. + If flat is not defined, correct for a flat set by "set_flatfiles" + + :param data: input ndarray with the image + :param flat: ndarray with flatfield or None for no correction + :return: 2tuple: corrected_data, flat_actually used (or None) + """ + flat = flat if flat is not None else self.detector.flatfield + if flat is not None: + return data / flat, flat + else: + return data, None + + def _normalize_method(self, method, dim, default): + """ + :rtype: IntegrationMethod + """ + requested_method = method + method = IntegrationMethod.select_one_available(method, dim=dim, default=None, degradable=False) + if method is not None: + return method + method = IntegrationMethod.select_one_available(requested_method, dim=dim, default=default, degradable=True) + logger.warning("Method requested '%s' not available. 
Method '%s' will be used", requested_method, method) + return default + + def setup_sparse_integrator(self, + shape, + npt, + mask=None, + pos0_range=None, pos1_range=None, + mask_checksum=None, unit=units.TTH, + split="bbox", algo="CSR", + empty=None, scale=True): + """ + Prepare a sparse-matrix integrator based on LUT, CSR or CSC format + + :param shape: shape of the dataset + :type shape: (int, int) + :param npt: number of points in the the output pattern + :type npt: int or (int, int) + :param mask: array with masked pixel (1=masked) + :type mask: ndarray + :param pos0_range: range in radial dimension + :type pos0_range: (float, float) + :param pos1_range: range in azimuthal dimension + :type pos1_range: (float, float) + :param mask_checksum: checksum of the mask buffer + :type mask_checksum: int (or anything else ...) + :param unit: use to propagate the LUT object for further checkings + :type unit: pyFAI.units.Unit or 2-tuple of them for 2D integration + :param split: Splitting scheme: valid options are "no", "bbox", "full" + :param algo: Sparse matrix format to use: "LUT", "CSR" or "CSC" + :param empty: override the default empty value + :param scale: set to False for working in S.I. units for pos0_range + which is faster. By default assumes pos0_range has `units` + Note that pos1_range, the chi-angle, is expected in radians + + + This method is called when a look-up table needs to be set-up. + The *shape* parameter, correspond to the shape of the original + datatset. It is possible to customize the number of point of + the output histogram with the *npt* parameter which can be + either an integer for an 1D integration or a 2-tuple of + integer in case of a 2D integration. The LUT will have a + different shape: (npt, lut_max_size), the later parameter + being calculated during the instanciation of the splitBBoxLUT + class. + + It is possible to prepare the LUT with a predefine + *mask*. This operation can speedup the computation of the + later integrations. Instead of applying the patch on the + dataset, it is taken into account during the histogram + computation. If provided the *mask_checksum* prevent the + re-calculation of the mask. When the mask changes, its + checksum is used to reset (or not) the LUT (which is a very + time consuming operation !) + + It is also possible to restrain the range of the 1D or 2D + pattern with the *pos0_range* (radial) and *pos1_range* (azimuthal). + + The *unit* parameter is just propagated to the LUT integrator + for further checkings: The aim is to prevent an integration to + be performed in 2th-space when the LUT was setup in q space. 
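A sketch of preparing such a sparse integrator explicitly, assuming an existing `ai` instance and a hypothetical `bad_pixels` mask (1 = masked) of detector shape; the returned object is the CSR engine described above:

    from pyFAI import units
    csr = ai.setup_sparse_integrator(ai.detector.shape, 1000,
                                     mask=bad_pixels, unit=units.Q,
                                     split="bbox", algo="CSR")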
+ Unit can also be a 2-tuple in the case of a 2D integration + """ + if isinstance(unit, (list, tuple)) and len(unit) == 2: + unit0, unit1 = tuple(units.to_unit(u) for u in unit) + else: + unit0 = units.to_unit(unit) + unit1 = units.CHI_DEG + if scale and pos0_range: + pos0_scale = unit0.scale + pos0_range = tuple(pos0_range[i] / pos0_scale for i in (0, -1)) + if "__len__" in dir(npt) and len(npt) == 2: + int2d = True + if scale and pos1_range: + pos1_scale = unit1.scale + pos1_range = tuple(pos1_range[i] / pos1_scale for i in (0, -1)) + else: + int2d = False + empty = self._empty if empty is None else empty + if split == "full": + pos = self.array_from_unit(shape, "corner", unit, scale=False) + else: + pos0 = self.array_from_unit(shape, "center", unit0, scale=False) + if split == "no": + dpos0 = None + else: + dpos0 = self.array_from_unit(shape, "delta", unit0, scale=False) + + pos1 = None + dpos1 = None + if int2d or pos1_range: + pos1 = self.array_from_unit(shape, "center", unit1, scale=False) + if split == "no": + dpos1 = None + else: + dpos1 = self.array_from_unit(shape, "delta", unit1, scale=False) + + if mask is None: + mask_checksum = None + else: + assert mask.shape == shape + algo = algo.upper() + if algo == "LUT": + if split == "full": + if int2d: + return splitPixelFullLUT.HistoLUT2dFullSplit(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period), + ) + else: + return splitPixelFullLUT.HistoLUT1dFullSplit(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxLUT.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period), + unit=unit, + empty=empty) + else: + return splitBBoxLUT.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + elif algo == "CSR": + if split == "full": + if int2d: + return splitPixelFullCSR.FullSplitCSR_2d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period), + ) + else: + return splitPixelFullCSR.FullSplitCSR_1d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxCSR.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + unit=unit, + empty=empty, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period) +) + else: + return splitBBoxCSR.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + elif algo == "CSC": + if split == "full": + if int2d: + return splitPixelFullCSC.FullSplitCSC_2d(pos, + bins=npt, + 
pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period) + ) + else: + return splitPixelFullCSC.FullSplitCSC_1d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxCSC.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + unit=unit, + empty=empty, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period) +) + else: + return splitBBoxCSC.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + + @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") + def setup_LUT(self, shape, npt, mask=None, + pos0_range=None, pos1_range=None, + mask_checksum=None, unit=units.TTH, + split="bbox", empty=None, scale=True): + """See documentation of setup_sparse_integrator where algo=LUT""" + return self.setup_sparse_integrator(shape, npt, mask, + pos0_range, pos1_range, + mask_checksum, unit, + split=split, algo="LUT", + empty=empty, scale=scale) + + @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") + def setup_CSR(self, shape, npt, mask=None, + pos0_range=None, pos1_range=None, + mask_checksum=None, unit=units.TTH, + split="bbox", empty=None, scale=True): + """See documentation of setup_sparse_integrator where algo=CSR""" + return self.setup_sparse_integrator(shape, npt, mask, + pos0_range, pos1_range, + mask_checksum, unit, + split=split, algo="CSR", + empty=empty, scale=scale) + + @deprecated(since_version="0.20", only_once=True, replacement="integrate1d_ng", deprecated_since="0.20.0") + def integrate1d_legacy(self, data, npt, filename=None, + correctSolidAngle=True, + variance=None, error_model=None, + radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method="csr", unit=units.Q, safe=True, + normalization_factor=1.0, + block_size=None, profile=False, metadata=None): + """Calculate the azimuthal integrated Saxs curve in q(nm^-1) by default + + Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more + + + + :param data: 2D array from the Detector/CCD camera + :type data: ndarray + :param npt: number of points in the output pattern + :type npt: int + :param filename: output filename in 2/3 column ascii format + :type filename: str + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param variance: array containing the variance of the data. If not available, no error propagation is done + :type variance: ndarray + :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :type error_model: str + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
+ :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :type mask: ndarray + :param dummy: value for dead/masked pixels + :type dummy: float + :param delta_dummy: precision for dummy value + :type delta_dummy: float + :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). + 0 for circular polarization or random, + None for no correction, + True for using the former correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :type method: can be Method named tuple, IntegrationMethod instance or str to be parsed + :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now + :type unit: pyFAI.units.Unit + :param safe: Do some extra checks to ensure LUT/CSR is still valid. False is faster. + :type safe: bool + :param normalization_factor: Value of a normalization monitor + :type normalization_factor: float + :param block_size: size of the block for OpenCL integration (unused?) + :param profile: set to True to enable profiling in OpenCL + :param all: if true return a dictionary with many more parameters (deprecated, please refer to the documentation of Integrate1dResult). + :type all: bool + :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
+ :return: q/2th/r bins center positions and regrouped intensity (and error array if variance or variance model provided) + :rtype: Integrate1dResult, dict + """ + method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) + assert method.dimension == 1 + unit = units.to_unit(unit) + + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + shape = data.shape + pos0_scale = unit.scale + + if radial_range: + radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) + if azimuth_range is not None: + azimuth_range = self.normalize_azimuth_range(azimuth_range) + + if variance is not None: + assert variance.size == data.size + elif error_model: + error_model = error_model.lower() + if error_model == "poisson": + variance = numpy.ascontiguousarray(data, numpy.float32) + + if correctSolidAngle: + solidangle = self.solidAngleArray(shape, correctSolidAngle) + else: + solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + I = None + sigma = None + count = None + sum_ = None + + if method.algo_lower == "lut": + if EXT_LUT_ENGINE not in self.engines: + engine = self.engines[EXT_LUT_ENGINE] = Engine() + else: + engine = self.engines[EXT_LUT_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and\ + (not integr.check_mask): + reset = "mask but LUT was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but LUT has mask" + elif (mask is not None) and\ + (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and\ + (integr.pos0_range is not None): + reset = "radial_range was defined in LUT" + elif (radial_range is not None) and\ + (integr.pos0_range != radial_range): + reset = ("radial_range is defined" + " but not the same as in LUT") + if (azimuth_range is None) and\ + (integr.pos1_range is not None): + reset = ("azimuth_range not defined and" + " LUT had azimuth_range defined") + elif (azimuth_range is not None) and\ + (integr.pos1_range != azimuth_range[0]): + reset = ("azimuth_range requested and" + " LUT's azimuth_range don't match") + if reset: + logger.info("AI.integrate1d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="LUT", + scale=False) + + except MemoryError: + # LUT method is hungry... 
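                        # Building a pixel-splitting LUT for a large detector can exhaust the
                        # available memory: in that case every cached engine is dropped and the
                        # integration falls back to the fail-safe self.DEFAULT_METHOD_1D below.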
+ logger.warning("MemoryError: falling back on default forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + if integr: + if method.impl_lower == "opencl": + # TODO: manage the target + if OCL_LUT_ENGINE in self.engines: + ocl_engine = self.engines[OCL_LUT_ENGINE] + else: + ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() + with ocl_engine.lock: + if method.target is not None: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["lut"] != integr.lut_checksum): + ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + if ocl_integr is not None: + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor) + qAxis = integr.bin_centers # this will be copied later + if error_model == "azimuthal": + + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + var1d, a, b = ocl_integr.integrate_legacy(variance, + solidangle=None, + dummy=dummy, + delta_dummy=delta_dummy, + normalization_factor=1.0, + coef_power=2) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + else: + qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = integr.integrate_legacy(variance, + solidAngle=None, + dummy=dummy, + delta_dummy=delta_dummy, + coef_power=2, + normalization_factor=1.0) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.algo_lower == "csr": + if EXT_CSR_ENGINE not in self.engines: + engine = self.engines[EXT_CSR_ENGINE] = Engine() + else: + engine = self.engines[EXT_CSR_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and\ + (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and\ + (integr.mask_checksum != mask_crc): + reset = "mask changed" + if radial_range != integr.pos0_range: + reset = "radial_range changed" + if azimuth_range != integr.pos1_range: + reset = "azimuth_range changed" + if reset: + logger.info("AI.integrate1d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + 
pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="CSR", + scale=False) + except MemoryError: # CSR method is hungry... + logger.warning("MemoryError: falling back on forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + if integr: + if method.impl_lower == "opencl": + # TODO: manage OpenCL targets + if OCL_CSR_ENGINE not in self.engines: + self.engines[OCL_CSR_ENGINE] = Engine() + ocl_engine = self.engines[OCL_CSR_ENGINE] + with ocl_engine.lock: + if method.target is not None: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["data"] != integr.lut_checksum): + ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum, + block_size=block_size, + profile=profile) + ocl_engine.set_engine(ocl_integr) + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor) + qAxis = integr.bin_centers # this will be copied later + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + var1d, a, b = ocl_integr.integrate_legacy(variance, + solidangle=None, + dummy=dummy, + delta_dummy=delta_dummy) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + else: + qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = integr.integrate_legacy(variance, + solidAngle=None, + dummy=dummy, + delta_dummy=delta_dummy, + normalization_factor=1.0) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.method[1:4] == ("full", "histogram", "cython"): + logger.debug("integrate1d uses SplitPixel implementation") + pos = self.array_from_unit(shape, "corner", unit, scale=False) + qAxis, I, sum_, count = splitPixel.fullSplit1D(pos=pos, + weights=data, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor + ) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = splitPixel.fullSplit1D(pos=pos, + weights=variance, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + normalization_factor=1.0 + ) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not 
None else self._empty + + if method.method[1:4] == ("bbox", "histogram", "cython"): + logger.debug("integrate1d uses BBox implementation") + if azimuth_range is not None: + chi = self.chiArray(shape) + dchi = self.deltaChi(shape) + else: + chi = None + dchi = None + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) + qAxis, I, sum_, count = splitBBox.histoBBox1d(weights=data, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = splitBBox.histoBBox1d(weights=variance, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + ) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": + # Common part for Numpy and Cython + data = data.astype(numpy.float32) + mask = self.create_mask(data, mask, dummy, delta_dummy, + unit=unit, + radial_range=radial_range, + azimuth_range=azimuth_range, + mode="where") + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + if radial_range is None: + radial_range = (pos0.min(), pos0.max()) + pos0 = pos0[mask] + if dark is not None: + data -= dark + if flat is not None: + data /= flat + if polarization is not None: + data /= polarization + if solidangle is not None: + data /= solidangle + data = data[mask] + if variance is not None: + variance = variance[mask] + + if method.impl_lower == "cython": + logger.debug("integrate1d uses cython implementation") + qAxis, I, sum_, count = histogram.histogram(pos=pos0, + weights=data, + bins=npt, + bin_range=radial_range, + pixelSize_in_Pos=0, + empty=dummy if dummy is not None else self._empty, + normalization_factor=normalization_factor) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 + if variance is not None: + _, var1d, a, b = histogram.histogram(pos=pos0, + weights=variance, + bins=npt, + bin_range=radial_range, + pixelSize_in_Pos=1, + empty=dummy if dummy is not None else self._empty) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + elif method.impl_lower == "python": + logger.debug("integrate1d uses Numpy implementation") + count, b = numpy.histogram(pos0, npt, range=radial_range) + qAxis = (b[1:] + b[:-1]) / 2.0 + sum_, b = numpy.histogram(pos0, npt, weights=data, range=radial_range) + with numpy.errstate(divide='ignore', invalid='ignore'): + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 + if variance is not None: + var1d, b = numpy.histogram(pos0, npt, weights=variance, range=radial_range) + sigma = numpy.sqrt(var1d) / (count * normalization_factor) + sigma[count == 0] = dummy if 
dummy is not None else self._empty + with numpy.errstate(divide='ignore', invalid='ignore'): + I = sum_ / count / normalization_factor + I[count == 0] = dummy if dummy is not None else self._empty + + if pos0_scale: + # not in place to make a copy + qAxis = qAxis * pos0_scale + + result = Integrate1dResult(qAxis, I, sigma) + result._set_method_called("integrate1d") + result._set_method(method) + result._set_compute_engine(str(method)) + result._set_unit(unit) + result._set_sum(sum_) + result._set_count(count) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_has_mask_applied(has_mask) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_metadata(metadata) + + if filename is not None: + save_integrate_result(filename, result) + + return result + + _integrate1d_legacy = integrate1d_legacy + + def integrate1d_ng(self, data, npt, filename=None, + correctSolidAngle=True, + variance=None, error_model=None, + radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, absorption=None, + method=("bbox", "csr", "cython"), unit=units.Q, safe=True, + normalization_factor=1.0, + metadata=None): + """Calculate the azimuthal integration (1d) of a 2D image. + + Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more + Takes extra care of normalization and performs proper variance propagation. + + :param ndarray data: 2D array from the Detector/CCD camera + :param int npt: number of points in the output pattern + :param str filename: output filename in 2/3 column ascii format + :param bool correctSolidAngle: correct for solid angle of each pixel if True + :param ndarray variance: array containing the variance of the data. + :param str error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (min, max). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (min, max). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param ndarray mask: array with 0 for valid pixels, all other are masked (static mask) + :param float dummy: value for dead/masked pixels (dynamic mask) + :param float delta_dummy: precision for dummy value + :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). + 0 for circular polarization or random, + None for no correction, + True for using the former correction + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param ndarray absorption: absorption correction image + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param Unit unit: Output units, can be "q_nm^-1" (default), "2th_deg", "r_mm" for now. + :param bool safe: Perform some extra checks to ensure LUT/CSR is still valid. False is faster. + :param float normalization_factor: Value of a normalization monitor + :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
+ :param ndarray absorption: detector absorption + :return: Integrate1dResult namedtuple with (q,I,sigma) +extra informations in it. + """ + method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) + assert method.dimension == 1 + unit = units.to_unit(unit) + empty = dummy if dummy is not None else self._empty + shape = data.shape + pos0_scale = unit.scale + + if radial_range: + radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) + if azimuth_range is not None: + azimuth_range = self.normalize_azimuth_range(azimuth_range) + + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "user provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + if correctSolidAngle: + solidangle = self.solidAngleArray(shape, correctSolidAngle) + solidangle_crc = self._cached_array[f"solid_angle#{self._dssa_order}_crc"] + else: + solidangle_crc = solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + error_model = ErrorModel.parse(error_model) + if variance is not None: + assert variance.size == data.size + error_model = ErrorModel.VARIANCE + if error_model.poissonian and not method.manage_variance: + error_model = ErrorModel.VARIANCE + if dark is None: + variance = numpy.maximum(data, 1.0).astype(numpy.float32) + else: + variance = (numpy.maximum(data, 1.0) + numpy.maximum(dark, 0.0)).astype(numpy.float32) + + # Prepare LUT if needed! + if method.algo_is_sparse: + # initialize the CSR/LUT integrator in Cython as it may be needed later on. 
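            # Whatever implementation was requested (cython, opencl or python), the sparse
            # matrix itself is always built by the Cython engine; the other implementations
            # are seeded from that same matrix and validated against its checksum further down.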
+ cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] + if cython_method not in self.engines: + cython_engine = self.engines[cython_method] = Engine() + else: + cython_engine = self.engines[cython_method] + with cython_engine.lock: + # Validate that the engine used is the proper one + cython_integr = cython_engine.engine + cython_reset = None + if cython_integr is None: + cython_reset = "of first initialization" + if (not cython_reset) and safe: + if cython_integr.unit != unit: + cython_reset = "unit was changed" + if cython_integr.bins != npt: + cython_reset = "number of points changed" + if cython_integr.size != data.size: + cython_reset = "input image size changed" + if cython_integr.empty != empty: + cython_reset = "empty value changed" + if (mask is not None) and (not cython_integr.check_mask): + cython_reset = f"mask but {method.algo_lower.upper()} was without mask" + elif (mask is None) and (cython_integr.cmask is not None): + cython_reset = f"no mask but { method.algo_lower.upper()} has mask" + elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): + cython_reset = "mask changed" + if (radial_range is None) and (cython_integr.pos0_range is not None): + cython_reset = f"radial_range was defined in { method.algo_lower.upper()}" + elif (radial_range is not None) and (cython_integr.pos0_range != radial_range): + cython_reset = f"radial_range is defined but differs in %s" % method.algo_lower.upper() + if (azimuth_range is None) and (cython_integr.pos1_range is not None): + cython_reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" + elif (azimuth_range is not None) and (cython_integr.pos1_range != azimuth_range): + cython_reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" + if cython_reset: + logger.info("AI.integrate1d_ng: Resetting Cython integrator because %s", cython_reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + cython_integr = self.setup_sparse_integrator(shape, npt, mask, + radial_range, azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo=method.algo_lower, + empty=empty, scale=False) + except MemoryError: # sparse methods are hungry... + logger.warning("MemoryError: falling back on forward implementation") + cython_integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + cython_engine.set_engine(cython_integr) + # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
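            # The Cython engine integrates with the sparse matrix directly; OpenCL and Python
            # engines are created lazily, stored in self.engines keyed by the full
            # IntegrationMethod, and reset whenever the unit, number of bins, image size,
            # mask or integration ranges no longer match (see the `reset` checks below).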
+ if method.impl_lower == "cython": + # The integrator has already been initialized previously + integr = self.engines[method].engine + intpl = integr.integrate_ng(data, + variance=variance, + error_model=error_model, + dummy=dummy, + delta_dummy=delta_dummy, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + absorption=absorption, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average) + else: # method.impl_lower in ("opencl", "python"): + if method not in self.engines: + # instanciated the engine + engine = self.engines[method] = Engine() + else: + engine = self.engines[method] + with engine.lock: + # Validate that the engine used is the proper one + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit was changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if integr.empty != empty: + reset = "empty value changed" + if (mask is not None) and (not integr.check_mask): + reset = f"mask but {method.algo_lower.upper()} was without mask" + elif (mask is None) and (integr.check_mask): + reset = f"no mask but {method.algo_lower.upper()} has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and (integr.pos0_range is not None): + reset = f"radial_range was defined in {method.algo_lower.upper()}" + elif (radial_range is not None) and (integr.pos0_range != radial_range): + reset = f"radial_range is defined but differs in {method.algo_lower.upper()}" + if (azimuth_range is None) and (integr.pos1_range is not None): + reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" + elif (azimuth_range is not None) and (integr.pos1_range != azimuth_range): + reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" + + if reset: + logger.info("ai.integrate1d_ng: Resetting ocl_csr integrator because %s", reset) + csr_integr = self.engines[cython_method].engine + if method.impl_lower == "opencl": + try: + integr = method.class_funct_ng.klass(csr_integr.lut, + image_size=data.size, + checksum=csr_integr.lut_checksum, + empty=empty, + unit=unit, + bin_centers=csr_integr.bin_centers, + platformid=method.target[0], + deviceid=method.target[1], + mask_checksum=csr_integr.mask_checksum) + # Copy some properties from the cython integrator + integr.pos0_range = csr_integr.pos0_range + integr.pos1_range = csr_integr.pos1_range + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + elif method.impl_lower == "python": + integr = method.class_funct_ng.klass(image_size=data.size, + lut=csr_integr.lut, + empty=empty, + unit=unit, + bin_centers=csr_integr.bin_centers, + mask_checksum=csr_integr.mask_checksum) + # Copy some properties from the cython integrator + integr.pos0_range = csr_integr.pos0_range + integr.pos1_range = csr_integr.pos1_range + engine.set_engine(integr) + else: + raise RuntimeError("Unexpected configuration") + + else: + integr = self.engines[method].engine + + kwargs = {"error_model": error_model, + "variance": variance} + if method.impl_lower == "opencl": + kwargs["polarization_checksum"] = polarization_crc + kwargs["solidangle_checksum"] = solidangle_crc + 
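                # The checksums let the OpenCL engine recognise whether the solid-angle and
                # polarization arrays already on the device are still valid, so the
                # device-side copies can be reused when unchanged.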
intpl = integr.integrate_ng(data, dark=dark, + dummy=dummy, delta_dummy=delta_dummy, + flat=flat, solidangle=solidangle, + absorption=absorption, polarization=polarization, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average, + ** kwargs) + # This section is common to all 3 CSR implementations... + if error_model.do_variance: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity, + intpl.sigma) + result._set_sum_variance(intpl.variance) + else: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity) + result._set_compute_engine(integr.__module__ + "." + integr.__class__.__name__) + result._set_unit(integr.unit) + result._set_sum_signal(intpl.signal) + result._set_sum_normalization(intpl.normalization) + result._set_sum_normalization2(intpl.norm_sq) + result._set_count(intpl.count) + result._set_sem(intpl.sem) + result._set_std(intpl.std) + + # END of CSR/CSC/LUT common implementations + elif (method.method[1:3] == ("no", "histogram") and + method.method[3] in ("python", "cython")): + integr = method.class_funct_ng.function # should be histogram[_engine].histogram1d_engine + if azimuth_range: + chi_min, chi_max = azimuth_range + chi = self.chiArray(shape) + azim_mask = numpy.logical_or(chi > chi_max, chi < chi_min) + if mask is None: + mask = azim_mask + else: + mask = numpy.logical_or(mask, azim_mask) + radial = self.array_from_unit(shape, "center", unit, scale=False) + intpl = integr(radial, npt, data, + dark=dark, + dummy=dummy, delta_dummy=delta_dummy, empty=empty, + variance=variance, + flat=flat, solidangle=solidangle, + polarization=polarization, + absorption=absorption, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average, + mask=mask, + radial_range=radial_range, + error_model=error_model) + + if error_model.do_variance: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity, + intpl.sigma) + result._set_sum_variance(intpl.variance) + result._set_std(intpl.std) + result._set_sem(intpl.sem) + result._set_sum_normalization2(intpl.norm_sq) + else: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity) + result._set_compute_engine(integr.__module__ + "." 
+ integr.__name__) + result._set_unit(unit) + result._set_sum_signal(intpl.signal) + result._set_sum_normalization(intpl.normalization) + result._set_count(intpl.count) + elif method.method[1:4] == ("no", "histogram", "opencl"): + if method not in self.engines: + # instanciated the engine + engine = self.engines[method] = Engine() + else: + engine = self.engines[method] + with engine.lock: + # Validate that the engine used is the proper one + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit was changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if integr.empty != empty: + reset = "empty value changed" + if reset: + logger.info("ai.integrate1d: Resetting integrator because %s", reset) + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + azimuthal = self.chiArray(shape) + try: + integr = method.class_funct_ng.klass(pos0, + npt, + empty=empty, + azimuthal=azimuthal, + unit=unit, + mask=mask, + mask_checksum=mask_crc, + platformid=method.target[0], + deviceid=method.target[1]) + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + intpl = integr(data, dark=dark, + dummy=dummy, + delta_dummy=delta_dummy, + variance=variance, + flat=flat, solidangle=solidangle, + polarization=polarization, absorption=absorption, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average, + radial_range=radial_range, + azimuth_range=azimuth_range, + error_model=error_model) + + if error_model.do_variance: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity, + intpl.sigma) + result._set_sum_variance(intpl.variance) + else: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity) + result._set_compute_engine(integr.__module__ + "." 
+ integr.__class__.__name__) + result._set_unit(integr.unit) + result._set_sum_signal(intpl.signal) + result._set_sum_normalization(intpl.normalization) + result._set_count(intpl.count) + elif (method.method[2:4] == ("histogram", "cython")): + integr = method.class_funct_ng.function # should be histogram[_engine].histogram1d_engine + if method.method[1] == "bbox": + if azimuth_range is None: + chi = None + delta_chi = None + else: + chi = self.chiArray(shape) + delta_chi = self.deltaChi(shape) + radial = self.array_from_unit(shape, "center", unit, scale=False) + delta_radial = self.array_from_unit(shape, "delta", unit, scale=False) + intpl = integr(weights=data, variance=variance, + pos0=radial, delta_pos0=delta_radial, + pos1=chi, delta_pos1=delta_chi, + bins=npt, + dummy=dummy, delta_dummy=delta_dummy, empty=empty, + dark=dark, flat=flat, solidangle=solidangle, + polarization=polarization, absorption=absorption, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average, + mask=mask, + pos0_range=radial_range, + pos1_range=azimuth_range, + error_model=error_model) + elif method.method[1] == "full": + pos = self.array_from_unit(shape, "corner", unit, scale=False) + intpl = integr(weights=data, variance=variance, + pos=pos, + bins=npt, + dummy=dummy, delta_dummy=delta_dummy, empty=empty, + dark=dark, flat=flat, solidangle=solidangle, + polarization=polarization, absorption=absorption, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average, + mask=mask, + pos0_range=radial_range, + pos1_range=azimuth_range, + error_model=error_model) + else: + raise RuntimeError("Should not arrive here") + if error_model.do_variance: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity, + intpl.sigma) + result._set_sum_variance(intpl.variance) + else: + result = Integrate1dResult(intpl.position * unit.scale, + intpl.intensity) + result._set_compute_engine(integr.__module__ + "." + integr.__name__) + result._set_unit(unit) + result._set_sum_signal(intpl.signal) + result._set_sum_normalization(intpl.normalization) + result._set_sum_normalization2(intpl.norm_sq) + result._set_count(intpl.count) + result._set_sem(intpl.sem) + result._set_std(intpl.std) + + else: + raise RuntimeError(f"Fallback method ... 
should no more be used: {method}") + result._set_method(method) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_has_mask_applied(has_mask) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_method_called("integrate1d_ng") + result._set_metadata(metadata) + result._set_error_model(error_model) + result._set_poni(PoniFile(self)) + result._set_has_solidangle_correction(correctSolidAngle) + result._set_weighted_average(method.weighted_average) + + if filename is not None: + save_integrate_result(filename, result) + return result + + _integrate1d_ng = integrate1d_ng + integrate1d = integrate1d_ng + + def integrate_radial(self, data, npt, npt_rad=100, + correctSolidAngle=True, + radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("bbox", "csr", "cython"), unit=units.CHI_DEG, radial_unit=units.Q, + normalization_factor=1.0): + """Calculate the radial integrated profile curve as I = f(chi) + + :param ndarray data: 2D array from the Detector/CCD camera + :param int npt: number of points in the output pattern + :param int npt_rad: number of points in the radial space. Too few points may lead to huge rounding errors. + :param str filename: output filename in 2/3 column ascii format + :param bool correctSolidAngle: correct for solid angle of each pixel if True + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. + :type radial_range: Tuple(float, float) + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. + :type azimuth_range: Tuple(float, float) + :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :param float dummy: value for dead/masked pixels + :param float delta_dummy: precision for dummy value + :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). 
+ * 0 for circular polarization or random, + * None for no correction, + * True for using the former correction + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param pyFAI.units.Unit unit: Output units, can be "chi_deg" or "chi_rad" + :param pyFAI.units.Unit radial_unit: unit used for radial representation, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now + :param float normalization_factor: Value of a normalization monitor + :return: chi bins center positions and regrouped intensity + :rtype: Integrate1dResult + """ + azimuth_unit = units.to_unit(unit, type_=units.AZIMUTHAL_UNITS) + res = self.integrate2d_ng(data, npt_rad, npt, + correctSolidAngle=correctSolidAngle, + mask=mask, dummy=dummy, delta_dummy=delta_dummy, + polarization_factor=polarization_factor, + dark=dark, flat=flat, method=method, + normalization_factor=normalization_factor, + radial_range=radial_range, + azimuth_range=azimuth_range, + unit=radial_unit) + + azim_scale = azimuth_unit.scale / units.CHI_DEG.scale + + sum_signal = res.sum_signal.sum(axis=-1) + count = res.count.sum(axis=-1) + sum_normalization = res._sum_normalization.sum(axis=-1) + + mask = numpy.where(count == 0) + empty = dummy if dummy is not None else self._empty + intensity = sum_signal / sum_normalization + intensity[mask] = empty + + if res.sigma is not None: + sum_variance = res.sum_variance.sum(axis=-1) + sigma = numpy.sqrt(sum_variance) / sum_normalization + sigma[mask] = empty + else: + sum_variance = None + sigma = None + result = Integrate1dResult(res.azimuthal * azim_scale, intensity, sigma) + result._set_method_called("integrate_radial") + result._set_unit(azimuth_unit) + result._set_sum_normalization(sum_normalization) + result._set_count(count) + result._set_sum_signal(sum_signal) + result._set_sum_variance(sum_variance) + result._set_has_dark_correction(dark is not None) + result._set_has_flat_correction(flat is not None) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_method = res.method + result._set_compute_engine = res.compute_engine + + return result + + @deprecated(since_version="0.21", only_once=True, deprecated_since="0.21.0") + def integrate2d_legacy(self, data, npt_rad, npt_azim=360, + filename=None, correctSolidAngle=True, variance=None, + error_model=None, radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=None, unit=units.Q, safe=True, + normalization_factor=1.0, metadata=None): + """ + Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default + + Multi algorithm implementation (tries to be bullet proof) + + :param data: 2D array from the Detector/CCD camera + :type data: ndarray + :param npt_rad: number of points in the radial direction + :type npt_rad: int + :param npt_azim: number of points in the azimuthal direction + :type npt_azim: int + :param filename: output image (as edf format) + :type filename: str + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param variance: array containing the variance of the data. 
If not available, no error propagation is done + :type variance: ndarray + :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :type error_model: str + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :type mask: ndarray + :param dummy: value for dead/masked pixels + :type dummy: float + :param delta_dummy: precision for dummy value + :type delta_dummy: float + :param polarization_factor: polarization factor between -1 (vertical) + and +1 (horizontal). 0 for circular polarization or random, + None for no correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now + :type unit: pyFAI.units.Unit + :param safe: Do some extra checks to ensure LUT is still valid. False is faster. + :type safe: bool + :param normalization_factor: Value of a normalization monitor + :type normalization_factor: float + :param all: if true, return many more intermediate results as a dict (deprecated, please refer to the documentation of Integrate2dResult). + :param metadata: JSON serializable object containing the metadata, usually a dictionary. + :type all: bool + :return: azimuthaly regrouped intensity, q/2theta/r pos. and chi pos. 
+ :rtype: Integrate2dResult, dict + """ + method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) + assert method.dimension == 2 + npt = (npt_rad, npt_azim) + unit = units.to_unit(unit) + pos0_scale = unit.scale + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + shape = data.shape + + if radial_range: + radial_range = tuple([i / pos0_scale for i in radial_range]) + + if variance is not None: + assert variance.size == data.size + elif error_model: + error_model = error_model.lower() + if error_model == "poisson": + variance = numpy.ascontiguousarray(data, numpy.float32) + + if azimuth_range is not None: + azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1)) + if azimuth_range[1] <= azimuth_range[0]: + azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi) + self.check_chi_disc(azimuth_range) + + if correctSolidAngle: + solidangle = self.solidAngleArray(shape, correctSolidAngle) + else: + solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + I = None + sigma = None + sum_ = None + count = None + + if method.algo_lower == "lut": + if EXT_LUT_ENGINE not in self.engines: + engine = self.engines[EXT_LUT_ENGINE] = Engine() + else: + engine = self.engines[EXT_LUT_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and (not integr.check_mask): + reset = "mask but LUT was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but LUT has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + if radial_range != integr.pos0_range: + reset = "radial_range changed" + if azimuth_range != integr.pos1_range: + reset = "azimuth_range changed" + error = False + if reset: + logger.info("ai.integrate2d: Resetting integrator because %s", reset) + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, algo="LUT", unit=unit, scale=False) + except MemoryError: + # LUT method is hungry im memory... 
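                        # Same fallback strategy as in the 1D case: drop the cached engines and
                        # continue with the default 2D method when the LUT does not fit in memory.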
+ logger.warning("MemoryError: falling back on forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + engine.set_engine(integr) + if not error: + if method.impl_lower == "opencl": + if OCL_LUT_ENGINE in self.engines: + ocl_engine = self.engines[OCL_LUT_ENGINE] + else: + ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() + with ocl_engine.lock: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["lut"] != integr.lut_checksum): + ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + + if (not error) and (ocl_integr is not None): + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor, + safe=safe) + I.shape = npt + I = I.T + bins_rad = integr.bin_centers0 # this will be copied later + bins_azim = integr.bin_centers1 + else: + I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor + ) + + if method.algo_lower == "csr": + if EXT_CSR_ENGINE not in self.engines: + engine = self.engines[EXT_CSR_ENGINE] = Engine() + else: + engine = self.engines[EXT_CSR_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and (integr.pos0_range is not None): + reset = "radial_range was defined in CSR" + elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): + reset = "radial_range is defined but differs in CSR" + if (azimuth_range is None) and (integr.pos1_range is not None): + reset = "azimuth_range not defined and CSR had azimuth_range defined" + elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): + reset = "azimuth_range requested and CSR's azimuth_range don't match" + error = False + if reset: + logger.info("AI.integrate2d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="CSR", + scale=False) + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + engine.set_engine(integr) + if not error: + if method.impl_lower == "opencl": + if OCL_CSR_ENGINE in self.engines: + 
ocl_engine = self.engines[OCL_CSR_ENGINE] + else: + ocl_engine = self.engines[OCL_CSR_ENGINE] = Engine() + with ocl_engine.lock: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or (ocl_integr.on_device["data"] != integr.lut_checksum): + ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + if (not error) and (ocl_integr is not None): + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + safe=safe, + normalization_factor=normalization_factor) + I.shape = npt + I = I.T + bins_rad = integr.bin_centers0 # this will be copied later + bins_azim = integr.bin_centers1 + else: + I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if method.method[1:4] in (("pseudo", "histogram", "cython"), ("full", "histogram", "cython")): + logger.debug("integrate2d uses SplitPixel implementation") + pos = self.array_from_unit(shape, "corner", unit, scale=False) + I, bins_rad, bins_azim, sum_, count = splitPixel.fullSplit2D(pos=pos, + weights=data, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=dummy if dummy is not None else self._empty) + if method.method[1:4] == ("bbox", "histogram", "cython"): + logger.debug("integrate2d uses BBox implementation") + chi = self.chiArray(shape) + dchi = self.deltaChi(shape) + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) + I, bins_rad, bins_azim, sum_, count = splitBBox.histoBBox2d(weights=data, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=dummy if dummy is not None else self._empty) + + if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": + logger.debug("integrate2d uses numpy or cython implementation") + data = data.astype(numpy.float32) # it is important to make a copy see issue #88 + mask = self.create_mask(data, mask, dummy, delta_dummy, + unit=unit, + radial_range=radial_range, + azimuth_range=azimuth_range, + mode="where") + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + pos1 = self.chiArray(shape) + + if radial_range is None: + radial_range = [pos0.min(), pos0.max() * EPS32] + + if azimuth_range is None: + azimuth_range = [pos1.min(), pos1.max() * EPS32] + + if variance is not None: + variance = variance[mask] + + if dark is not None: + data -= dark + + if flat is not None: + data /= flat + + if polarization is not None: + data /= polarization + + if solidangle is not None: + data /= solidangle + + data = data[mask] + 
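            # The intensities are now corrected (dark, flat, polarization, solid angle) and
            # reduced to the unmasked pixels; the coordinate arrays are reduced the same way
            # just below so that weights and positions stay aligned for the 2D histogram.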
pos0 = pos0[mask] + pos1 = pos1[mask] + if method.impl_lower == "cython": + I, bins_azim, bins_rad, sum_, count = histogram.histogram2d(pos0=pos1, + pos1=pos0, + weights=data, + bins=(npt_azim, npt_rad), + split=False, + empty=dummy if dummy is not None else self._empty, + normalization_factor=normalization_factor) + elif method.impl_lower == "python": + logger.debug("integrate2d uses Numpy implementation") + count, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), range=[azimuth_range, radial_range]) + bins_azim = (b[1:] + b[:-1]) / 2.0 + bins_rad = (c[1:] + c[:-1]) / 2.0 + count1 = numpy.maximum(1, count) + sum_, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), + weights=data, range=[azimuth_range, radial_range]) + I = sum_ / count1 / normalization_factor + I[count == 0] = dummy if dummy is not None else self._empty + # I know I make copies .... + bins_rad = bins_rad * pos0_scale + bins_azim = bins_azim * 180.0 / pi + + result = Integrate2dResult(I, bins_rad, bins_azim, sigma) + result._set_method_called("integrate2d") + result._set_compute_engine(str(method)) + result._set_unit(unit) + result._set_count(count) + result._set_sum(sum_) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_has_mask_applied(has_mask) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_metadata(metadata) + + if filename is not None: + save_integrate_result(filename, result) + + return result + + _integrate2d_legacy = integrate2d_legacy + + def integrate2d_ng(self, data, npt_rad, npt_azim=360, + filename=None, correctSolidAngle=True, variance=None, + error_model=None, radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("bbox", "csr", "cython"), unit=units.Q, + safe=True, normalization_factor=1.0, metadata=None): + """ + Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default + + Multi algorithm implementation (tries to be bullet proof) + + :param data: 2D array from the Detector/CCD camera + :type data: ndarray + :param npt_rad: number of points in the radial direction + :type npt_rad: int + :param npt_azim: number of points in the azimuthal direction + :type npt_azim: int + :param filename: output image (as edf format) + :type filename: str + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param variance: array containing the variance of the data. If not available, no error propagation is done + :type variance: ndarray + :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :type error_model: str + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
+ :type azimuth_range: (float, float), optional + :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :type mask: ndarray + :param dummy: value for dead/masked pixels + :type dummy: float + :param delta_dummy: precision for dummy value + :type delta_dummy: float + :param polarization_factor: polarization factor between -1 (vertical) + and +1 (horizontal). 0 for circular polarization or random, + None for no correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :type method: str + :param pyFAI.units.Unit unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for anything defined as pyFAI.units.RADIAL_UNITS + can also be a 2-tuple of (RADIAL_UNITS, AZIMUTHAL_UNITS) (advanced usage) + :param safe: Do some extra checks to ensure LUT is still valid. False is faster. + :type safe: bool + :param normalization_factor: Value of a normalization monitor + :type normalization_factor: float + :param metadata: JSON serializable object containing the metadata, usually a dictionary. + :return: azimuthaly regrouped intensity, q/2theta/r pos. and chi pos. + :rtype: Integrate2dResult, dict + """ + method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) + assert method.dimension == 2 + npt = (npt_rad, npt_azim) + if isinstance(unit, (tuple, list)) and len(unit) == 2: + radial_unit, azimuth_unit = unit + else: + radial_unit = unit + azimuth_unit = units.CHI_DEG + radial_unit = units.to_unit(radial_unit, units.RADIAL_UNITS) + azimuth_unit = units.to_unit(azimuth_unit, units.AZIMUTHAL_UNITS) + unit = (radial_unit, azimuth_unit) + space = (radial_unit.space, azimuth_unit.space) + pos0_scale = radial_unit.scale + pos1_scale = azimuth_unit.scale + empty = dummy if dummy is not None else self._empty + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + shape = data.shape + + if radial_range: + radial_range = tuple([i / pos0_scale for i in radial_range]) + + error_model = ErrorModel.parse(error_model) + if variance is not None: + assert variance.size == data.size + error_model = ErrorModel.VARIANCE + if error_model.poissonian and not method.manage_variance: + error_model = ErrorModel.VARIANCE + if dark is None: + variance = numpy.maximum(data, 1.0).astype(numpy.float32) + else: + variance = (numpy.maximum(data, 1.0) + numpy.maximum(dark, 0.0)).astype(numpy.float32) + + if azimuth_range is not None and azimuth_unit.period: + azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1)) + if azimuth_range[1] <= azimuth_range[0]: + azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi) + self.check_chi_disc(azimuth_range) + + if correctSolidAngle: + solidangle = self.solidAngleArray(shape, correctSolidAngle) + else: + solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if 
flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + if method.algo_is_sparse: + intpl = None + cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] + if cython_method not in self.engines: + cython_engine = self.engines[cython_method] = Engine() + else: + cython_engine = self.engines[cython_method] + with cython_engine.lock: + cython_integr = cython_engine.engine + cython_reset = None + + if cython_integr is None: + cython_reset = "of first initialization" + if (not cython_reset) and safe: + if cython_integr.space != space: + cython_reset = f"unit {cython_integr.unit} incompatible with requested {unit}" + if cython_integr.bins != npt: + cython_reset = f"number of points {cython_integr.bins} incompatible with requested {npt}" + if cython_integr.size != data.size: + cython_reset = f"input image size {cython_integr.size} incompatible with requested {data.size}" + if cython_integr.empty != empty: + cython_reset = f"empty value {cython_integr.empty} incompatible with requested {empty}" + if (mask is not None) and (not cython_integr.check_mask): + cython_reset = f"mask but {method.algo_lower.upper()} was without mask" + elif (mask is None) and (cython_integr.cmask is not None): + cython_reset = f"no mask but { method.algo_lower.upper()} has mask" + elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): + cython_reset = "mask changed" + if (radial_range is None) and (cython_integr.pos0_range is not None): + cython_reset = f"radial_range was defined in { method.algo_lower.upper()}" + elif (radial_range is not None) and (cython_integr.pos0_range != radial_range): + cython_reset = f"radial_range is defined but differs in {method.algo_lower.upper()}" + if (azimuth_range is None) and (cython_integr.pos1_range is not None): + cython_reset = f"azimuth_range not defined and {method.algo_lower.upper()} had azimuth_range defined" + elif (azimuth_range is not None) and (cython_integr.pos1_range != azimuth_range): + cython_reset = f"azimuth_range requested and {method.algo_lower.upper()}'s azimuth_range don't match" + if cython_reset: + logger.info("AI.integrate2d_ng: Resetting Cython integrator because %s", cython_reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + cython_integr = self.setup_sparse_integrator(shape, npt, mask, + radial_range, azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo=method.algo_lower, + empty=empty, scale=False) + except MemoryError: # sparse method are hungry... + logger.warning("MemoryError: falling back on forward implementation") + cython_integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + cython_engine.set_engine(cython_integr) + # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
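+            # Dispatch sketch for the sparse-algorithm branch: the Cython sparse
+            # integrator prepared above is either called directly (Cython implementation,
+            # see the fallback further down) or only provides the LUT/CSR matrix that is
+            # handed over to the OpenCL or pure-Python engines in the branch below.
+            # A call that typically reaches this path (illustrative only, `ai` and `img`
+            # are hypothetical):
+            #     res = ai.integrate2d_ng(img, 1000, 360, unit="q_nm^-1",
+            #                             method=("bbox", "csr", "opencl"))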
+ if method.impl_lower != "cython": + # method.impl_lower in ("opencl", "python"): + if method not in self.engines: + # instanciated the engine + engine = self.engines[method] = Engine() + else: + engine = self.engines[method] + with engine.lock: + # Validate that the engine used is the proper one + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.space != space: + reset = f"unit {integr.unit} incompatible with requested {unit}" + if numpy.prod(integr.bins) != numpy.prod(npt): + reset = f"number of points {integr.bins} incompatible with requested {npt}" + if integr.size != data.size: + reset = f"input image size {integr.size} incompatible with requested {data.size}" + if integr.empty != empty: + reset = f"empty value {integr.empty} incompatible with requested {empty}" + if (mask is not None) and (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and (integr.pos0_range is not None): + reset = "radial_range was defined in CSR" + elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): + reset = "radial_range is defined but differs in CSR" + if (azimuth_range is None) and (integr.pos1_range is not None): + reset = "azimuth_range not defined and CSR had azimuth_range defined" + elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): + reset = "azimuth_range requested and CSR's azimuth_range don't match" + error = False + if reset: + logger.info("AI.integrate2d: Resetting integrator because %s", reset) + split = method.split_lower + try: + cython_integr = self.setup_sparse_integrator(shape, npt, mask, + radial_range, azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo=method.algo_lower, + empty=empty, scale=False) + except MemoryError: + logger.warning("MemoryError: falling back on default implementation") + cython_integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + cython_engine.set_engine(cython_integr) + if not error: + if method in self.engines: + ocl_py_engine = self.engines[method] + else: + ocl_py_engine = self.engines[method] = Engine() + integr = ocl_py_engine.engine + if integr is None or integr.checksum != cython_integr.lut_checksum: + if (method.impl_lower == "opencl"): + with ocl_py_engine.lock: + integr = method.class_funct_ng.klass(cython_integr.lut, + cython_integr.size, + bin_centers=cython_integr.bin_centers0, + azim_centers=cython_integr.bin_centers1, + platformid=method.target[0], + deviceid=method.target[1], + checksum=cython_integr.lut_checksum, + unit=unit, empty=empty, + mask_checksum=mask_crc) + + elif (method.impl_lower == "python"): + with ocl_py_engine.lock: + integr = method.class_funct_ng.klass(cython_integr.size, + cython_integr.lut, + bin_centers0=cython_integr.bin_centers0, + bin_centers1=cython_integr.bin_centers1, + checksum=cython_integr.lut_checksum, + unit=unit, empty=empty, + mask_checksum=mask_crc) + integr.pos0_range = cython_integr.pos0_range + integr.pos1_range = cython_integr.pos1_range + ocl_py_engine.set_engine(integr) + + if (integr is not None): + intpl = integr.integrate_ng(data, + variance=variance, + error_model=error_model, + dark=dark, flat=flat, + solidangle=solidangle, + 
solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + safe=safe, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average,) + if intpl is None: # fallback if OpenCL failed or default cython + # The integrator has already been initialized previously + intpl = cython_integr.integrate_ng(data, + variance=variance, + error_model=error_model, + dummy=dummy, + delta_dummy=delta_dummy, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + weighted_average=method.weighted_average,) + + elif method.algo_lower == "histogram": + if method.split_lower in ("pseudo", "full"): + logger.debug("integrate2d uses (full, histogram, cython) implementation") + pos = self.array_from_unit(shape, "corner", unit, scale=False) + integrator = method.class_funct_ng.function + intpl = integrator(pos=pos, + weights=data, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=empty, + variance=variance, + error_model=error_model, + allow_pos0_neg=not radial_unit.positive, + weighted_average=method.weighted_average,) + + elif method.split_lower == "bbox": + logger.debug("integrate2d uses BBox implementation") + pos0 = self.array_from_unit(shape, "center", radial_unit, scale=False) + dpos0 = self.array_from_unit(shape, "delta", radial_unit, scale=False) + pos1 = self.array_from_unit(shape, "center", azimuth_unit, scale=False) + dpos1 = self.array_from_unit(shape, "delta", azimuth_unit, scale=False) + integrator = method.class_funct_ng.function + intpl = integrator(weights=data, + pos0=pos0, + delta_pos0=dpos0, + pos1=pos1, + delta_pos1=dpos1, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=empty, + variance=variance, + error_model=error_model, + allow_pos0_neg=not radial_unit.positive, + clip_pos1=bool(azimuth_unit.period), + weighted_average=method.weighted_average,) + elif method.split_lower == "no": + if method.impl_lower == "opencl": + logger.debug("integrate2d uses OpenCL histogram implementation") + if method not in self.engines: + # instanciated the engine + engine = self.engines[method] = Engine() + else: + engine = self.engines[method] + with engine.lock: + # Validate that the engine used is the proper one #TODO!!!! 
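+                    # Cache-validation sketch: the checks below compare the cached
+                    # OCL_Histogram2d engine against the current request (bin counts,
+                    # image size, mask CRC and the CRCs of the radial/azimuthal position
+                    # arrays); any mismatch triggers a rebuild on the device, while the
+                    # radial/azimuthal ranges are only enforced at integration time.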
+ integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.space != space: + reset = f"unit {integr.unit} incompatible with requested {unit}" + if (integr.bins_radial, integr.bins_azimuthal) != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and (integr.on_device.get("mask") != mask_crc): + reset = "mask changed" + if self._cached_array[f"{radial_unit.space}_crc"] != integr.on_device.get("radial"): + reset = "radial array changed" + if self._cached_array[f"{azimuth_unit.space}_crc"] != integr.on_device.get("azimuthal"): + reset = "azimuthal array changed" + # Nota: Ranges are enforced at runtime, not initialization + error = False + if reset: + logger.info("AI.integrate2d: Resetting OCL_Histogram2d integrator because %s", reset) + rad = self.array_from_unit(shape, typ="center", unit=radial_unit, scale=False) + rad_crc = self._cached_array[f"{radial_unit.space}_crc"] = crc32(rad) + azi = self.array_from_unit(shape, typ="center", unit=azimuth_unit, scale=False) + azi_crc = self._cached_array[f"{azimuth_unit.space}_crc"] = crc32(azi) + try: + integr = method.class_funct_ng.klass(rad, + azi, + *npt, + radial_checksum=rad_crc, + azimuthal_checksum=azi_crc, + empty=empty, unit=unit, + mask=mask, mask_checksum=mask_crc, + platformid=method.target[0], + deviceid=method.target[1] + ) + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + engine.set_engine(integr) + if not error: + intpl = integr.integrate(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + safe=safe, + normalization_factor=normalization_factor, + radial_range=radial_range, + azimuthal_range=azimuth_range, + error_model=error_model, + weighted_average=method.weighted_average,) +#################### + else: # if method.impl_lower in ["python", "cython"]: + logger.debug("integrate2d uses [CP]ython histogram implementation") + radial = self.array_from_unit(shape, "center", radial_unit, scale=False) + azim = self.array_from_unit(shape, "center", azimuth_unit, scale=False) + if method.impl_lower == "python": + data = data.astype(numpy.float32) # it is important to make a copy see issue #88 + mask = self.create_mask(data, mask, dummy, delta_dummy, + unit=unit, + radial_range=radial_range, + azimuth_range=azimuth_range, + mode="normal").ravel() + histogrammer = method.class_funct_ng.function + intpl = histogrammer(radial=radial, + azimuthal=azim, + bins=(npt_rad, npt_azim), + raw=data, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + absorption=None, + mask=mask, + dummy=dummy, + delta_dummy=delta_dummy, + normalization_factor=normalization_factor, + empty=self._empty, + variance=variance, + dark_variance=None, + error_model=error_model, + radial_range=radial_range, + azimuth_range=azimuth_range, + allow_radial_neg=not radial_unit.positive, + clip_pos1=bool(azimuth_unit.period), + weighted_average=method.weighted_average,) + + I = intpl.intensity + bins_azim = 
intpl.azimuthal + bins_rad = intpl.radial + signal2d = intpl.signal + norm2d = intpl.normalization + count = intpl.count + if error_model.do_variance: + std = intpl.std + sem = intpl.sem + var2d = intpl.variance + norm2d_sq = intpl.norm_sq + else: + std = sem = var2d = norm2d_sq = None + + # Duplicate arrays on purpose .... + bins_rad = bins_rad * pos0_scale + bins_azim = bins_azim * pos1_scale + + result = Integrate2dResult(I, bins_rad, bins_azim, sem) + result._set_method_called("integrate2d") + result._set_compute_engine(str(method)) + result._set_method(method) + result._set_radial_unit(radial_unit) + result._set_azimuthal_unit(azimuth_unit) + result._set_count(count) + # result._set_sum(sum_) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_has_mask_applied(has_mask) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_metadata(metadata) + + result._set_sum_signal(signal2d) + result._set_sum_normalization(norm2d) + if error_model.do_variance: + result._set_sum_normalization2(norm2d_sq) + result._set_sum_variance(var2d) + result._set_std(std) + result._set_std(sem) + + if filename is not None: + save_integrate_result(filename, result) + + return result + + integrate2d = _integrate2d_ng = integrate2d_ng + + @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") + def save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, + has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): + """This method save the result of a 1D integration. + + Deprecated on 13/06/2017 + + :param filename: the filename used to save the 1D integration + :type filename: str + :param dim1: the x coordinates of the integrated curve + :type dim1: numpy.ndarray + :param I: The integrated intensity + :type I: numpy.mdarray + :param error: the error bar for each intensity + :type error: numpy.ndarray or None + :param dim1_unit: the unit of the dim1 array + :type dim1_unit: pyFAI.units.Unit + :param has_dark: save the darks filenames (default: no) + :type has_dark: bool + :param has_flat: save the flat filenames (default: no) + :type has_flat: bool + :param polarization_factor: the polarization factor + :type polarization_factor: float + :param normalization_factor: the monitor value + :type normalization_factor: float + """ + self.__save1D(filename=filename, + dim1=dim1, + I=I, + error=error, + dim1_unit=dim1_unit, + has_dark=has_dark, + has_flat=has_flat, + polarization_factor=polarization_factor, + normalization_factor=normalization_factor) + + def __save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, + has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): + """This method save the result of a 1D integration. 
+ + :param filename: the filename used to save the 1D integration + :type filename: str + :param dim1: the x coordinates of the integrated curve + :type dim1: numpy.ndarray + :param I: The integrated intensity + :type I: numpy.mdarray + :param error: the error bar for each intensity + :type error: numpy.ndarray or None + :param dim1_unit: the unit of the dim1 array + :type dim1_unit: pyFAI.units.Unit + :param has_dark: save the darks filenames (default: no) + :type has_dark: bool + :param has_flat: save the flat filenames (default: no) + :type has_flat: bool + :param polarization_factor: the polarization factor + :type polarization_factor: float + :param normalization_factor: the monitor value + :type normalization_factor: float + """ + if not filename: + return + writer = DefaultAiWriter(None, self) + writer.save1D(filename, dim1, I, error, dim1_unit, has_dark, has_flat, + polarization_factor, normalization_factor) + + @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") + def save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, + has_dark=False, has_flat=False, + polarization_factor=None, normalization_factor=None): + """This method save the result of a 2D integration. + + Deprecated on 13/06/2017 + + :param filename: the filename used to save the 2D histogram + :type filename: str + :param dim1: the 1st coordinates of the histogram + :type dim1: numpy.ndarray + :param dim1: the 2nd coordinates of the histogram + :type dim1: numpy.ndarray + :param I: The integrated intensity + :type I: numpy.mdarray + :param error: the error bar for each intensity + :type error: numpy.ndarray or None + :param dim1_unit: the unit of the dim1 array + :type dim1_unit: pyFAI.units.Unit + :param has_dark: save the darks filenames (default: no) + :type has_dark: bool + :param has_flat: save the flat filenames (default: no) + :type has_flat: bool + :param polarization_factor: the polarization factor + :type polarization_factor: float + :param normalization_factor: the monitor value + :type normalization_factor: float + """ + self.__save2D(filename=filename, + I=I, + dim1=dim1, + dim2=dim2, + error=error, + dim1_unit=dim1_unit, + has_dark=has_dark, + has_flat=has_flat, + polarization_factor=polarization_factor, + normalization_factor=normalization_factor) + + def __save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, + has_dark=False, has_flat=False, + polarization_factor=None, normalization_factor=None): + """This method save the result of a 2D integration. 
+ + Deprecated on 13/06/2017 + + :param filename: the filename used to save the 2D histogram + :type filename: str + :param dim1: the 1st coordinates of the histogram + :type dim1: numpy.ndarray + :param dim1: the 2nd coordinates of the histogram + :type dim1: numpy.ndarray + :param I: The integrated intensity + :type I: numpy.mdarray + :param error: the error bar for each intensity + :type error: numpy.ndarray or None + :param dim1_unit: the unit of the dim1 array + :type dim1_unit: pyFAI.units.Unit + :param has_dark: save the darks filenames (default: no) + :type has_dark: bool + :param has_flat: save the flat filenames (default: no) + :type has_flat: bool + :param polarization_factor: the polarization factor + :type polarization_factor: float + :param normalization_factor: the monitor value + :type normalization_factor: float + """ + if not filename: + return + writer = DefaultAiWriter(None, self) + writer.save2D(filename, I, dim1, dim2, error, dim1_unit, has_dark, has_flat, + polarization_factor, normalization_factor) + + def medfilt1d(self, data, npt_rad=1024, npt_azim=512, + correctSolidAngle=True, + radial_range=None, azimuth_range=None, + polarization_factor=None, dark=None, flat=None, + method="splitpixel", unit=units.Q, + percentile=50, dummy=None, delta_dummy=None, + mask=None, normalization_factor=1.0, metadata=None): + """Perform the 2D integration and filter along each row using a median + filter + + :param data: input image as numpy array + :param npt_rad: number of radial points + :param npt_azim: number of azimuthal points + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + + :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). 
+ 0 for circular polarization or random, + None for no correction, + True for using the former correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param unit: unit to be used for integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param percentile: which percentile use for cutting out + percentil can be a 2-tuple to specify a region to + average out + :param mask: masked out pixels array + :param normalization_factor: Value of a normalization monitor + :type normalization_factor: float + :param metadata: any other metadata, + :type metadata: JSON serializable dict + :return: Integrate1D like result like + """ + if dummy is None: + dummy = numpy.finfo(numpy.float32).min + delta_dummy = None + unit = units.to_unit(unit) + method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) + if (method.impl_lower == "opencl") and npt_azim and (npt_azim > 1): + old = npt_azim + npt_azim = 1 << int(round(log(npt_azim, 2))) # power of two above + if npt_azim != old: + logger.warning("Change number of azimuthal bins to nearest power of two: %s->%s", + old, npt_azim) + res2d = self.integrate2d(data, npt_rad, npt_azim, mask=mask, + flat=flat, dark=dark, + radial_range=radial_range, + azimuth_range=azimuth_range, + unit=unit, method=method.method, + dummy=dummy, delta_dummy=delta_dummy, + correctSolidAngle=correctSolidAngle, + polarization_factor=polarization_factor, + normalization_factor=normalization_factor) + integ2d = res2d.intensity + if (method.impl_lower == "opencl"): + ctx = self.engines[res2d.method].engine.ctx + if numpy.isfortran(integ2d) and integ2d.dtype == numpy.float32: + rdata = integ2d.T + horizontal = True + else: + rdata = numpy.ascontiguousarray(integ2d, dtype=numpy.float32) + horizontal = False + + if OCL_SORT_ENGINE not in self.engines: + with self._lock: + if OCL_SORT_ENGINE not in self.engines: + self.engines[OCL_SORT_ENGINE] = Engine() + engine = self.engines[OCL_SORT_ENGINE] + with engine.lock: + sorter = engine.engine + if (sorter is None) or \ + (sorter.npt_width != rdata.shape[1]) or\ + (sorter.npt_height != rdata.shape[0]): + logger.info("reset opencl sorter") + sorter = ocl_sort.Separator(npt_height=rdata.shape[0], npt_width=rdata.shape[1], ctx=ctx) + engine.set_engine(sorter) + if "__len__" in dir(percentile): + if horizontal: + spectrum = sorter.trimmed_mean_horizontal(rdata, dummy, [(i / 100.0) for i in percentile]).get() + else: + spectrum = sorter.trimmed_mean_vertical(rdata, dummy, [(i / 100.0) for i in percentile]).get() + else: + if horizontal: + spectrum = sorter.filter_horizontal(rdata, dummy, percentile / 100.0).get() + else: + spectrum = sorter.filter_vertical(rdata, dummy, percentile / 100.0).get() + else: + dummies = (integ2d == dummy).sum(axis=0) + # add a line of zeros at the end (along npt_azim) so that the value for no valid pixel is 0 + sorted_ = numpy.zeros((npt_azim + 1, npt_rad)) + sorted_[:npt_azim,:] = numpy.sort(integ2d, axis=0) + + if "__len__" in dir(percentile): + # mean over the valid value + lower = dummies + (numpy.floor(min(percentile) * (npt_azim - dummies) / 100.)).astype(int) + upper = dummies + (numpy.ceil(max(percentile) * (npt_azim - dummies) / 100.)).astype(int) + bounds = numpy.zeros(sorted_.shape, dtype=int) + assert (lower >= 0).all() + assert (upper <= npt_azim).all() + + rng = numpy.arange(npt_rad) + bounds[lower, rng] = 1 + 
bounds[upper, rng] = 1 + valid = (numpy.cumsum(bounds, axis=0) % 2) + invalid = numpy.logical_not(valid) + sorted_[invalid] = numpy.nan + spectrum = numpy.nanmean(sorted_, axis=0) + else: + # read only the valid value + dummies = (integ2d == dummy).sum(axis=0) + pos = dummies + (numpy.round(percentile * (npt_azim - dummies) / 100.)).astype(int) + assert (pos >= 0).all() + assert (pos <= npt_azim).all() + spectrum = sorted_[(pos, numpy.arange(npt_rad))] + + result = Integrate1dResult(res2d.radial, spectrum) + result._set_method_called("medfilt1d") + result._set_compute_engine(str(method)) + result._set_percentile(percentile) + result._set_npt_azim(npt_azim) + result._set_unit(unit) + result._set_has_mask_applied(res2d.has_mask_applied) + result._set_metadata(metadata) + result._set_has_dark_correction(res2d.has_dark_correction) + result._set_has_flat_correction(res2d.has_flat_correction) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + return result + + def sigma_clip_legacy(self, data, npt_rad=1024, npt_azim=512, + correctSolidAngle=True, polarization_factor=None, + radial_range=None, azimuth_range=None, + dark=None, flat=None, + method=("full", "histogram", "cython"), unit=units.Q, + thres=3, max_iter=5, dummy=None, delta_dummy=None, + mask=None, normalization_factor=1.0, metadata=None, + safe=True, **kwargs): + """Perform first a 2D integration and then an iterative sigma-clipping + filter along each row. See the doc of scipy.stats.sigmaclip for the + options `thres` and `max_iter`. + + :param data: input image as numpy array + :param npt_rad: number of radial points (alias: npt) + :param npt_azim: number of azimuthal points + :param bool correctSolidAngle: correct for solid angle of each pixel when set + :param float polarization_factor: polarization factor between -1 (vertical) + and +1 (horizontal). + + - 0 for circular polarization or random, + - None for no correction, + - True for using the former correction + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param unit: unit to be used for integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param thres: cut-off for n*sigma: discard any values with `|I-| > thres*σ`. + The threshold can be a 2-tuple with sigma_low and sigma_high. + :param max_iter: maximum number of iterations + :param mask: masked out pixels array + :param float normalization_factor: Value of a normalization monitor + :param metadata: any other metadata, + :type metadata: JSON serializable dict + :param safe: unset to save some checks on sparse matrix shape/content. + :kwargs: unused, just for signature compatibility when used within Worker. 
+ :return: Integrate1D like result like + + Nota: The initial 2D-integration requires pixel splitting + """ + # compatibility layer with sigma_clip_ng + if "npt" in kwargs: + npt_rad = kwargs["npt"] + # We use NaN as dummies + if dummy is None: + dummy = numpy.nan + delta_dummy = None + unit = units.to_unit(unit) + method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) + if "__len__" in dir(thres) and len(thres) > 0: + sigma_lo = thres[0] + sigma_hi = thres[-1] + else: + sigma_lo = sigma_hi = thres + + if (method.impl_lower == "opencl") and npt_azim and (npt_azim > 1): + old = npt_azim + npt_azim = 1 << int(round(log(npt_azim, 2))) # power of two above + if npt_azim != old: + logger.warning("Change number of azimuthal bins to nearest power of two: %s->%s", + old, npt_azim) + + res2d = self.integrate2d(data, npt_rad, npt_azim, mask=mask, + azimuth_range=azimuth_range, + radial_range=radial_range, + flat=flat, dark=dark, + unit=unit, method=method, + dummy=dummy, delta_dummy=delta_dummy, + correctSolidAngle=correctSolidAngle, + polarization_factor=polarization_factor, + normalization_factor=normalization_factor, + safe=safe) + image = res2d.intensity + if (method.impl_lower == "opencl"): + if (method.algo_lower == "csr") and \ + (OCL_CSR_ENGINE in self.engines) and \ + (self.engines[OCL_CSR_ENGINE].engine is not None): + ctx = self.engines[OCL_CSR_ENGINE].engine.ctx + elif (method.algo_lower == "csr") and \ + (OCL_LUT_ENGINE in self.engines) and \ + (self.engines[OCL_LUT_ENGINE].engine is not None): + ctx = self.engines[OCL_LUT_ENGINE].engine.ctx + else: + ctx = None + + if numpy.isfortran(image) and image.dtype == numpy.float32: + rdata = image.T + horizontal = True + else: + rdata = numpy.ascontiguousarray(image, dtype=numpy.float32) + horizontal = False + + if OCL_SORT_ENGINE not in self.engines: + with self._lock: + if OCL_SORT_ENGINE not in self.engines: + self.engines[OCL_SORT_ENGINE] = Engine() + engine = self.engines[OCL_SORT_ENGINE] + with engine.lock: + sorter = engine.engine + if (sorter is None) or \ + (sorter.npt_width != rdata.shape[1]) or\ + (sorter.npt_height != rdata.shape[0]): + logger.info("reset opencl sorter") + sorter = ocl_sort.Separator(npt_height=rdata.shape[0], npt_width=rdata.shape[1], ctx=ctx) + engine.set_engine(sorter) + + if horizontal: + res = sorter.sigma_clip_horizontal(rdata, dummy=dummy, + sigma_lo=sigma_lo, + sigma_hi=sigma_hi, + max_iter=max_iter) + else: + res = sorter.sigma_clip_vertical(rdata, dummy=dummy, + sigma_lo=sigma_lo, + sigma_hi=sigma_hi, + max_iter=max_iter) + mean = res[0].get() + std = res[1].get() + else: + as_strided = numpy.lib.stride_tricks.as_strided + mask = numpy.logical_not(numpy.isfinite(image)) + dummies = mask.sum() + image[mask] = numpy.nan + mean = numpy.nanmean(image, axis=0) + std = numpy.nanstd(image, axis=0) + for _ in range(max_iter): + mean2d = as_strided(mean, image.shape, (0, mean.strides[0])) + std2d = as_strided(std, image.shape, (0, std.strides[0])) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + delta = (image - mean2d) / std2d + mask = numpy.logical_or(delta > sigma_hi, + delta < -sigma_lo) + dummies = mask.sum() + if dummies == 0: + break + image[mask] = numpy.nan + mean = numpy.nanmean(image, axis=0) + std = numpy.nanstd(image, axis=0) + + result = Integrate1dResult(res2d.radial, mean, std) + result._set_method_called("sigma_clip") + result._set_compute_engine(str(method)) + result._set_percentile(thres) + result._set_npt_azim(npt_azim) + result._set_unit(unit) + 
result._set_has_mask_applied(res2d.has_mask_applied) + result._set_metadata(metadata) + result._set_has_dark_correction(res2d.has_dark_correction) + result._set_has_flat_correction(res2d.has_flat_correction) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + return result + + _sigma_clip_legacy = sigma_clip_legacy + + def sigma_clip_ng(self, data, + npt=1024, + correctSolidAngle=True, + polarization_factor=None, + variance=None, + error_model=ErrorModel.NO, + radial_range=None, + azimuth_range=None, + dark=None, + flat=None, + absorption=None, + method=("no", "csr", "cython"), + unit=units.Q, + thres=5.0, + max_iter=5, + dummy=None, + delta_dummy=None, + mask=None, + normalization_factor=1.0, + metadata=None, + safe=True, + **kwargs): + """Performs iteratively the 1D integration with variance propagation + and performs a sigm-clipping at each iteration, i.e. + all pixel which intensity differs more than thres*std is + discarded for next iteration. + + Keep only pixels with intensty: + + ``|I - | < thres * σ(I)`` + + This enforces a symmetric, bell-shaped distibution (i.e. gaussian-like) + and is very good at extracting background or amorphous isotropic scattering + out of Bragg peaks. + + :param data: input image as numpy array + :param npt_rad: number of radial points + :param bool correctSolidAngle: correct for solid angle of each pixel if True + :param float polarization_factor: polarization factor between: + -1 (vertical) + +1 (horizontal). + - 0 for circular polarization or random, + - None for no correction, + - True for using the former correction + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param ndarray absorption: Detector absorption (image) + :param ndarray variance: the variance of the signal + :param str error_model: can be "poisson" to assume a poissonian detector (variance=I) or "azimuthal" to take the std² in each ring (better, more expenive) + :param unit: unit to be used for integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param thres: cut-off for n*sigma: discard any values with (I-)/sigma > thres. + :param max_iter: maximum number of iterations + :param mask: masked out pixels array + :param float normalization_factor: Value of a normalization monitor + :param metadata: any other metadata, + :type metadata: JSON serializable dict + :param safe: set to False to skip some tests + :return: Integrate1D like result like + + The difference with the previous `sigma_clip_legacy` implementation is that there is no 2D regrouping. + Pixel splitting should be avoided with this implementation. + The standard deviation is usually smaller than previously and the signal cleaner. + It is also slightly faster. + + The case neither `error_model`, nor `variance` is provided, fall-back on a poissonian model. 
+ + """ + for k in kwargs: + if k == "npt_azim": + logger.warning("'npt_azim' argument is not used in sigma_clip_ng as not 2D intergration is performed anymore") + else: + logger.warning("Got unknown argument %s %s", k, kwargs[k]) + + error_model = ErrorModel.parse(error_model) + if variance is not None: + assert variance.size == data.size + error_model = ErrorModel.VARIANCE + + unit = units.to_unit(unit) + if radial_range: + radial_range = tuple(radial_range[i] / unit.scale for i in (0, -1)) + if azimuth_range is not None: + azimuth_range = self.normalize_azimuth_range(azimuth_range) + + method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) + if method.split != "no": + logger.warning("Method %s is using a pixel-splitting scheme. sigma_clip_ng should be use WITHOUT PIXEL-SPLITTING! Your results are likely to be wrong!", + method) + + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "user provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + if correctSolidAngle: + solidangle = self.solidAngleArray(data.shape, correctSolidAngle) + else: + solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(data.shape, polarization_factor, with_checksum=True) + + if (method.algo_lower == "csr"): + "This is the only method implemented for now ..." + # Prepare LUT if needed! + # initialize the CSR integrator in Cython as it may be needed later on. 
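+            # A Cython CSR integrator is always set up (or fetched from self.engines)
+            # first: it owns the sparse matrix and its checksum, which the OpenCL or
+            # Python implementation requested via `method` reuses further down.
+            # Typical call (illustrative sketch, `ai` and `img` are hypothetical):
+            #     res = ai.sigma_clip_ng(img, npt=1000, unit="q_nm^-1",
+            #                            error_model="azimuthal", thres=4, max_iter=5,
+            #                            method=("no", "csr", "opencl"))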
+ cython_method = IntegrationMethod.select_method(method.dimension, method.split_lower, method.algo_lower, "cython")[0] + if cython_method not in self.engines: + cython_engine = self.engines[cython_method] = Engine() + else: + cython_engine = self.engines[cython_method] + with cython_engine.lock: + # Validate that the engine used is the proper one + cython_integr = cython_engine.engine + cython_reset = None + if cython_integr is None: + cython_reset = "of first initialization" + if (not cython_reset) and safe: + if cython_integr.unit != unit: + cython_reset = "unit was changed" + if cython_integr.bins != npt: + cython_reset = "number of points changed" + if cython_integr.size != data.size: + cython_reset = "input image size changed" + if cython_integr.empty != self._empty: + cython_reset = "empty value changed " + if (mask is not None) and (not cython_integr.check_mask): + cython_reset = "mask but CSR was without mask" + elif (mask is None) and (cython_integr.check_mask): + cython_reset = "no mask but CSR has mask" + elif (mask is not None) and (cython_integr.mask_checksum != mask_crc): + cython_reset = "mask changed" + if (radial_range is None) and (cython_integr.pos0_range is not None): + cython_reset = "radial_range was defined in CSR" + elif (radial_range is not None) and cython_integr.pos0_range != (min(radial_range), max(radial_range)): + cython_reset = "radial_range is defined but not the same as in CSR" + if (azimuth_range is None) and (cython_integr.pos1_range is not None): + cython_reset = "azimuth_range not defined and CSR had azimuth_range defined" + elif (azimuth_range is not None) and cython_integr.pos1_range != (min(azimuth_range), max(azimuth_range)): + cython_reset = "azimuth_range requested and CSR's azimuth_range don't match" + if cython_reset: + logger.info("AI.sigma_clip_ng: Resetting Cython integrator because %s", cython_reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + cython_integr = self.setup_sparse_integrator(data.shape, npt, mask=mask, + mask_checksum=mask_crc, + unit=unit, split=split, algo="CSR", + pos0_range=radial_range, + pos1_range=azimuth_range, + empty=self._empty, + scale=False) + except MemoryError: # CSR method is hungry... + logger.warning("MemoryError: falling back on forward implementation") + cython_integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + cython_engine.set_engine(cython_integr) + if method not in self.engines: + # instanciated the engine + engine = self.engines[method] = Engine() + else: + engine = self.engines[method] + with engine.lock: + # Validate that the engine used is the proper one + integr = engine.engine + reset = None + # This whole block uses CSR, Now we should treat all the various implementation: Cython, OpenCL and finally Python. 
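+                # Same reset logic as for the Cython engine above: any mismatch in unit,
+                # bin count, image size, empty value, mask checksum or requested ranges
+                # invalidates the cached engine, which is then rebuilt from the Cython
+                # CSR integrator's sparse matrix (`csr_integr.lut` below).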
+ + # Validate that the engine used is the proper one + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit was changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if integr.empty != self._empty: + reset = "empty value changed " + if (mask is not None) and (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + # TODO + if (radial_range is None) and (integr.pos0_range is not None): + reset = "radial_range was defined in CSR" + elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): + reset = "radial_range is defined but not the same as in CSR" + if (azimuth_range is None) and (integr.pos1_range is not None): + reset = "azimuth_range not defined and CSR had azimuth_range defined" + elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): + reset = "azimuth_range requested and CSR's azimuth_range don't match" + + if reset: + logger.info("ai.sigma_clip_ng: Resetting ocl_csr integrator because %s", reset) + csr_integr = self.engines[cython_method].engine + if method.impl_lower == "opencl": + try: + integr = method.class_funct_ng.klass(csr_integr.lut, + image_size=data.size, + checksum=csr_integr.lut_checksum, + empty=self._empty, + unit=unit, + mask_checksum=csr_integr.mask_checksum, + bin_centers=csr_integr.bin_centers, + platformid=method.target[0], + deviceid=method.target[1]) + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + # Copy some properties from the cython integrator + integr.pos0_range = csr_integr.pos0_range + integr.pos1_range = csr_integr.pos1_range + engine.set_engine(integr) + elif method.impl_lower in ("python", "cython"): + integr = method.class_funct_ng.klass(lut=csr_integr.lut, + image_size=data.size, + empty=self._empty, + unit=unit, + mask_checksum=csr_integr.mask_checksum, + bin_centers=csr_integr.bin_centers) + # Copy some properties from the cython integrator + integr.pos0_range = csr_integr.pos0_range + integr.pos1_range = csr_integr.pos1_range + engine.set_engine(integr) + else: + logger.error(f"Implementation {method.impl_lower} not supported") + else: + integr = self.engines[method].engine + kwargs = {"dark":dark, "dummy":dummy, "delta_dummy":delta_dummy, + "variance":variance, "dark_variance":None, + "flat":flat, "solidangle":solidangle, "polarization":polarization, "absorption":absorption, + "error_model":error_model, "normalization_factor":normalization_factor, + "cutoff":thres, "cycle":max_iter} + + intpl = integr.sigma_clip(data, **kwargs) + else: + raise RuntimeError("Not yet implemented. 
Sorry") + result = Integrate1dResult(intpl.position * unit.scale, intpl.intensity, intpl.sem) + result._set_method_called("sigma_clip_ng") + result._set_method(method) + result._set_compute_engine(str(method)) + result._set_percentile(thres) + result._set_unit(unit) + result._set_has_mask_applied(has_mask) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_metadata(metadata) + result._set_sum_signal(intpl.signal) + result._set_sum_normalization(intpl.normalization) + result._set_sum_normalization2(intpl.norm_sq) + result._set_std(intpl.std) + result._set_sem(intpl.sem) + result._set_sum_variance(intpl.variance) + result._set_count(intpl.count) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_error_model(error_model) + return result + + sigma_clip = sigma_clip_ng + + def separate(self, data, npt_rad=1024, npt_azim=512, unit="2th_deg", method="splitpixel", + percentile=50, mask=None, restore_mask=True): + """ + Separate bragg signal from powder/amorphous signal using azimuthal integration, + median filering and projected back before subtraction. + + :param data: input image as numpy array + :param npt_rad: number of radial points + :param npt_azim: number of azimuthal points + :param unit: unit to be used for integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param percentile: which percentile use for cutting out + :param mask: masked out pixels array + :param restore_mask: masked pixels have the same value as input data provided + :return: SeparateResult which the bragg & amorphous signal + + Note: the filtered 1D spectrum can be retrieved from + SeparateResult.radial and SeparateResult.intensity + """ + + filter_result = self.medfilt1d(data, npt_rad=npt_rad, npt_azim=npt_azim, + unit=unit, method=method, + percentile=percentile, mask=mask) + # This takes 100ms and is the next to be optimized. 
+ amorphous = self.calcfrom1d(filter_result.radial, filter_result.intensity, + data.shape, mask=None, + dim1_unit=unit, + correctSolidAngle=True) + bragg = data - amorphous + if restore_mask: + wmask = numpy.where(mask) + maskdata = data[wmask] + bragg[wmask] = maskdata + amorphous[wmask] = maskdata + + result = SeparateResult(bragg, amorphous) + result._radial = filter_result.radial + result._intensity = filter_result.intensity + result._sigma = filter_result.sigma + + result._set_sum_signal(filter_result.sum_signal) + result._set_sum_variance(filter_result.sum_variance) + result._set_sum_normalization(filter_result.sum_normalization) + result._set_count(filter_result.count) + + result._set_method_called("medfilt1d") + result._set_compute_engine(str(method)) + result._set_percentile(percentile) + result._set_npt_azim(npt_azim) + result._set_unit(unit) + result._set_has_mask_applied(filter_result.has_mask_applied) + result._set_metadata(filter_result.metadata) + result._set_has_dark_correction(filter_result.has_dark_correction) + result._set_has_flat_correction(filter_result.has_flat_correction) + + # TODO when switching to sigma-clipped filtering + # result._set_polarization_factor(polarization_factor) + # result._set_normalization_factor(normalization_factor) + + return result + + def inpainting(self, data, mask, npt_rad=1024, npt_azim=512, + unit="r_m", method="splitpixel", poissonian=False, + grow_mask=3): + """Re-invent the values of masked pixels + + :param data: input image as 2d numpy array + :param mask: masked out pixels array + :param npt_rad: number of radial points + :param npt_azim: number of azimuthal points + :param unit: unit to be used for integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param poissonian: If True, add some poisonian noise to the data to make + then more realistic + :param grow_mask: grow mask in polar coordinated to accomodate pixel + splitting algoritm + :return: inpainting object which contains the restored image as .data + """ + from ..ext import inpainting + dummy = -1 + delta_dummy = 0.9 + method = IntegrationMethod.select_one_available(method, dim=2, + default=self.DEFAULT_METHOD_2D) + + assert mask.shape == self.detector.shape + mask = numpy.ascontiguousarray(mask, numpy.int8) + blank_data = numpy.zeros(mask.shape, dtype=numpy.float32) + ones_data = numpy.ones(mask.shape, dtype=numpy.float32) + + to_mask = numpy.where(mask) + + blank_mask = numpy.zeros_like(mask) + masked = numpy.zeros(mask.shape, dtype=numpy.float32) + masked[to_mask] = dummy + + masked_data = data.astype(numpy.float32) # explicit copy + masked_data[to_mask] = dummy + + if self.chiDiscAtPi: + azimuth_range = (-180, 180) + else: + azimuth_range = (0, 360) + r = self.array_from_unit(typ="corner", unit=unit, scale=True) + rmax = (1.0 + numpy.finfo(numpy.float32).eps) * r[..., 0].max() + kwargs = {"npt_rad": npt_rad, + "npt_azim": npt_azim, + "unit": unit, + "dummy": dummy, + "delta_dummy": delta_dummy, + "method": method, + "correctSolidAngle": False, + "azimuth_range": azimuth_range, + "radial_range": (0, rmax), + "polarization_factor": None, + # Nullify the masks to avoid to use the detector once + "dark": blank_mask, + "mask": blank_mask, + "flat": ones_data} + + imgb = self.integrate2d(blank_data, **kwargs) + imgp = self.integrate2d(masked, **kwargs) + imgd = self.integrate2d(masked_data, **kwargs) + omask = numpy.ascontiguousarray(numpy.round(imgb.intensity / dummy), numpy.int8) + imask = 
numpy.ascontiguousarray(numpy.round(imgp.intensity / dummy), numpy.int8) + to_paint = (imask - omask) + + if grow_mask: + # inpaint a bit more than needed to avoid "side" effects. + from scipy.ndimage import binary_dilation + structure = [[1], [1], [1]] + to_paint = binary_dilation(to_paint, structure=structure, iterations=grow_mask) + to_paint = to_paint.astype(numpy.int8) + + polar_inpainted = inpainting.polar_inpaint(imgd.intensity, + to_paint, omask, 0) + r = self.array_from_unit(typ="center", unit=unit, scale=True) + chi = numpy.rad2deg(self.chiArray()) + cart_inpatined = inpainting.polar_interpolate(data, mask, + r, + chi, + polar_inpainted, + imgd.radial, imgd.azimuthal) + + if poissonian: + res = data.copy() + res[to_mask] = numpy.random.poisson(cart_inpatined[to_mask]) + else: + res = cart_inpatined + return res + + def guess_max_bins(self, redundancy=1, search_range=None, unit="q_nm^-1", radial_range=None, azimuth_range=None): + """ + Guess the maximum number of bins, considering the excpected minimum redundancy: + + :param redundancy: minimum number of pixel per bin + :param search_range: the minimum and maximun number of bins to be considered + :param unit: the unit to be considered like "2th_deg" or "q_nm^-1" + :param radial_range: radial range to be considered, depends on unit ! + :param azimuth_range: azimuthal range to be considered + :return: the minimum bin number providing the provided redundancy + """ + img = numpy.empty(self.detector.shape, dtype=numpy.float32) + dia = int(numpy.sqrt(img.shape[0] ** 2 + img.shape[1] ** 2)) + method = self._normalize_method(("no", "histogram", "cython"), dim=1, default=self.DEFAULT_METHOD_1D) + unit = units.to_unit(unit) + if search_range is None: + ref = self.integrate1d(img, dia, method=method, unit=unit, + azimuth_range=azimuth_range, radial_range=radial_range).count.min() + if ref >= redundancy: + search_range = (dia, 4 * dia) + else: + search_range = (2, dia) + + for i in range(*search_range): + mini = self.integrate1d(img, i, method=method, unit=unit, + azimuth_range=azimuth_range, radial_range=radial_range).count.min() + if mini < redundancy: + return i - 1 + + def guess_polarization(self, img, npt_rad=None, npt_azim=360, unit="2th_deg", + method=("no", "csr", "cython"), target_rad=None): + """Guess the polarization factor for the given image + + For this one performs several integration with different polarization factors + and take the one with the lowest std along the outer-most ring. + + :param img: diffraction image, preferable with beam-stop centered. + :param npt_rad: number of point in the radial dimension, can be guessed, better avoid oversampling. + :param npt_azim: number of point in the azimuthal dimension, 1 per degree is usually OK + :param unit: radial unit for the integration + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation). The default one is pretty optimal: no splitting, CSR for the speed of the integration + :param target_rad: position of the outer-most complete ring, can be guessed. 
+ :return: polarization factor (#, polarization angle) + """ + if npt_rad is None: + if self.detector.shape is None: + self.detector.shape = img.shape + npt_rad = self.guess_npt_rad() + + res = self.integrate2d_ng(img, npt_rad, npt_azim, unit=unit, method=method) + + if target_rad is None: + azimuthal_range = (res.count > 0).sum(axis=0) + azim_min = azimuthal_range.max() * 0.95 + valid_rings = numpy.where(azimuthal_range > azim_min)[0] + nbpix = res.count.sum(axis=0)[valid_rings] + bin_idx = valid_rings[numpy.where(nbpix.max() == nbpix)[0][-1]] + else: + bin_idx = numpy.argmin(abs(res.radial - target_rad)) + + from scipy.optimize import minimize_scalar + sfun = lambda p:\ + self.integrate2d_ng(img, npt_rad, npt_azim, unit=unit, method=method, + polarization_factor=p).intensity[:, bin_idx].std() + opt = minimize_scalar(sfun, bounds=[-1, 1]) + logger.info(str(opt)) + return opt.x + +################################################################################ +# Some properties +################################################################################ + + def set_darkcurrent(self, dark): + self.detector.set_darkcurrent(dark) + + def get_darkcurrent(self): + return self.detector.get_darkcurrent() + + darkcurrent = property(get_darkcurrent, set_darkcurrent) + + def set_flatfield(self, flat): + self.detector.set_flatfield(flat) + + def get_flatfield(self): + return self.detector.get_flatfield() + + flatfield = property(get_flatfield, set_flatfield) + + @deprecated(reason="Not maintained", since_version="0.17") + def set_darkfiles(self, files=None, method="mean"): + """Set the dark current from one or mutliple files, avaraged + according to the method provided. + + Moved to Detector. + + :param files: file(s) used to compute the dark. + :type files: str or list(str) or None + :param method: method used to compute the dark, "mean" or "median" + :type method: str + """ + self.detector.set_darkfiles(files, method) + + @property + @deprecated(reason="Not maintained", since_version="0.17") + def darkfiles(self): + return self.detector.darkfiles + + @deprecated(reason="Not maintained", since_version="0.17") + def set_flatfiles(self, files, method="mean"): + """Set the flat field from one or mutliple files, averaged + according to the method provided. + + Moved to Detector. + + :param files: file(s) used to compute the flat-field. 
+ :type files: str or list(str) or None + :param method: method used to compute the dark, "mean" or "median" + :type method: str + """ + self.detector.set_flatfiles(files, method) + + @property + @deprecated(reason="Not maintained", since_version="0.17") + def flatfiles(self): + return self.detector.flatfiles + + def get_empty(self): + return self._empty + + def set_empty(self, value): + self._empty = float(value) + # propagate empty values to integrators + for engine in self.engines.values(): + with engine.lock: + if engine.engine is not None: + try: + engine.engine.empty = self._empty + except Exception as exeption: + logger.error(exeption) + + empty = property(get_empty, set_empty) + + def __getnewargs_ex__(self): + "Helper function for pickling ai" + return (self.dist, self.poni1, self.poni2, + self.rot1, self.rot2, self.rot3, + self.pixel1, self.pixel2, + self.splineFile, self.detector, self.wavelength), {} + + def __getstate__(self): + """Helper function for pickling ai + + :return: the state of the object + """ + + state_blacklist = ('_lock', "engines") + state = Geometry.__getstate__(self) + for key in state_blacklist: + if key in state: + del state[key] + return state + + def __setstate__(self, state): + """Helper function for unpickling ai + + :param state: the state of the object + """ + for statekey, statevalue in state.items(): + setattr(self, statekey, statevalue) + self._sem = threading.Semaphore() + self._lock = threading.Semaphore() + self.engines = {} diff --git a/src/pyFAI/integrator/fiber.py b/src/pyFAI/integrator/fiber.py new file mode 100644 index 000000000..6098b8ae2 --- /dev/null +++ b/src/pyFAI/integrator/fiber.py @@ -0,0 +1,467 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Project: Azimuthal integration +# https://github.com/silx-kit/pyFAI +# +# Copyright (C) 2023-2024 European Synchrotron Radiation Facility, Grenoble, France +# +# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# . +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# . +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +__author__ = "Edgar GUTIERREZ FERNANDEZ " +__contact__ = "edgar.gutierrez-fernandez@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "09/10/2024" +__status__ = "stable" +__docformat__ = 'restructuredtext' + +import logging +logger = logging.getLogger(__name__) +import numpy +from .azimuthal import AzimuthalIntegrator +from ..containers import Integrate1dResult +from .. 
import units + +class FiberIntegrator(AzimuthalIntegrator): + def integrate_fiber(self, data, + npt_output, output_unit=units.Q_OOP, output_unit_range=None, + npt_integrated=100, integrated_unit=units.Q_IP, integrated_unit_range=None, + sample_orientation=None, + filename=None, + correctSolidAngle=True, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("no", "histogram", "cython"), + normalization_factor=1.0): + """Calculate the integrated profile curve along a specific FiberUnit + + :param ndarray data: 2D array from the Detector/CCD camera + :param int npt_output: number of points in the output pattern + :param pyFAI.units.UnitFiber output_unit: Output units + :param output_unit_range: The lower and upper range of the output unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. + :param int npt_integrated: number of points to be integrated along integrated_unit + :param pyFAI.units.UnitFiber integrated_unit: unit to be integrated along integrated_unit_range + :param integrated_unit_range: The lower and upper range to be integrated. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. + :param sample_orientation: 1-4, four different orientation of the fiber axis regarding the detector main axis, from 1 to 4 is +90º + :param str filename: output filename in 2/3 column ascii format + :param bool correctSolidAngle: correct for solid angle of each pixel if True + :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :param float dummy: value for dead/masked pixels + :param float delta_dummy: precision for dummy value + :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). + * 0 for circular polarization or random, + * None for no correction, + * True for using the former correction + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param float normalization_factor: Value of a normalization monitor + :return: chi bins center positions and regrouped intensity + :rtype: Integrate1dResult + """ + if isinstance(integrated_unit, units.UnitFiber): + sample_orientation = sample_orientation or integrated_unit.sample_orientation + else: + sample_orientation = sample_orientation or 1 + + reset = False + if isinstance(integrated_unit, units.UnitFiber): + if integrated_unit.sample_orientation != sample_orientation: + integrated_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + integrated_unit = units.to_unit(integrated_unit) + integrated_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(output_unit, units.UnitFiber): + if output_unit.sample_orientation != sample_orientation: + output_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. 
AzimuthalIntegrator will be reset.") + reset = True + else: + output_unit = units.to_unit(output_unit) + output_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if reset: + self.reset() + logger.info(f"AzimuthalIntegrator was reset. Current fiber orientation: {sample_orientation}.") + + + if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): + logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! The results could be wrong!") + + res = self.integrate2d_ng(data, npt_rad=npt_integrated, npt_azim=npt_output, + correctSolidAngle=correctSolidAngle, + mask=mask, dummy=dummy, delta_dummy=delta_dummy, + polarization_factor=polarization_factor, + dark=dark, flat=flat, method=method, + normalization_factor=normalization_factor, + radial_range=integrated_unit_range, + azimuth_range=output_unit_range, + unit=(integrated_unit, output_unit)) + + unit_scale = output_unit.scale + sum_signal = res.sum_signal.sum(axis=-1) + count = res.count.sum(axis=-1) + sum_normalization = res._sum_normalization.sum(axis=-1) + mask = numpy.where(count == 0) + empty = dummy if dummy is not None else self._empty + intensity = sum_signal / sum_normalization + intensity[mask] = empty + + if res.sigma is not None: + sum_variance = res.sum_variance.sum(axis=-1) + sigma = numpy.sqrt(sum_variance) / sum_normalization + sigma[mask] = empty + else: + sum_variance = None + sigma = None + result = Integrate1dResult(res.azimuthal * unit_scale, intensity, sigma) + result._set_method_called("integrate_radial") + result._set_unit(output_unit) + result._set_sum_normalization(sum_normalization) + result._set_count(count) + result._set_sum_signal(sum_signal) + result._set_sum_variance(sum_variance) + result._set_has_dark_correction(dark is not None) + result._set_has_flat_correction(flat is not None) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_method = res.method + result._set_compute_engine = res.compute_engine + + if filename is not None: + save_integrate_result(filename, result) + + return result + + def integrate_grazing_incidence(self, data, + npt_output, output_unit=units.Q_OOP, output_unit_range=None, + npt_integrated=100, integrated_unit=units.Q_IP, integrated_unit_range=None, + incident_angle=None, tilt_angle=None, sample_orientation=None, + filename=None, + correctSolidAngle=True, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("no", "histogram", "cython"), + normalization_factor=1.0): + """Calculate the integrated profile curve along a specific FiberUnit, additional inputs for incident angle and tilt angle + + :param ndarray data: 2D array from the Detector/CCD camera + :param int npt_output: number of points in the output pattern + :param pyFAI.units.UnitFiber output_unit: Output units + :param output_unit_range: The lower and upper range of the output unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. 
+ :param int npt_integrated: number of points to be integrated along integrated_unit + :param pyFAI.units.UnitFiber integrated_unit: unit to be integrated along integrated_unit_range + :param integrated_unit_range: The lower and upper range to be integrated. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. Optional. + :param incident_angle: tilting of the sample towards the beam (analog to rot2): in radians + :param tilt_angle: tilting of the sample orthogonal to the beam direction (analog to rot3): in radians + :param sample_orientation: 1-4, four different orientation of the fiber axis regarding the detector main axis, from 1 to 4 is +90º + :param str filename: output filename in 2/3 column ascii format + :param bool correctSolidAngle: correct for solid angle of each pixel if True + :param ndarray mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :param float dummy: value for dead/masked pixels + :param float delta_dummy: precision for dummy value + :param float polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). + * 0 for circular polarization or random, + * None for no correction, + * True for using the former correction + :param ndarray dark: dark noise image + :param ndarray flat: flat field image + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param float normalization_factor: Value of a normalization monitor + :return: chi bins center positions and regrouped intensity + :rtype: Integrate1dResult + """ + reset = False + + if isinstance(integrated_unit, units.UnitFiber): + incident_angle = incident_angle or integrated_unit.incident_angle + else: + incident_angle = incident_angle or 0.0 + + if isinstance(integrated_unit, units.UnitFiber): + if integrated_unit.incident_angle != incident_angle: + integrated_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + integrated_unit = units.to_unit(integrated_unit) + integrated_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(output_unit, units.UnitFiber): + if output_unit.incident_angle != incident_angle: + output_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + output_unit = units.to_unit(output_unit) + output_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(integrated_unit, units.UnitFiber): + tilt_angle = tilt_angle or integrated_unit.tilt_angle + else: + tilt_angle = tilt_angle or 0.0 + + if isinstance(integrated_unit, units.UnitFiber): + if integrated_unit.tilt_angle != tilt_angle: + integrated_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + integrated_unit = units.to_unit(integrated_unit) + integrated_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {integrated_unit}. 
AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(output_unit, units.UnitFiber): + if output_unit.tilt_angle != tilt_angle: + output_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + output_unit = units.to_unit(output_unit) + output_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(integrated_unit, units.UnitFiber): + sample_orientation = sample_orientation or integrated_unit.sample_orientation + else: + sample_orientation = sample_orientation or 1 + + if isinstance(integrated_unit, units.UnitFiber): + if integrated_unit.sample_orientation != sample_orientation: + integrated_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + integrated_unit = units.to_unit(integrated_unit) + integrated_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {integrated_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(output_unit, units.UnitFiber): + if output_unit.sample_orientation != sample_orientation: + output_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + output_unit = units.to_unit(output_unit) + output_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {output_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if reset: + self.reset() + logger.info(f"AzimuthalIntegrator was reset. Current grazing parameters: incident_angle: {incident_angle}, tilt_angle: {tilt_angle}, sample_orientation: {sample_orientation}.") + + return self.integrate_fiber(data=data, + npt_output=npt_output, output_unit=output_unit, output_unit_range=output_unit_range, + npt_integrated=npt_integrated, integrated_unit=integrated_unit, integrated_unit_range=integrated_unit_range, + sample_orientation=sample_orientation, + filename=filename, + correctSolidAngle=correctSolidAngle, + mask=mask, dummy=dummy, delta_dummy=delta_dummy, + polarization_factor=polarization_factor, dark=dark, flat=flat, + method=method, + normalization_factor=normalization_factor) + + def integrate2d_fiber(self, data, + npt_horizontal, horizontal_unit=units.Q_IP, horizontal_unit_range=None, + npt_vertical=1000, vertical_unit=units.Q_OOP, vertical_unit_range=None, + sample_orientation=None, + filename=None, + correctSolidAngle=True, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("no", "histogram", "cython"), + normalization_factor=1.0): + if isinstance(vertical_unit, units.UnitFiber): + sample_orientation = sample_orientation or vertical_unit.sample_orientation + else: + sample_orientation = sample_orientation or 1 + + reset = False + if isinstance(vertical_unit, units.UnitFiber): + if vertical_unit.sample_orientation != sample_orientation: + vertical_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation was set to {sample_orientation} for unit {vertical_unit}. 
AzimuthalIntegrator will be reset.") + reset = True + else: + vertical_unit = units.to_unit(vertical_unit) + vertical_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation was set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(horizontal_unit, units.UnitFiber): + if horizontal_unit.sample_orientation != sample_orientation: + horizontal_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation was set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + horizontal_unit = units.to_unit(horizontal_unit) + horizontal_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation was set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if reset: + self.reset() + logger.info(f"AzimuthalIntegrator was reset. Current fiber parameters: sample_orientation: {sample_orientation}.") + + + if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): + logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! The results could be wrong!") + + return self.integrate2d_ng(data, npt_rad=npt_horizontal, npt_azim=npt_vertical, + correctSolidAngle=correctSolidAngle, + mask=mask, dummy=dummy, delta_dummy=delta_dummy, + polarization_factor=polarization_factor, + dark=dark, flat=flat, method=method, + normalization_factor=normalization_factor, + radial_range=horizontal_unit_range, + azimuth_range=vertical_unit_range, + unit=(horizontal_unit, vertical_unit), + filename=filename) + + def integrate2d_grazing_incidence(self, data, + npt_horizontal, horizontal_unit=units.Q_IP, horizontal_unit_range=None, + npt_vertical=1000, vertical_unit=units.Q_OOP, vertical_unit_range=None, + incident_angle=None, tilt_angle=None, sample_orientation=None, + filename=None, + correctSolidAngle=True, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=("no", "histogram", "cython"), + normalization_factor=1.0): + + reset = False + + if isinstance(vertical_unit, units.UnitFiber): + incident_angle = incident_angle or vertical_unit.incident_angle + else: + incident_angle = incident_angle or 0.0 + + if isinstance(vertical_unit, units.UnitFiber): + if vertical_unit.incident_angle != incident_angle: + vertical_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + vertical_unit = units.to_unit(vertical_unit) + vertical_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(horizontal_unit, units.UnitFiber): + if horizontal_unit.incident_angle != incident_angle: + horizontal_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + horizontal_unit = units.to_unit(horizontal_unit) + horizontal_unit.set_incident_angle(incident_angle) + logger.info(f"Incident angle set to {incident_angle} for unit {horizontal_unit}. 
AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(vertical_unit, units.UnitFiber): + tilt_angle = tilt_angle or vertical_unit.tilt_angle + else: + tilt_angle = tilt_angle or 0.0 + + if isinstance(vertical_unit, units.UnitFiber): + if vertical_unit.tilt_angle != tilt_angle: + vertical_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + vertical_unit = units.to_unit(vertical_unit) + vertical_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(horizontal_unit, units.UnitFiber): + if horizontal_unit.tilt_angle != tilt_angle: + horizontal_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + horizontal_unit = units.to_unit(horizontal_unit) + horizontal_unit.set_tilt_angle(tilt_angle) + logger.info(f"Tilt angle set to {tilt_angle} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(vertical_unit, units.UnitFiber): + sample_orientation = sample_orientation or vertical_unit.sample_orientation + else: + sample_orientation = sample_orientation or 1 + + if isinstance(vertical_unit, units.UnitFiber): + if vertical_unit.sample_orientation != sample_orientation: + vertical_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + vertical_unit = units.to_unit(vertical_unit) + vertical_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {vertical_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if isinstance(horizontal_unit, units.UnitFiber): + if horizontal_unit.sample_orientation != sample_orientation: + horizontal_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + else: + horizontal_unit = units.to_unit(horizontal_unit) + horizontal_unit.set_sample_orientation(sample_orientation) + logger.info(f"Sample orientation set to {sample_orientation} for unit {horizontal_unit}. AzimuthalIntegrator will be reset.") + reset = True + + if reset: + self.reset() + logger.info(f"AzimuthalIntegrator was reset. Current grazing parameters: incident_angle: {incident_angle}, tilt_angle: {tilt_angle}, sample_orientation: {sample_orientation}.") + + if (isinstance(method, (tuple, list)) and method[0] != "no") or (isinstance(method, IntegrationMethod) and method.split != "no"): + logger.warning(f"Method {method} is using a pixel-splitting scheme. GI integration should be use WITHOUT PIXEL-SPLITTING! 
The results could be wrong!") + + return self.integrate2d_fiber(data=data, npt_horizontal=npt_horizontal, npt_vertical=npt_vertical, + horizontal_unit=horizontal_unit, vertical_unit=vertical_unit, + horizontal_unit_range=horizontal_unit_range, + vertical_unit_range=vertical_unit_range, + sample_orientation=sample_orientation, + filename=filename, + correctSolidAngle=correctSolidAngle, + mask=mask, dummy=dummy, delta_dummy=delta_dummy, + polarization_factor=polarization_factor, dark=dark, flat=flat, + method=method, + normalization_factor=normalization_factor, + ) diff --git a/src/pyFAI/integrator/load.py b/src/pyFAI/integrator/load.py new file mode 100644 index 000000000..adb6710ef --- /dev/null +++ b/src/pyFAI/integrator/load.py @@ -0,0 +1,352 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Project: Azimuthal integration +# https://github.com/silx-kit/pyFAI +# +# Copyright (C) 2012-2024 European Synchrotron Radiation Facility, Grenoble, France +# +# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# . +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# . +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
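The FiberIntegrator class added in fiber.py above is the main new API of this patch. A minimal usage sketch follows; it is not part of the patch, the detector model, geometry values and angles are purely illustrative, and only the parameter names taken from the signatures above are guaranteed:

    import numpy
    from pyFAI.detectors import detector_factory
    from pyFAI.integrator.fiber import FiberIntegrator

    det = detector_factory("Pilatus300k")                    # any supported detector (illustrative choice)
    fi = FiberIntegrator(dist=0.1, poni1=0.02, poni2=0.03,   # distances in meter
                         detector=det, wavelength=1e-10)     # wavelength in meter (1 A)
    img = numpy.random.poisson(10, det.shape).astype("float32")  # synthetic frame standing in for real data

    # 2D grazing-incidence map in (q_ip, q_oop); pixel splitting stays off,
    # as the warnings emitted by the methods above recommend
    res2d = fi.integrate2d_grazing_incidence(img,
                                             npt_horizontal=300, npt_vertical=300,
                                             incident_angle=0.003,  # radians
                                             sample_orientation=1,
                                             method=("no", "histogram", "cython"))

    # 1D out-of-plane profile obtained by collapsing the in-plane direction
    res1d = fi.integrate_grazing_incidence(img,
                                           npt_output=300, npt_integrated=100,
                                           incident_angle=0.003, sample_orientation=1)

As the checks above show, the integrator is reset whenever the incident angle, tilt angle or sample orientation attached to the units changes; keeping these parameters constant between frames avoids rebuilding the integration engines on every call.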
+ +__author__ = "Jérôme Kieffer" +__contact__ = "Jerome.Kieffer@ESRF.eu" +__license__ = "MIT" +__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" +__date__ = "09/10/2024" +__status__ = "stable" +__docformat__ = 'restructuredtext' + +""" +This module tries to load every possible type of integrator and registers them +into the registry +""" +import logging +logger = logging.getLogger(__name__) +from collections import OrderedDict +import numpy + +from ..method_registry import IntegrationMethod +from ..engines import CSR_engine as py_CSR_engine +from ..engines import CSC_engine as py_CSC_engine +# Register numpy integrators which are fail-safe +from ..engines import histogram_engine +IntegrationMethod(1, "no", "histogram", "python", old_method="numpy", + class_funct_ng=(None, histogram_engine.histogram1d_engine)) +IntegrationMethod(2, "no", "histogram", "python", old_method="numpy", + class_funct_ng=(None, histogram_engine.histogram2d_engine)) + +try: + from ..ext import histogram +except ImportError as error: + logger.error("Unable to import pyFAI.ext.histogram" + " Cython histogram implementation: %s", error) + histogram = None +else: + # Register histogram integrators + IntegrationMethod(1, "no", "histogram", "cython", old_method="cython", + class_funct_legacy=(None, histogram.histogram), + class_funct_ng=(None, histogram.histogram1d_engine)) + IntegrationMethod(2, "no", "histogram", "cython", old_method="cython", + class_funct_legacy=(None, histogram.histogram2d), + class_funct_ng=(None, histogram.histogram2d_engine)) + +try: + from ..ext import splitBBox # IGNORE:F0401 +except ImportError as error: + logger.error("Unable to import pyFAI.ext.splitBBox" + " Bounding Box pixel splitting: %s", error) + splitBBox = None +else: + # Register splitBBox integrators + IntegrationMethod(1, "bbox", "histogram", "cython", old_method="bbox", + class_funct_legacy=(None, splitBBox.histoBBox1d), + class_funct_ng=(None, splitBBox.histoBBox1d_engine)) + IntegrationMethod(2, "bbox", "histogram", "cython", old_method="bbox", + class_funct_legacy=(None, splitBBox.histoBBox2d), + class_funct_ng=(None, splitBBox.histoBBox2d_engine),) + +try: + from ..ext import splitPixel +except ImportError as error: + logger.error("Unable to import pyFAI.ext.splitPixel full pixel splitting: %s", error) + logger.debug("Backtrace", exc_info=True) + splitPixel = None +else: + # Register splitPixel integrators + IntegrationMethod(1, "full", "histogram", "cython", old_method="splitpixel", + class_funct_legacy=(None, splitPixel.fullSplit1D), + class_funct_ng=(None, splitPixel.fullSplit1D_engine)) + IntegrationMethod(2, "full", "histogram", "cython", old_method="splitpixel", + class_funct_legacy=(None, splitPixel.fullSplit2D), + class_funct_ng=(None, splitPixel.fullSplit2D_engine)) + IntegrationMethod(2, "pseudo", "histogram", "cython", old_method="splitpixel", + class_funct_legacy=(None, splitPixel.fullSplit2D), + class_funct_ng=(None, splitPixel.pseudoSplit2D_engine)) + +try: + from ..ext import splitBBoxCSR # IGNORE:F0401 +except ImportError as error: + logger.error("Unable to import pyFAI.ext.splitBBoxCSR" + " CSR based azimuthal integration: %s", error) + splitBBoxCSR = None +else: + # Register splitBBoxCSR integrators + IntegrationMethod(1, "no", "CSR", "cython", old_method="nosplit_csr", + class_funct_legacy=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_legacy), + class_funct_ng=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "no", "CSR", 
"cython", old_method="nosplit_csr", + class_funct_legacy=(splitBBoxCSR.HistoBBox2d, splitBBoxCSR.HistoBBox2d.integrate)) + IntegrationMethod(1, "bbox", "CSR", "cython", old_method="csr", + class_funct_legacy=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_legacy), + class_funct_ng=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "bbox", "CSR", "cython", old_method="csr", + class_funct_legacy=(splitBBoxCSR.HistoBBox2d, splitBBoxCSR.HistoBBox2d.integrate)) + + IntegrationMethod(1, "no", "CSR", "python", + class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate)) + IntegrationMethod(2, "no", "CSR", "python", + class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate)) + IntegrationMethod(1, "bbox", "CSR", "python", + class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate)) + IntegrationMethod(2, "bbox", "CSR", "python", + class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate)) + +try: + from ..ext import splitBBoxCSC # IGNORE:F0401 +except ImportError as error: + logger.error("Unable to import pyFAI.ext.splitBBoxCSC" + " CSC based azimuthal integration: %s", error) + splitBBoxCSC = None +else: + # Register splitBBoxCSC integrators + IntegrationMethod(1, "no", "CSC", "cython", + class_funct_ng=(splitBBoxCSC.HistoBBox1d, splitBBoxCSC.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "no", "CSC", "cython", + class_funct_ng=(splitBBoxCSC.HistoBBox2d, splitBBoxCSC.HistoBBox2d.integrate_ng)) + IntegrationMethod(1, "bbox", "CSC", "cython", + class_funct_ng=(splitBBoxCSC.HistoBBox1d, splitBBoxCSC.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "bbox", "CSC", "cython", + class_funct_ng=(splitBBoxCSC.HistoBBox2d, splitBBoxCSC.HistoBBox2d.integrate_ng)) + IntegrationMethod(1, "no", "CSC", "python", + class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate)) + IntegrationMethod(2, "no", "CSC", "python", + class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate)) + IntegrationMethod(1, "bbox", "CSC", "python", + class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate)) + IntegrationMethod(2, "bbox", "CSC", "python", + class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate)) + +try: + from ..ext import splitBBoxLUT +except ImportError as error: + logger.warning("Unable to import pyFAI.ext.splitBBoxLUT for" + " Look-up table based azimuthal integration") + logger.debug("Backtrace", exc_info=True) + splitBBoxLUT = None +else: + # Register splitBBoxLUT integrators + IntegrationMethod(1, "bbox", "LUT", "cython", old_method="lut", + class_funct_legacy=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate), + class_funct_ng=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "bbox", "LUT", "cython", old_method="lut", + class_funct_legacy=(splitBBoxLUT.HistoBBox2d, splitBBoxLUT.HistoBBox2d.integrate)) + IntegrationMethod(1, "no", "LUT", "cython", old_method="nosplit_lut", + class_funct_legacy=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_legacy), + class_funct_ng=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_ng)) + IntegrationMethod(2, "no", "LUT", "cython", old_method="nosplit_lut", + class_funct_legacy=(splitBBoxLUT.HistoBBox2d, splitBBoxLUT.HistoBBox2d.integrate), + class_funct_ng=(splitBBoxLUT.HistoBBox2d, 
splitBBoxLUT.HistoBBox2d.integrate_ng))
+
+try:
+    from ..ext import splitPixelFullLUT
+except ImportError as error:
+    logger.warning("Unable to import pyFAI.ext.splitPixelFullLUT for"
+                   " Look-up table based azimuthal integration")
+    logger.debug("Backtrace", exc_info=True)
+    splitPixelFullLUT = None
+else:
+    # Register splitPixelFullLUT integrators
+    IntegrationMethod(1, "full", "LUT", "cython", old_method="full_lut",
+                      class_funct_legacy=(splitPixelFullLUT.HistoLUT1dFullSplit, splitPixelFullLUT.HistoLUT1dFullSplit.integrate),
+                      class_funct_ng=(splitPixelFullLUT.HistoLUT1dFullSplit, splitPixelFullLUT.HistoLUT1dFullSplit.integrate_ng))
+    IntegrationMethod(2, "full", "LUT", "cython", old_method="full_lut",
+                      class_funct_legacy=(splitPixelFullLUT.HistoLUT2dFullSplit, splitPixelFullLUT.HistoLUT2dFullSplit.integrate),
+                      class_funct_ng=(splitPixelFullLUT.HistoLUT2dFullSplit, splitPixelFullLUT.HistoLUT2dFullSplit.integrate_ng))
+
+try:
+    from ..ext import splitPixelFullCSR  # IGNORE:F0401
+except ImportError as error:
+    logger.error("Unable to import pyFAI.ext.splitPixelFullCSR"
+                 " CSR based azimuthal integration: %s", error)
+    splitPixelFullCSR = None
+else:
+    # Register splitPixelFullCSR integrators
+    IntegrationMethod(1, "full", "CSR", "cython", old_method="full_csr",
+                      class_funct_legacy=(splitPixelFullCSR.FullSplitCSR_1d, splitPixelFullCSR.FullSplitCSR_1d.integrate_legacy),
+                      class_funct_ng=(splitPixelFullCSR.FullSplitCSR_1d, splitPixelFullCSR.FullSplitCSR_1d.integrate_ng))
+    IntegrationMethod(2, "full", "CSR", "cython", old_method="full_csr",
+                      class_funct_legacy=(splitPixelFullCSR.FullSplitCSR_2d, splitPixelFullCSR.FullSplitCSR_2d.integrate),
+                      class_funct_ng=(splitPixelFullCSR.FullSplitCSR_2d, splitPixelFullCSR.FullSplitCSR_2d.integrate_ng))
+    IntegrationMethod(1, "full", "CSR", "python",
+                      class_funct_legacy=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate),
+                      class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate))
+    IntegrationMethod(2, "full", "CSR", "python",
+                      class_funct_legacy=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate),
+                      class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate))
+
+try:
+    from ..ext import splitPixelFullCSC  # IGNORE:F0401
+except ImportError as error:
+    logger.error("Unable to import pyFAI.ext.splitPixelFullCSC"
+                 " CSC based azimuthal integration: %s", error)
+    splitPixelFullCSC = None
+else:
+    # Register splitPixelFullCSC integrators
+    IntegrationMethod(1, "full", "CSC", "cython",
+                      class_funct_ng=(splitPixelFullCSC.FullSplitCSC_1d, splitPixelFullCSC.FullSplitCSC_1d.integrate_ng))
+    IntegrationMethod(2, "full", "CSC", "cython",
+                      class_funct_ng=(splitPixelFullCSC.FullSplitCSC_2d, splitPixelFullCSC.FullSplitCSC_2d.integrate_ng))
+    IntegrationMethod(1, "full", "CSC", "python",
+                      class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate))
+    IntegrationMethod(2, "full", "CSC", "python",
+                      class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate))
+
+try:
+    from ..opencl import ocl
+except ImportError:
+    ocl = None
+
+if ocl:
+    devices_list = []
+    devtype_list = []
+    devices = OrderedDict()
+    perf = []
+    for platform in ocl.platforms:
+        for device in platform.devices:
+            perf.append(device.flops)
+            devices_list.append((platform.id, device.id))
+            devtype_list.append(device.type.lower())
+
+    for idx in (len(perf) - 1 - numpy.argsort(perf)):
+        device = devices_list[idx]
+        devices[device] =
(f"{ocl.platforms[device[0]].name} / {ocl.platforms[device[0]].devices[device[1]].name}", + devtype_list[idx]) + + try: + from ..opencl import azim_hist as ocl_azim # IGNORE:F0401 + except ImportError as error: # IGNORE:W0703 + logger.error("Unable to import pyFAI.opencl.azim_hist: %s", error) + ocl_azim = None + else: + for ids, name in devices.items(): + IntegrationMethod(1, "no", "histogram", "OpenCL", + class_funct_ng=(ocl_azim.OCL_Histogram1d, ocl_azim.OCL_Histogram1d.integrate), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "no", "histogram", "OpenCL", + class_funct_ng=(ocl_azim.OCL_Histogram2d, ocl_azim.OCL_Histogram2d.integrate), + target=ids, target_name=name[0], target_type=name[1]) + try: + from ..opencl import azim_csr as ocl_azim_csr # IGNORE:F0401 + except ImportError as error: + logger.error("Unable to import pyFAI.opencl.azim_csr: %s", error) + ocl_azim_csr = None + else: + if splitBBoxCSR: + for ids, name in devices.items(): + IntegrationMethod(1, "bbox", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "bbox", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(1, "no", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "no", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + if splitPixelFullCSR: + for ids, name in devices.items(): + IntegrationMethod(1, "full", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "full", "CSR", "OpenCL", + class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), + class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + + try: + from ..opencl import azim_lut as ocl_azim_lut # IGNORE:F0401 + except ImportError as error: # IGNORE:W0703 + logger.error("Unable to import pyFAI.opencl.azim_lut: %s", error) + ocl_azim_lut = None + else: + if splitBBoxLUT: + for ids, name in devices.items(): + IntegrationMethod(1, "bbox", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "bbox", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, 
ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(1, "no", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "no", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + if splitPixelFullLUT: + for ids, name in devices.items(): + IntegrationMethod(1, "full", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + IntegrationMethod(2, "full", "LUT", "OpenCL", + class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), + class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), + target=ids, target_name=name[0], target_type=name[1]) + + try: + from ..opencl import sort as ocl_sort + except ImportError as error: # IGNORE:W0703 + logger.error("Unable to import pyFAI.opencl.sort: %s", error) + ocl_sort = None +else: + ocl_sort = ocl_azim = ocl_azim_csr = ocl_azim_lut = None + + +# A set of fail-save but precise methods. +PREFERED_METHODS_1D = IntegrationMethod.select_method(1, split="full", algo="histogram") + \ + IntegrationMethod.select_method(1, split="bbox", algo="histogram") + \ + IntegrationMethod.select_method(1, split="no", algo="histogram") +PREFERED_METHODS_2D = IntegrationMethod.select_method(2, split="full", algo="histogram") + \ + IntegrationMethod.select_method(2, split="pseudo", algo="histogram") + \ + IntegrationMethod.select_method(2, split="bbox", algo="histogram") + \ + IntegrationMethod.select_method(2, split="no", algo="histogram") diff --git a/src/pyFAI/integrator/meson.build b/src/pyFAI/integrator/meson.build new file mode 100644 index 000000000..3926f6be2 --- /dev/null +++ b/src/pyFAI/integrator/meson.build @@ -0,0 +1,8 @@ +py.install_sources( + ['__init__.py', + 'azimuthal.py', + 'fiber.py', + 'load.py'], + pure: false, # Will be installed next to binaries + subdir: 'pyFAI/integrator' # Folder relative to site-packages to install to +) diff --git a/src/pyFAI/io/integration_config.py b/src/pyFAI/io/integration_config.py index 4ad44b999..e5d81aaf7 100644 --- a/src/pyFAI/io/integration_config.py +++ b/src/pyFAI/io/integration_config.py @@ -31,14 +31,14 @@ __author__ = "Jerome Kieffer" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "04/07/2024" +__date__ = "09/10/2024" __docformat__ = 'restructuredtext' import logging from . import ponifile from .. import detectors -from .. import load_integrators from .. 
import method_registry +from ..integrator import load _logger = logging.getLogger(__name__) diff --git a/src/pyFAI/load_integrators.py b/src/pyFAI/load_integrators.py index 868755f58..9f6ae9c0b 100644 --- a/src/pyFAI/load_integrators.py +++ b/src/pyFAI/load_integrators.py @@ -30,323 +30,14 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "15/04/2024" +__date__ = "09/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' """ -This module tries to load every possible type of integrator and registers them -into the registry -""" -import logging -logger = logging.getLogger(__name__) -from collections import OrderedDict -import numpy - -from .method_registry import IntegrationMethod -from .engines import CSR_engine as py_CSR_engine -from .engines import CSC_engine as py_CSC_engine -# Register numpy integrators which are fail-safe -from .engines import histogram_engine -IntegrationMethod(1, "no", "histogram", "python", old_method="numpy", - class_funct_ng=(None, histogram_engine.histogram1d_engine)) -IntegrationMethod(2, "no", "histogram", "python", old_method="numpy", - class_funct_ng=(None, histogram_engine.histogram2d_engine)) - -try: - from .ext import histogram -except ImportError as error: - logger.error("Unable to import pyFAI.ext.histogram" - " Cython histogram implementation: %s", error) - histogram = None -else: - # Register histogram integrators - IntegrationMethod(1, "no", "histogram", "cython", old_method="cython", - class_funct_legacy=(None, histogram.histogram), - class_funct_ng=(None, histogram.histogram1d_engine)) - IntegrationMethod(2, "no", "histogram", "cython", old_method="cython", - class_funct_legacy=(None, histogram.histogram2d), - class_funct_ng=(None, histogram.histogram2d_engine)) - -try: - from .ext import splitBBox # IGNORE:F0401 -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitBBox" - " Bounding Box pixel splitting: %s", error) - splitBBox = None -else: - # Register splitBBox integrators - IntegrationMethod(1, "bbox", "histogram", "cython", old_method="bbox", - class_funct_legacy=(None, splitBBox.histoBBox1d), - class_funct_ng=(None, splitBBox.histoBBox1d_engine)) - IntegrationMethod(2, "bbox", "histogram", "cython", old_method="bbox", - class_funct_legacy=(None, splitBBox.histoBBox2d), - class_funct_ng=(None, splitBBox.histoBBox2d_engine),) - -try: - from .ext import splitPixel -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitPixel full pixel splitting: %s", error) - logger.debug("Backtrace", exc_info=True) - splitPixel = None -else: - # Register splitPixel integrators - IntegrationMethod(1, "full", "histogram", "cython", old_method="splitpixel", - class_funct_legacy=(None, splitPixel.fullSplit1D), - class_funct_ng=(None, splitPixel.fullSplit1D_engine)) - IntegrationMethod(2, "full", "histogram", "cython", old_method="splitpixel", - class_funct_legacy=(None, splitPixel.fullSplit2D), - class_funct_ng=(None, splitPixel.fullSplit2D_engine)) - IntegrationMethod(2, "pseudo", "histogram", "cython", old_method="splitpixel", - class_funct_legacy=(None, splitPixel.fullSplit2D), - class_funct_ng=(None, splitPixel.pseudoSplit2D_engine)) - -try: - from .ext import splitBBoxCSR # IGNORE:F0401 -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitBBoxCSR" - " CSR based azimuthal integration: %s", error) - splitBBoxCSR = None -else: - # Register splitBBoxCSR integrators - 
IntegrationMethod(1, "no", "CSR", "cython", old_method="nosplit_csr", - class_funct_legacy=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_legacy), - class_funct_ng=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "no", "CSR", "cython", old_method="nosplit_csr", - class_funct_legacy=(splitBBoxCSR.HistoBBox2d, splitBBoxCSR.HistoBBox2d.integrate)) - IntegrationMethod(1, "bbox", "CSR", "cython", old_method="csr", - class_funct_legacy=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_legacy), - class_funct_ng=(splitBBoxCSR.HistoBBox1d, splitBBoxCSR.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "bbox", "CSR", "cython", old_method="csr", - class_funct_legacy=(splitBBoxCSR.HistoBBox2d, splitBBoxCSR.HistoBBox2d.integrate)) - - IntegrationMethod(1, "no", "CSR", "python", - class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate)) - IntegrationMethod(2, "no", "CSR", "python", - class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate)) - IntegrationMethod(1, "bbox", "CSR", "python", - class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate)) - IntegrationMethod(2, "bbox", "CSR", "python", - class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate)) - -try: - from .ext import splitBBoxCSC # IGNORE:F0401 -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitBBoxCSC" - " CSC based azimuthal integration: %s", error) - splitBBoxCSC = None -else: - # Register splitBBoxCSC integrators - IntegrationMethod(1, "no", "CSC", "cython", - class_funct_ng=(splitBBoxCSC.HistoBBox1d, splitBBoxCSC.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "no", "CSC", "cython", - class_funct_ng=(splitBBoxCSC.HistoBBox2d, splitBBoxCSC.HistoBBox2d.integrate_ng)) - IntegrationMethod(1, "bbox", "CSC", "cython", - class_funct_ng=(splitBBoxCSC.HistoBBox1d, splitBBoxCSC.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "bbox", "CSC", "cython", - class_funct_ng=(splitBBoxCSC.HistoBBox2d, splitBBoxCSC.HistoBBox2d.integrate_ng)) - IntegrationMethod(1, "no", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate)) - IntegrationMethod(2, "no", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate)) - IntegrationMethod(1, "bbox", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate)) - IntegrationMethod(2, "bbox", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate)) - -try: - from .ext import splitBBoxLUT -except ImportError as error: - logger.warning("Unable to import pyFAI.ext.splitBBoxLUT for" - " Look-up table based azimuthal integration") - logger.debug("Backtrace", exc_info=True) - splitBBoxLUT = None -else: - # Register splitBBoxLUT integrators - IntegrationMethod(1, "bbox", "LUT", "cython", old_method="lut", - class_funct_legacy=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate), - class_funct_ng=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "bbox", "LUT", "cython", old_method="lut", - class_funct_legacy=(splitBBoxLUT.HistoBBox2d, splitBBoxLUT.HistoBBox2d.integrate)) - IntegrationMethod(1, "no", "LUT", "cython", old_method="nosplit_lut", - class_funct_legacy=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_legacy), - 
class_funct_ng=(splitBBoxLUT.HistoBBox1d, splitBBoxLUT.HistoBBox1d.integrate_ng)) - IntegrationMethod(2, "no", "LUT", "cython", old_method="nosplit_lut", - class_funct_legacy=(splitBBoxLUT.HistoBBox2d, splitBBoxLUT.HistoBBox2d.integrate), - class_funct_ng=(splitBBoxLUT.HistoBBox2d, splitBBoxLUT.HistoBBox2d.integrate_ng)) - -try: - from .ext import splitPixelFullLUT -except ImportError as error: - logger.warning("Unable to import pyFAI.ext.splitPixelFullLUT for" - " Look-up table based azimuthal integration") - logger.debug("Backtrace", exc_info=True) - splitPixelFullLUT = None -else: - # Register splitPixelFullLUT integrators - IntegrationMethod(1, "full", "LUT", "cython", old_method="full_lut", - class_funct_legacy=(splitPixelFullLUT.HistoLUT1dFullSplit, splitPixelFullLUT.HistoLUT1dFullSplit.integrate), - class_funct_ng=(splitPixelFullLUT.HistoLUT1dFullSplit, splitPixelFullLUT.HistoLUT1dFullSplit.integrate_ng)) - IntegrationMethod(2, "full", "LUT", "cython", old_method="full_lut", - class_funct_legacy=(splitPixelFullLUT.HistoLUT2dFullSplit, splitPixelFullLUT.HistoLUT2dFullSplit.integrate), - class_funct_ng=(splitPixelFullLUT.HistoLUT2dFullSplit, splitPixelFullLUT.HistoLUT2dFullSplit.integrate_ng)) - -try: - from .ext import splitPixelFullCSR # IGNORE:F0401 -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitPixelFullCSR" - " CSR based azimuthal integration: %s", error) - splitPixelFullCSR = None -else: - # Register splitPixelFullCSR integrators - IntegrationMethod(1, "full", "CSR", "cython", old_method="full_csr", - class_funct_legacy=(splitPixelFullCSR.FullSplitCSR_1d, splitPixelFullCSR.FullSplitCSR_1d.integrate_legacy), - class_funct_ng=(splitPixelFullCSR.FullSplitCSR_1d, splitPixelFullCSR.FullSplitCSR_1d.integrate_ng)) - IntegrationMethod(2, "full", "CSR", "cython", old_method="full_csr", - class_funct_legacy=(splitPixelFullCSR.FullSplitCSR_2d, splitPixelFullCSR.FullSplitCSR_2d.integrate), - class_funct_ng=(splitPixelFullCSR.FullSplitCSR_2d, splitPixelFullCSR.FullSplitCSR_2d.integrate_ng)) - IntegrationMethod(1, "full", "CSR", "python", - class_funct_legacy=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate), - class_funct_ng=(py_CSR_engine.CsrIntegrator1d, py_CSR_engine.CsrIntegrator1d.integrate)) - IntegrationMethod(2, "full", "CSR", "python", - class_funct_legacy=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate), - class_funct_ng=(py_CSR_engine.CsrIntegrator2d, py_CSR_engine.CsrIntegrator2d.integrate)) - -try: - from .ext import splitPixelFullCSC # IGNORE:F0401 -except ImportError as error: - logger.error("Unable to import pyFAI.ext.splitPixelFullCSC" - " CSC based azimuthal integration: %s", error) - splitPixelFullCSR = None -else: - # Register splitPixelFullCSC integrators - IntegrationMethod(1, "full", "CSC", "cython", - class_funct_ng=(splitPixelFullCSC.FullSplitCSC_1d, splitPixelFullCSC.FullSplitCSC_1d.integrate_ng)) - IntegrationMethod(2, "full", "CSC", "cython", - class_funct_ng=(splitPixelFullCSC.FullSplitCSC_2d, splitPixelFullCSC.FullSplitCSC_2d.integrate_ng)) - IntegrationMethod(1, "full", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator1d, py_CSC_engine.CscIntegrator1d.integrate)) - IntegrationMethod(2, "full", "CSC", "python", - class_funct_ng=(py_CSC_engine.CscIntegrator2d, py_CSC_engine.CscIntegrator2d.integrate)) - -try: - from .opencl import ocl -except ImportError: - ocl = None - -if ocl: - devices_list = [] - devtype_list = [] - devices = OrderedDict() - perf = [] - for platform in 
ocl.platforms: - for device in platform.devices: - perf.append(device.flops) - devices_list.append((platform.id, device.id)) - devtype_list.append(device.type.lower()) - - for idx in (len(perf) - 1 - numpy.argsort(perf)): - device = devices_list[idx] - devices[device] = (f"{ocl.platforms[device[0]].name} / {ocl.platforms[device[0]].devices[device[1]].name}", - devtype_list[idx]) - - try: - from .opencl import azim_hist as ocl_azim # IGNORE:F0401 - except ImportError as error: # IGNORE:W0703 - logger.error("Unable to import pyFAI.opencl.azim_hist: %s", error) - ocl_azim = None - else: - for ids, name in devices.items(): - IntegrationMethod(1, "no", "histogram", "OpenCL", - class_funct_ng=(ocl_azim.OCL_Histogram1d, ocl_azim.OCL_Histogram1d.integrate), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "no", "histogram", "OpenCL", - class_funct_ng=(ocl_azim.OCL_Histogram2d, ocl_azim.OCL_Histogram2d.integrate), - target=ids, target_name=name[0], target_type=name[1]) - try: - from .opencl import azim_csr as ocl_azim_csr # IGNORE:F0401 - except ImportError as error: - logger.error("Unable to import pyFAI.opencl.azim_csr: %s", error) - ocl_azim_csr = None - else: - if splitBBoxCSR: - for ids, name in devices.items(): - IntegrationMethod(1, "bbox", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "bbox", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(1, "no", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "no", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - if splitPixelFullCSR: - for ids, name in devices.items(): - IntegrationMethod(1, "full", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "full", "CSR", "OpenCL", - class_funct_legacy=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate), - class_funct_ng=(ocl_azim_csr.OCL_CSR_Integrator, ocl_azim_csr.OCL_CSR_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - - try: - from .opencl import azim_lut as ocl_azim_lut # IGNORE:F0401 - except ImportError as error: # IGNORE:W0703 - logger.error("Unable to import pyFAI.opencl.azim_lut: %s", error) - ocl_azim_lut = None - else: - if splitBBoxLUT: - for ids, name in devices.items(): - IntegrationMethod(1, "bbox", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - 
class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "bbox", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(1, "no", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "no", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - if splitPixelFullLUT: - for ids, name in devices.items(): - IntegrationMethod(1, "full", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - IntegrationMethod(2, "full", "LUT", "OpenCL", - class_funct_legacy=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate), - class_funct_ng=(ocl_azim_lut.OCL_LUT_Integrator, ocl_azim_lut.OCL_LUT_Integrator.integrate_ng), - target=ids, target_name=name[0], target_type=name[1]) - - try: - from .opencl import sort as ocl_sort - except ImportError as error: # IGNORE:W0703 - logger.error("Unable to import pyFAI.opencl.sort: %s", error) - ocl_sort = None -else: - ocl_sort = ocl_azim = ocl_azim_csr = ocl_azim_lut = None +Compatibility layer, the code has been moved to pyFAI.integrator.load +09/10/2024 +""" -# A set of fail-save but precise methods. 
-PREFERED_METHODS_1D = IntegrationMethod.select_method(1, split="full", algo="histogram") + \
-                      IntegrationMethod.select_method(1, split="bbox", algo="histogram") + \
-                      IntegrationMethod.select_method(1, split="no", algo="histogram")
-PREFERED_METHODS_2D = IntegrationMethod.select_method(2, split="full", algo="histogram") + \
-                      IntegrationMethod.select_method(2, split="pseudo", algo="histogram") + \
-                      IntegrationMethod.select_method(2, split="bbox", algo="histogram") + \
-                      IntegrationMethod.select_method(2, split="no", algo="histogram")
+from .integrator import load
diff --git a/src/pyFAI/meson.build b/src/pyFAI/meson.build
index 2cf43258e..784753684 100644
--- a/src/pyFAI/meson.build
+++ b/src/pyFAI/meson.build
@@ -5,6 +5,7 @@ subdir('engines')
 subdir('ext')
 subdir('geometry')
 subdir('gui')
+subdir('integrator')
 subdir('io')
 subdir('opencl')
 subdir('resources')

From e5a57d1ac17d6fccbecff4175d4504a511abeb8b Mon Sep 17 00:00:00 2001
From: Jerome Kieffer
Date: Wed, 9 Oct 2024 16:09:17 +0200
Subject: [PATCH 02/11] clean-up

---
 src/pyFAI/__init__.py                        | 19 ++++++++++++-------
 src/pyFAI/diffmap.py                         |  2 +-
 src/pyFAI/integrator/__init__.py             |  2 +-
 src/pyFAI/integrator/azimuthal.py            |  8 ++++----
 src/pyFAI/integrator/fiber.py                |  2 ++
 .../integrator/{load.py => load_engines.py}  |  0
 src/pyFAI/integrator/meson.build             |  2 +-
 src/pyFAI/io/integration_config.py           |  2 +-
 src/pyFAI/load_integrators.py                |  2 +-
 9 files changed, 23 insertions(+), 16 deletions(-)
 rename src/pyFAI/integrator/{load.py => load_engines.py} (100%)

diff --git a/src/pyFAI/__init__.py b/src/pyFAI/__init__.py
index d52a005f7..71ce9bcd1 100644
--- a/src/pyFAI/__init__.py
+++ b/src/pyFAI/__init__.py
@@ -28,7 +28,7 @@
 __author__ = "Jérôme Kieffer"
 __license__ = "MIT"
-__date__ = "28/09/2024"
+__date__ = "09/10/2024"
 import sys
 import os
@@ -57,21 +57,26 @@
 """
-@decorators.deprecated(replacement="pyFAI.azimuthalIntegrator.AzimuthalIntegrator", since_version="0.16")
+@decorators.deprecated(replacement="pyFAI.integrator.azimuthal.AzimuthalIntegrator", since_version="0.16")
 def AzimuthalIntegrator(*args, **kwargs):
-    from .azimuthalIntegrator import AzimuthalIntegrator
+    from .integrator.azimuthal import AzimuthalIntegrator
     return AzimuthalIntegrator(*args, **kwargs)
-def load(filename):
+def load(filename, type_="AzimuthalIntegrator"):
     """
     Load an azimuthal integrator from a filename description.
-    :param str filename: name of the file to load
+    :param str filename: name of the file to load, or dict of config or ponifile ...
     :return: instance of Gerometry of AzimuthalIntegrator set-up with the parameter from the file.
     """
-    from .azimuthalIntegrator import AzimuthalIntegrator
-    return AzimuthalIntegrator.sload(filename)
+    if type_=="AzimuthalIntegrator":
+        from .integrator.azimuthal import AzimuthalIntegrator
+        return AzimuthalIntegrator.sload(filename)
+    else:
+        from .geometry import Geometry
+        return Geometry.sload(filename).promote(type_)
+
+
 def detector_factory(name, config=None):
diff --git a/src/pyFAI/diffmap.py b/src/pyFAI/diffmap.py
index 7016ec529..12d8cfb0c 100644
--- a/src/pyFAI/diffmap.py
+++ b/src/pyFAI/diffmap.py
@@ -49,7 +49,7 @@
 import __main__ as main
 from .opencl import ocl
 from .
import version as PyFAI_VERSION, date as PyFAI_DATE, load -from .integrator.load import PREFERED_METHODS_2D, PREFERED_METHODS_1D +from .integrator.load_engines import PREFERED_METHODS_2D, PREFERED_METHODS_1D from .io import Nexus, get_isotime, h5py from .worker import Worker, _reduce_images from .method_registry import Method, IntegrationMethod diff --git a/src/pyFAI/integrator/__init__.py b/src/pyFAI/integrator/__init__.py index d26c923a5..9301f71ac 100644 --- a/src/pyFAI/integrator/__init__.py +++ b/src/pyFAI/integrator/__init__.py @@ -40,6 +40,6 @@ * fiber * ... """ -from .load import * +from . import load_engines # from .azimuthal import AzimuthalIntegrator # from .fiber import FiberIntegrator diff --git a/src/pyFAI/integrator/azimuthal.py b/src/pyFAI/integrator/azimuthal.py index 2a4588c20..6b523a2d6 100644 --- a/src/pyFAI/integrator/azimuthal.py +++ b/src/pyFAI/integrator/azimuthal.py @@ -51,10 +51,10 @@ error = None from ..method_registry import IntegrationMethod -from .load import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \ - splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ - histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ - PREFERED_METHODS_1D, PREFERED_METHODS_2D +from .load_engines import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \ + splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ + histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ + PREFERED_METHODS_1D, PREFERED_METHODS_2D from ..engines import Engine diff --git a/src/pyFAI/integrator/fiber.py b/src/pyFAI/integrator/fiber.py index 6098b8ae2..17f8fa7f0 100644 --- a/src/pyFAI/integrator/fiber.py +++ b/src/pyFAI/integrator/fiber.py @@ -39,6 +39,8 @@ import numpy from .azimuthal import AzimuthalIntegrator from ..containers import Integrate1dResult +from ..method_registry import IntegrationMethod +from ..io import save_integrate_result from .. import units class FiberIntegrator(AzimuthalIntegrator): diff --git a/src/pyFAI/integrator/load.py b/src/pyFAI/integrator/load_engines.py similarity index 100% rename from src/pyFAI/integrator/load.py rename to src/pyFAI/integrator/load_engines.py diff --git a/src/pyFAI/integrator/meson.build b/src/pyFAI/integrator/meson.build index 3926f6be2..2573c18a2 100644 --- a/src/pyFAI/integrator/meson.build +++ b/src/pyFAI/integrator/meson.build @@ -2,7 +2,7 @@ py.install_sources( ['__init__.py', 'azimuthal.py', 'fiber.py', - 'load.py'], + 'load_engines.py'], pure: false, # Will be installed next to binaries subdir: 'pyFAI/integrator' # Folder relative to site-packages to install to ) diff --git a/src/pyFAI/io/integration_config.py b/src/pyFAI/io/integration_config.py index e5d81aaf7..1bdf44be0 100644 --- a/src/pyFAI/io/integration_config.py +++ b/src/pyFAI/io/integration_config.py @@ -38,7 +38,7 @@ from . import ponifile from .. import detectors from .. 
import method_registry -from ..integrator import load +from ..integrator import load_engines _logger = logging.getLogger(__name__) diff --git a/src/pyFAI/load_integrators.py b/src/pyFAI/load_integrators.py index 9f6ae9c0b..508d4df1b 100644 --- a/src/pyFAI/load_integrators.py +++ b/src/pyFAI/load_integrators.py @@ -40,4 +40,4 @@ 09/10/2024 """ -from .integrator import load +from .integrator.load_engines import * From 7fdddfb3c56768c8d96e5308f3441ffcb1827c51 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Wed, 9 Oct 2024 17:16:30 +0200 Subject: [PATCH 03/11] proper import --- src/pyFAI/io/integration_config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyFAI/io/integration_config.py b/src/pyFAI/io/integration_config.py index 1bdf44be0..8bba899e8 100644 --- a/src/pyFAI/io/integration_config.py +++ b/src/pyFAI/io/integration_config.py @@ -38,7 +38,7 @@ from . import ponifile from .. import detectors from .. import method_registry -from ..integrator import load_engines +from ..integrator import load_engines as load_integrators _logger = logging.getLogger(__name__) From e9bff111a24e7997286a57aba469e2c344ebcfa7 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 08:21:38 +0200 Subject: [PATCH 04/11] Update changelog --- doc/source/changelog.rst | 4 ++++ version.py | 6 +++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index 13da90ee6..75181b020 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -5,6 +5,10 @@ Change-log of versions ====================== +Next Version: 2025 +------------------ +- Refactoring of the integrator classes + 2024.09 12/09/2024 ------------------ - New tutorials: diff --git a/version.py b/version.py index 5a7fba375..0b57d2232 100755 --- a/version.py +++ b/version.py @@ -47,7 +47,7 @@ __authors__ = ["Jérôme Kieffer", "V. 
Valls"] __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "02/09/2024" +__date__ = "10/10/2024" __status__ = "production" __docformat__ = 'restructuredtext' __all__ = ["date", "version_info", "strictversion", "hexversion", "debianversion", @@ -61,8 +61,8 @@ "final": 15} MAJOR = 2024 -MINOR = 9 -MICRO = 1 +MINOR = 10 +MICRO = 0 RELEV = "dev" # <16 SERIAL = 0 # <16 From 6937065e3fc6e7b6dc3bd5d1143792dafe76b103 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 09:17:24 +0200 Subject: [PATCH 05/11] Change the import path for AzimuthalIntegrator --- src/pyFAI/app/saxs.py | 2 +- src/pyFAI/app/waxs.py | 2 +- src/pyFAI/azimuthalIntegrator.py | 5 +- src/pyFAI/benchmark/__init__.py | 2 +- src/pyFAI/geometry/core.py | 4 +- src/pyFAI/goniometer.py | 2 +- src/pyFAI/gui/cli_calibration.py | 2 +- src/pyFAI/gui/pilx/utils.py | 2 +- src/pyFAI/gui/tasks/IntegrationTask.py | 2 +- src/pyFAI/integrator/__init__.py | 3 +- src/pyFAI/integrator/azimuthal.py | 1765 +------------------ src/pyFAI/integrator/common.py | 1751 ++++++++++++++++++ src/pyFAI/integrator/meson.build | 1 + src/pyFAI/load_integrators.py | 6 +- src/pyFAI/multi_geometry.py | 2 +- src/pyFAI/opencl/test/test_ocl_azim_csr.py | 2 +- src/pyFAI/opencl/test/test_ocl_azim_lut.py | 2 +- src/pyFAI/opencl/test/test_ocl_histo.py | 2 +- src/pyFAI/opencl/test/test_peak_finder.py | 2 +- src/pyFAI/test/test_azimuthal_integrator.py | 2 +- src/pyFAI/test/test_blob_detection.py | 2 +- src/pyFAI/test/test_bug_regression.py | 2 +- src/pyFAI/test/test_calibrant.py | 2 +- src/pyFAI/test/test_detector.py | 2 +- src/pyFAI/test/test_error_model.py | 2 +- src/pyFAI/test/test_export.py | 2 +- src/pyFAI/test/test_flat.py | 2 +- src/pyFAI/test/test_geometry.py | 2 +- src/pyFAI/test/test_integrate.py | 2 +- src/pyFAI/test/test_invert_geometry.py | 2 +- src/pyFAI/test/test_mask.py | 2 +- src/pyFAI/test/test_multi_geometry.py | 2 +- src/pyFAI/test/test_pickle.py | 2 +- src/pyFAI/test/test_polarization.py | 2 +- src/pyFAI/test/test_saxs.py | 2 +- src/pyFAI/test/test_sparse.py | 2 +- src/pyFAI/test/test_split_pixel.py | 2 +- src/pyFAI/test/test_worker.py | 2 +- src/pyFAI/worker.py | 2 +- 39 files changed, 1841 insertions(+), 1758 deletions(-) create mode 100644 src/pyFAI/integrator/common.py diff --git a/src/pyFAI/app/saxs.py b/src/pyFAI/app/saxs.py index fc59415e1..9c99fceee 100755 --- a/src/pyFAI/app/saxs.py +++ b/src/pyFAI/app/saxs.py @@ -52,7 +52,7 @@ from .. import date as pyFAI_date, version as pyFAI_version, units, utils from ..method_registry import IntegrationMethod -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator hc = units.hc diff --git a/src/pyFAI/app/waxs.py b/src/pyFAI/app/waxs.py index 4ad3e4b19..7f63be1c1 100755 --- a/src/pyFAI/app/waxs.py +++ b/src/pyFAI/app/waxs.py @@ -52,7 +52,7 @@ from .. 
import date as pyFAI_date, version as pyFAI_version, units, utils from ..average import average_dark -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..method_registry import IntegrationMethod hc = units.hc diff --git a/src/pyFAI/azimuthalIntegrator.py b/src/pyFAI/azimuthalIntegrator.py index c46591add..a106e2c05 100644 --- a/src/pyFAI/azimuthalIntegrator.py +++ b/src/pyFAI/azimuthalIntegrator.py @@ -35,9 +35,10 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "09/10/2024" +__date__ = "10/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' - from .integrator.azimuthal import AzimuthalIntegrator, logger +from .utils.decorators import deprecated_warning +deprecated_warning("Module", "pyFAI.azimuthalIntegrator", replacement="pyFAI.integrator.azimuthal", since_version="2024.10", only_once=False) diff --git a/src/pyFAI/benchmark/__init__.py b/src/pyFAI/benchmark/__init__.py index dfceb115c..f2ec4ab16 100644 --- a/src/pyFAI/benchmark/__init__.py +++ b/src/pyFAI/benchmark/__init__.py @@ -42,7 +42,7 @@ import numpy from .. import load, detector_factory -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..method_registry import IntegrationMethod, Method from ..utils import mathutil from ..test.utilstest import UtilsTest diff --git a/src/pyFAI/geometry/core.py b/src/pyFAI/geometry/core.py index 5e617250f..ffe72f3f3 100644 --- a/src/pyFAI/geometry/core.py +++ b/src/pyFAI/geometry/core.py @@ -40,7 +40,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "09/10/2024" +__date__ = "10/10/2024" __status__ = "production" __docformat__ = 'restructuredtext' @@ -175,7 +175,7 @@ def __init__(self, dist=1, poni1=0, poni2=0, rot1=0, rot2=0, rot3=0, self._wavelength = wavelength self._oversampling = None self._correct_solid_angle_for_spline = True - self._sem = threading.Semaphore() + self._sem = threading.Semaphore() # ensure the object remains unchanged while updating self._transmission_normal = None self._parallax = None diff --git a/src/pyFAI/goniometer.py b/src/pyFAI/goniometer.py index a73f335e9..a555a1097 100644 --- a/src/pyFAI/goniometer.py +++ b/src/pyFAI/goniometer.py @@ -50,7 +50,7 @@ from .detectors import detector_factory, Detector from .geometry import Geometry from .geometryRefinement import GeometryRefinement -from .azimuthalIntegrator import AzimuthalIntegrator +from .integrator.azimuthal import AzimuthalIntegrator from .utils import StringTypes from .multi_geometry import MultiGeometry from .units import CONST_hc, CONST_q diff --git a/src/pyFAI/gui/cli_calibration.py b/src/pyFAI/gui/cli_calibration.py index a4a89a3cf..96e26f4b5 100644 --- a/src/pyFAI/gui/cli_calibration.py +++ b/src/pyFAI/gui/cli_calibration.py @@ -66,7 +66,7 @@ from ..utils import measure_offset, expand_args, \ readFloatFromKeyboard, FixedParameters, round_fft, \ win32 -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..units import hc from .. import version as PyFAI_VERSION from .. 
import date as PyFAI_DATE diff --git a/src/pyFAI/gui/pilx/utils.py b/src/pyFAI/gui/pilx/utils.py index 9f382090f..f92689cbf 100644 --- a/src/pyFAI/gui/pilx/utils.py +++ b/src/pyFAI/gui/pilx/utils.py @@ -40,7 +40,7 @@ import h5py import numpy import os.path -from ...azimuthalIntegrator import AzimuthalIntegrator +from ...integrator.azimuthal import AzimuthalIntegrator from ...detectors import Detector diff --git a/src/pyFAI/gui/tasks/IntegrationTask.py b/src/pyFAI/gui/tasks/IntegrationTask.py index 094b6c355..a75479fd0 100644 --- a/src/pyFAI/gui/tasks/IntegrationTask.py +++ b/src/pyFAI/gui/tasks/IntegrationTask.py @@ -37,7 +37,7 @@ import pyFAI.utils from .AbstractCalibrationTask import AbstractCalibrationTask -from pyFAI.azimuthalIntegrator import AzimuthalIntegrator +from pyFAI.integrator.azimuthal import AzimuthalIntegrator from ..utils import unitutils from ..model.DataModel import DataModel # from ..model.GeometryModel import GeometryModel diff --git a/src/pyFAI/integrator/__init__.py b/src/pyFAI/integrator/__init__.py index 9301f71ac..e150699b5 100644 --- a/src/pyFAI/integrator/__init__.py +++ b/src/pyFAI/integrator/__init__.py @@ -30,7 +30,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "09/10/2024" +__date__ = "10/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' @@ -41,5 +41,6 @@ * ... """ from . import load_engines +# from .common import Integrator # from .azimuthal import AzimuthalIntegrator # from .fiber import FiberIntegrator diff --git a/src/pyFAI/integrator/azimuthal.py b/src/pyFAI/integrator/azimuthal.py index 6b523a2d6..0c209a2ba 100644 --- a/src/pyFAI/integrator/azimuthal.py +++ b/src/pyFAI/integrator/azimuthal.py @@ -30,32 +30,32 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "09/10/2024" +__date__ = "10/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' import logging logger = logging.getLogger(__name__) import warnings -import threading -import gc from math import pi, log import numpy -from ..geometry import Geometry +from .common import Integrator +# from ..geometry import Geometry from .. import units from ..utils import EPS32, deg2rad, crc32 -from ..utils.decorators import deprecated, deprecated_warning +# from ..utils.decorators import deprecated, deprecated_warning from ..containers import Integrate1dResult, Integrate2dResult, SeparateResult, ErrorModel from ..io import DefaultAiWriter, save_integrate_result from ..io.ponifile import PoniFile error = None from ..method_registry import IntegrationMethod - -from .load_engines import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \ - splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ - histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ - PREFERED_METHODS_1D, PREFERED_METHODS_2D - +# +from .load_engines import ocl_sort +#ocl_azim_csr, ocl_azim_lut, , histogram, splitBBox, \ +# splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \ +# histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \ +# PREFERED_METHODS_1D, PREFERED_METHODS_2D +# from ..engines import Engine # Few constants for engine names: @@ -67,7 +67,7 @@ EXT_CSR_ENGINE = "csr_integrator" -class AzimuthalIntegrator(Geometry): +class AzimuthalIntegrator(Integrator): """ This class is an azimuthal integrator based on P. 
Boesecke's geometry and histogram algorithm by Manolo S. del Rio and V.A Sole @@ -81,1027 +81,16 @@ class AzimuthalIntegrator(Geometry): >>> regrouped = ai.integrate2d(data, npt_rad, npt_azim, unit="q_nm^-1")[0] """ - DEFAULT_METHOD_1D = PREFERED_METHODS_1D[0] - DEFAULT_METHOD_2D = PREFERED_METHODS_2D[0] - "Fail-safe low-memory integrator" - - USE_LEGACY_MASK_NORMALIZATION = True - """If true, the Python engine integrator will normalize the mask to use the - most frequent value of the mask as the non-masking value. - - This behaviour is not consistant with other engines and is now deprecated. - This flag will be turned off in the comming releases. - - Turning off this flag force the user to provide a mask with 0 as non-masking - value. And any non-zero as masking value (negative or positive value). A - boolean mask is also accepted (`True` is the masking value). - """ - - def __init__(self, dist=1, poni1=0, poni2=0, - rot1=0, rot2=0, rot3=0, - pixel1=None, pixel2=None, - splineFile=None, detector=None, wavelength=None, orientation=0): - """ - :param dist: distance sample - detector plan (orthogonal distance, not along the beam), in meter. - :type dist: float - :param poni1: coordinate of the point of normal incidence along the detector's first dimension, in meter - :type poni1: float - :param poni2: coordinate of the point of normal incidence along the detector's second dimension, in meter - :type poni2: float - :param rot1: first rotation from sample ref to detector's ref, in radians - :type rot1: float - :param rot2: second rotation from sample ref to detector's ref, in radians - :type rot2: float - :param rot3: third rotation from sample ref to detector's ref, in radians - :type rot3: float - :param pixel1: Deprecated. Pixel size of the fist dimension of the detector, in meter. - If both pixel1 and pixel2 are not None, detector pixel size is overwritten. - Prefer defining the detector pixel size on the provided detector object. - Prefer defining the detector pixel size on the provided detector - object (``detector.pixel1 = 5e-6``). - :type pixel1: float - :param pixel2: Deprecated. Pixel size of the second dimension of the detector, in meter. - If both pixel1 and pixel2 are not None, detector pixel size is overwritten. - Prefer defining the detector pixel size on the provided detector - object (``detector.pixel2 = 5e-6``). - :type pixel2: float - :param splineFile: Deprecated. File containing the geometric distortion of the detector. - If not None, pixel1 and pixel2 are ignored and detector spline is overwritten. - Prefer defining the detector spline manually - (``detector.splineFile = "file.spline"``). - :type splineFile: str - :param detector: name of the detector or Detector instance. String - description is deprecated. Prefer using the result of the detector - factory: ``pyFAI.detector_factory("eiger4m")`` - :type detector: str or pyFAI.Detector - :param float wavelength: Wave length used in meter - :param int orientation: orientation of the detector, see pyFAI.detectors.orientation.Orientation - """ - Geometry.__init__(self, dist, poni1, poni2, - rot1, rot2, rot3, - pixel1, pixel2, splineFile, detector, wavelength, orientation) - - # mask, maskfile, darkcurrent and flatfield are properties pointing to - # self.detector now (16/06/2017) - - self._lock = threading.Semaphore() - self.engines = {} # key: name of the engine, - - self._empty = 0.0 - - def reset(self, collect_garbage=True): - """Reset azimuthal integrator in addition to other arrays. 
- - :param collect_garbage: set to False to prevent garbage collection, faster - """ - Geometry.reset(self, collect_garbage=False) - self.reset_engines(collect_garbage) - - def reset_engines(self, collect_garbage=True): - """Urgently free memory by deleting all regrid-engines - - :param collect_garbage: set to False to prevent garbage collection, faster - """ - with self._lock: - for key in list(self.engines.keys()): # explicit copy - self.engines.pop(key).reset() - if collect_garbage: - gc.collect() - - def create_mask(self, data, mask=None, - dummy=None, delta_dummy=None, - unit=None, radial_range=None, - azimuth_range=None, - mode="normal"): - """ - Combines various masks into another one. - - :param data: input array of data - :type data: ndarray - :param mask: input mask (if none, self.mask is used) - :type mask: ndarray - :param dummy: value of dead pixels - :type dummy: float - :param delta_dumy: precision of dummy pixels - :type delta_dummy: float - :param mode: can be "normal" or "numpy" (inverted) or "where" applied to the mask - :type mode: str - - :return: the new mask - :rtype: ndarray of bool - - This method combine two masks (dynamic mask from *data & - dummy* and *mask*) to generate a new one with the 'or' binary - operation. One can adjust the level, with the *dummy* and - the *delta_dummy* parameter, when you consider the *data* - values needs to be masked out. - - This method can work in two different *mode*: - - * "normal": False for valid pixels, True for bad pixels - * "numpy": True for valid pixels, false for others - * "where": does a numpy.where on the "numpy" output - - This method tries to accomodate various types of masks (like - valid=0 & masked=-1, ...) - - Note for the developper: we use a lot of numpy.logical_or in this method, - the out= argument allows to recycle buffers and save considerable time in - allocating temporary arrays. - """ - logical_or = numpy.logical_or - shape = data.shape - # ^^^^ this is why data is mandatory ! - if mask is None: - mask = self.mask - if mask is None: - mask = numpy.zeros(shape, dtype=bool) - else: - mask = mask.astype(bool) - if self.USE_LEGACY_MASK_NORMALIZATION: - if mask.sum(dtype=int) > mask.size // 2: - reason = "The provided mask is not complient with other engines. "\ - "The feature which automatically invert it will be removed soon. 
"\ - "For more information see https://github.com/silx-kit/pyFAI/pull/868" - deprecated_warning(__name__, name="provided mask content", reason=reason) - numpy.logical_not(mask, mask) - if (mask.shape != shape): - try: - mask = mask[:shape[0],:shape[1]] - except Exception as error: # IGNORE:W0703 - logger.error("Mask provided has wrong shape:" - " expected: %s, got %s, error: %s", - shape, mask.shape, error) - mask = numpy.zeros(shape, dtype=bool) - if dummy is not None: - if delta_dummy is None: - logical_or(mask, (data == dummy), out=mask) - else: - logical_or(mask, abs(data - dummy) <= delta_dummy, out=mask) - - if radial_range is not None: - assert unit, "unit is needed when building a mask based on radial_range" - if isinstance(unit, (tuple, list)) and len(unit) == 2: - radial_unit = units.to_unit(unit[0]) - else: - radial_unit = units.to_unit(unit) - rad = self.array_from_unit(shape, "center", radial_unit, scale=False) - logical_or(mask, rad < radial_range[0], out=mask) - logical_or(mask, rad > radial_range[1], out=mask) - if azimuth_range is not None: - if isinstance(unit, (tuple, list)) and len(unit) == 2: - azimuth_unit = units.to_unit(unit[1]) - chi = self.array_from_unit(shape, "center", azimuth_unit, scale=False) - logical_or(mask, chi < azimuth_range[0], out=mask) - logical_or(mask, chi > azimuth_range[1], out=mask) - - # Prepare alternative representation for output: - if mode == "numpy": - numpy.logical_not(mask, mask) - elif mode == "where": - mask = numpy.where(numpy.logical_not(mask)) - return mask - - def dark_correction(self, data, dark=None): - """ - Correct for Dark-current effects. - If dark is not defined, correct for a dark set by "set_darkfiles" - - :param data: input ndarray with the image - :param dark: ndarray with dark noise or None - :return: 2tuple: corrected_data, dark_actually used (or None) - """ - dark = dark if dark is not None else self.detector.darkcurrent - if dark is not None: - return data - dark, dark - else: - return data, None - - def flat_correction(self, data, flat=None): - """ - Correct for flat field. - If flat is not defined, correct for a flat set by "set_flatfiles" - - :param data: input ndarray with the image - :param flat: ndarray with flatfield or None for no correction - :return: 2tuple: corrected_data, flat_actually used (or None) - """ - flat = flat if flat is not None else self.detector.flatfield - if flat is not None: - return data / flat, flat - else: - return data, None - - def _normalize_method(self, method, dim, default): - """ - :rtype: IntegrationMethod - """ - requested_method = method - method = IntegrationMethod.select_one_available(method, dim=dim, default=None, degradable=False) - if method is not None: - return method - method = IntegrationMethod.select_one_available(requested_method, dim=dim, default=default, degradable=True) - logger.warning("Method requested '%s' not available. 
Method '%s' will be used", requested_method, method) - return default - - def setup_sparse_integrator(self, - shape, - npt, - mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", algo="CSR", - empty=None, scale=True): - """ - Prepare a sparse-matrix integrator based on LUT, CSR or CSC format - - :param shape: shape of the dataset - :type shape: (int, int) - :param npt: number of points in the the output pattern - :type npt: int or (int, int) - :param mask: array with masked pixel (1=masked) - :type mask: ndarray - :param pos0_range: range in radial dimension - :type pos0_range: (float, float) - :param pos1_range: range in azimuthal dimension - :type pos1_range: (float, float) - :param mask_checksum: checksum of the mask buffer - :type mask_checksum: int (or anything else ...) - :param unit: use to propagate the LUT object for further checkings - :type unit: pyFAI.units.Unit or 2-tuple of them for 2D integration - :param split: Splitting scheme: valid options are "no", "bbox", "full" - :param algo: Sparse matrix format to use: "LUT", "CSR" or "CSC" - :param empty: override the default empty value - :param scale: set to False for working in S.I. units for pos0_range - which is faster. By default assumes pos0_range has `units` - Note that pos1_range, the chi-angle, is expected in radians - - - This method is called when a look-up table needs to be set-up. - The *shape* parameter, correspond to the shape of the original - datatset. It is possible to customize the number of point of - the output histogram with the *npt* parameter which can be - either an integer for an 1D integration or a 2-tuple of - integer in case of a 2D integration. The LUT will have a - different shape: (npt, lut_max_size), the later parameter - being calculated during the instanciation of the splitBBoxLUT - class. - - It is possible to prepare the LUT with a predefine - *mask*. This operation can speedup the computation of the - later integrations. Instead of applying the patch on the - dataset, it is taken into account during the histogram - computation. If provided the *mask_checksum* prevent the - re-calculation of the mask. When the mask changes, its - checksum is used to reset (or not) the LUT (which is a very - time consuming operation !) - - It is also possible to restrain the range of the 1D or 2D - pattern with the *pos0_range* (radial) and *pos1_range* (azimuthal). - - The *unit* parameter is just propagated to the LUT integrator - for further checkings: The aim is to prevent an integration to - be performed in 2th-space when the LUT was setup in q space. 
- Unit can also be a 2-tuple in the case of a 2D integration - """ - if isinstance(unit, (list, tuple)) and len(unit) == 2: - unit0, unit1 = tuple(units.to_unit(u) for u in unit) - else: - unit0 = units.to_unit(unit) - unit1 = units.CHI_DEG - if scale and pos0_range: - pos0_scale = unit0.scale - pos0_range = tuple(pos0_range[i] / pos0_scale for i in (0, -1)) - if "__len__" in dir(npt) and len(npt) == 2: - int2d = True - if scale and pos1_range: - pos1_scale = unit1.scale - pos1_range = tuple(pos1_range[i] / pos1_scale for i in (0, -1)) - else: - int2d = False - empty = self._empty if empty is None else empty - if split == "full": - pos = self.array_from_unit(shape, "corner", unit, scale=False) - else: - pos0 = self.array_from_unit(shape, "center", unit0, scale=False) - if split == "no": - dpos0 = None - else: - dpos0 = self.array_from_unit(shape, "delta", unit0, scale=False) - - pos1 = None - dpos1 = None - if int2d or pos1_range: - pos1 = self.array_from_unit(shape, "center", unit1, scale=False) - if split == "no": - dpos1 = None - else: - dpos1 = self.array_from_unit(shape, "delta", unit1, scale=False) - - if mask is None: - mask_checksum = None - else: - assert mask.shape == shape - algo = algo.upper() - if algo == "LUT": - if split == "full": - if int2d: - return splitPixelFullLUT.HistoLUT2dFullSplit(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period), - ) - else: - return splitPixelFullLUT.HistoLUT1dFullSplit(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxLUT.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period), - unit=unit, - empty=empty) - else: - return splitBBoxLUT.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - elif algo == "CSR": - if split == "full": - if int2d: - return splitPixelFullCSR.FullSplitCSR_2d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period), - ) - else: - return splitPixelFullCSR.FullSplitCSR_1d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxCSR.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - unit=unit, - empty=empty, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period) -) - else: - return splitBBoxCSR.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - elif algo == "CSC": - if split == "full": - if int2d: - return splitPixelFullCSC.FullSplitCSC_2d(pos, - bins=npt, - 
pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty, - chiDiscAtPi=self.chiDiscAtPi, - clip_pos1=bool(unit1.period) - ) - else: - return splitPixelFullCSC.FullSplitCSC_1d(pos, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - else: - if int2d: - return splitBBoxCSC.HistoBBox2d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - unit=unit, - empty=empty, - allow_pos0_neg=not unit0.positive, - clip_pos1=bool(unit1.period) -) - else: - return splitBBoxCSC.HistoBBox1d(pos0, dpos0, pos1, dpos1, - bins=npt, - pos0_range=pos0_range, - pos1_range=pos1_range, - mask=mask, - mask_checksum=mask_checksum, - allow_pos0_neg=not unit0.positive, - unit=unit, - empty=empty) - - @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") - def setup_LUT(self, shape, npt, mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", empty=None, scale=True): - """See documentation of setup_sparse_integrator where algo=LUT""" - return self.setup_sparse_integrator(shape, npt, mask, - pos0_range, pos1_range, - mask_checksum, unit, - split=split, algo="LUT", - empty=empty, scale=scale) - - @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") - def setup_CSR(self, shape, npt, mask=None, - pos0_range=None, pos1_range=None, - mask_checksum=None, unit=units.TTH, - split="bbox", empty=None, scale=True): - """See documentation of setup_sparse_integrator where algo=CSR""" - return self.setup_sparse_integrator(shape, npt, mask, - pos0_range, pos1_range, - mask_checksum, unit, - split=split, algo="CSR", - empty=empty, scale=scale) - - @deprecated(since_version="0.20", only_once=True, replacement="integrate1d_ng", deprecated_since="0.20.0") - def integrate1d_legacy(self, data, npt, filename=None, - correctSolidAngle=True, - variance=None, error_model=None, - radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method="csr", unit=units.Q, safe=True, - normalization_factor=1.0, - block_size=None, profile=False, metadata=None): - """Calculate the azimuthal integrated Saxs curve in q(nm^-1) by default - - Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more - - - - :param data: 2D array from the Detector/CCD camera - :type data: ndarray - :param npt: number of points in the output pattern - :type npt: int - :param filename: output filename in 2/3 column ascii format - :type filename: str - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param variance: array containing the variance of the data. If not available, no error propagation is done - :type variance: ndarray - :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :type error_model: str - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
- :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :type mask: ndarray - :param dummy: value for dead/masked pixels - :type dummy: float - :param delta_dummy: precision for dummy value - :type delta_dummy: float - :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). - 0 for circular polarization or random, - None for no correction, - True for using the former correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :type method: can be Method named tuple, IntegrationMethod instance or str to be parsed - :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now - :type unit: pyFAI.units.Unit - :param safe: Do some extra checks to ensure LUT/CSR is still valid. False is faster. - :type safe: bool - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param block_size: size of the block for OpenCL integration (unused?) - :param profile: set to True to enable profiling in OpenCL - :param all: if true return a dictionary with many more parameters (deprecated, please refer to the documentation of Integrate1dResult). - :type all: bool - :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
- :return: q/2th/r bins center positions and regrouped intensity (and error array if variance or variance model provided) - :rtype: Integrate1dResult, dict - """ - method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) - assert method.dimension == 1 - unit = units.to_unit(unit) - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - shape = data.shape - pos0_scale = unit.scale - - if radial_range: - radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) - if azimuth_range is not None: - azimuth_range = self.normalize_azimuth_range(azimuth_range) - - if variance is not None: - assert variance.size == data.size - elif error_model: - error_model = error_model.lower() - if error_model == "poisson": - variance = numpy.ascontiguousarray(data, numpy.float32) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - I = None - sigma = None - count = None - sum_ = None - - if method.algo_lower == "lut": - if EXT_LUT_ENGINE not in self.engines: - engine = self.engines[EXT_LUT_ENGINE] = Engine() - else: - engine = self.engines[EXT_LUT_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and\ - (not integr.check_mask): - reset = "mask but LUT was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but LUT has mask" - elif (mask is not None) and\ - (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and\ - (integr.pos0_range is not None): - reset = "radial_range was defined in LUT" - elif (radial_range is not None) and\ - (integr.pos0_range != radial_range): - reset = ("radial_range is defined" - " but not the same as in LUT") - if (azimuth_range is None) and\ - (integr.pos1_range is not None): - reset = ("azimuth_range not defined and" - " LUT had azimuth_range defined") - elif (azimuth_range is not None) and\ - (integr.pos1_range != azimuth_range[0]): - reset = ("azimuth_range requested and" - " LUT's azimuth_range don't match") - if reset: - logger.info("AI.integrate1d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="LUT", - scale=False) - - except MemoryError: - # LUT method is hungry... 
- logger.warning("MemoryError: falling back on default forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - if integr: - if method.impl_lower == "opencl": - # TODO: manage the target - if OCL_LUT_ENGINE in self.engines: - ocl_engine = self.engines[OCL_LUT_ENGINE] - else: - ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() - with ocl_engine.lock: - if method.target is not None: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["lut"] != integr.lut_checksum): - ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - if ocl_integr is not None: - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor) - qAxis = integr.bin_centers # this will be copied later - if error_model == "azimuthal": - - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - var1d, a, b = ocl_integr.integrate_legacy(variance, - solidangle=None, - dummy=dummy, - delta_dummy=delta_dummy, - normalization_factor=1.0, - coef_power=2) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - else: - qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = integr.integrate_legacy(variance, - solidAngle=None, - dummy=dummy, - delta_dummy=delta_dummy, - coef_power=2, - normalization_factor=1.0) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.algo_lower == "csr": - if EXT_CSR_ENGINE not in self.engines: - engine = self.engines[EXT_CSR_ENGINE] = Engine() - else: - engine = self.engines[EXT_CSR_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and\ - (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and\ - (integr.mask_checksum != mask_crc): - reset = "mask changed" - if radial_range != integr.pos0_range: - reset = "radial_range changed" - if azimuth_range != integr.pos1_range: - reset = "azimuth_range changed" - if reset: - logger.info("AI.integrate1d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - 
pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="CSR", - scale=False) - except MemoryError: # CSR method is hungry... - logger.warning("MemoryError: falling back on forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_1D - else: - engine.set_engine(integr) - if integr: - if method.impl_lower == "opencl": - # TODO: manage OpenCL targets - if OCL_CSR_ENGINE not in self.engines: - self.engines[OCL_CSR_ENGINE] = Engine() - ocl_engine = self.engines[OCL_CSR_ENGINE] - with ocl_engine.lock: - if method.target is not None: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["data"] != integr.lut_checksum): - ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum, - block_size=block_size, - profile=profile) - ocl_engine.set_engine(ocl_integr) - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor) - qAxis = integr.bin_centers # this will be copied later - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - var1d, a, b = ocl_integr.integrate_legacy(variance, - solidangle=None, - dummy=dummy, - delta_dummy=delta_dummy) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - else: - qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = integr.integrate_legacy(variance, - solidAngle=None, - dummy=dummy, - delta_dummy=delta_dummy, - normalization_factor=1.0) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.method[1:4] == ("full", "histogram", "cython"): - logger.debug("integrate1d uses SplitPixel implementation") - pos = self.array_from_unit(shape, "corner", unit, scale=False) - qAxis, I, sum_, count = splitPixel.fullSplit1D(pos=pos, - weights=data, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor - ) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = splitPixel.fullSplit1D(pos=pos, - weights=variance, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - normalization_factor=1.0 - ) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not 
None else self._empty - - if method.method[1:4] == ("bbox", "histogram", "cython"): - logger.debug("integrate1d uses BBox implementation") - if azimuth_range is not None: - chi = self.chiArray(shape) - dchi = self.deltaChi(shape) - else: - chi = None - dchi = None - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) - qAxis, I, sum_, count = splitBBox.histoBBox1d(weights=data, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 - if variance is not None: - _, var1d, a, b = splitBBox.histoBBox1d(weights=variance, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=npt, - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - ) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - - if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": - # Common part for Numpy and Cython - data = data.astype(numpy.float32) - mask = self.create_mask(data, mask, dummy, delta_dummy, - unit=unit, - radial_range=radial_range, - azimuth_range=azimuth_range, - mode="where") - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - if radial_range is None: - radial_range = (pos0.min(), pos0.max()) - pos0 = pos0[mask] - if dark is not None: - data -= dark - if flat is not None: - data /= flat - if polarization is not None: - data /= polarization - if solidangle is not None: - data /= solidangle - data = data[mask] - if variance is not None: - variance = variance[mask] - - if method.impl_lower == "cython": - logger.debug("integrate1d uses cython implementation") - qAxis, I, sum_, count = histogram.histogram(pos=pos0, - weights=data, - bins=npt, - bin_range=radial_range, - pixelSize_in_Pos=0, - empty=dummy if dummy is not None else self._empty, - normalization_factor=normalization_factor) - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 - if variance is not None: - _, var1d, a, b = histogram.histogram(pos=pos0, - weights=variance, - bins=npt, - bin_range=radial_range, - pixelSize_in_Pos=1, - empty=dummy if dummy is not None else self._empty) - with numpy.errstate(divide='ignore', invalid='ignore'): - sigma = numpy.sqrt(a) / (b * normalization_factor) - sigma[b == 0] = dummy if dummy is not None else self._empty - elif method.impl_lower == "python": - logger.debug("integrate1d uses Numpy implementation") - count, b = numpy.histogram(pos0, npt, range=radial_range) - qAxis = (b[1:] + b[:-1]) / 2.0 - sum_, b = numpy.histogram(pos0, npt, weights=data, range=radial_range) - with numpy.errstate(divide='ignore', invalid='ignore'): - if error_model == "azimuthal": - variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 - if variance is not None: - var1d, b = numpy.histogram(pos0, npt, weights=variance, range=radial_range) - sigma = numpy.sqrt(var1d) / (count * normalization_factor) - sigma[count == 0] = dummy if 
dummy is not None else self._empty - with numpy.errstate(divide='ignore', invalid='ignore'): - I = sum_ / count / normalization_factor - I[count == 0] = dummy if dummy is not None else self._empty - - if pos0_scale: - # not in place to make a copy - qAxis = qAxis * pos0_scale - - result = Integrate1dResult(qAxis, I, sigma) - result._set_method_called("integrate1d") - result._set_method(method) - result._set_compute_engine(str(method)) - result._set_unit(unit) - result._set_sum(sum_) - result._set_count(count) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_metadata(metadata) - - if filename is not None: - save_integrate_result(filename, result) - - return result - - _integrate1d_legacy = integrate1d_legacy - - def integrate1d_ng(self, data, npt, filename=None, - correctSolidAngle=True, - variance=None, error_model=None, - radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, absorption=None, - method=("bbox", "csr", "cython"), unit=units.Q, safe=True, - normalization_factor=1.0, - metadata=None): + def integrate1d(self, data, npt, filename=None, + correctSolidAngle=True, + variance=None, error_model=None, + radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, absorption=None, + method=("bbox", "csr", "cython"), unit=units.Q, safe=True, + normalization_factor=1.0, + metadata=None): """Calculate the azimuthal integration (1d) of a 2D image. Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more @@ -1561,8 +550,7 @@ def integrate1d_ng(self, data, npt, filename=None, save_integrate_result(filename, result) return result - _integrate1d_ng = integrate1d_ng - integrate1d = integrate1d_ng + _integrate1d_ng = integrate1d_ng = integrate1d def integrate_radial(self, data, npt, npt_rad=100, correctSolidAngle=True, @@ -1643,426 +631,6 @@ def integrate_radial(self, data, npt, npt_rad=100, return result - @deprecated(since_version="0.21", only_once=True, deprecated_since="0.21.0") - def integrate2d_legacy(self, data, npt_rad, npt_azim=360, - filename=None, correctSolidAngle=True, variance=None, - error_model=None, radial_range=None, azimuth_range=None, - mask=None, dummy=None, delta_dummy=None, - polarization_factor=None, dark=None, flat=None, - method=None, unit=units.Q, safe=True, - normalization_factor=1.0, metadata=None): - """ - Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default - - Multi algorithm implementation (tries to be bullet proof) - - :param data: 2D array from the Detector/CCD camera - :type data: ndarray - :param npt_rad: number of points in the radial direction - :type npt_rad: int - :param npt_azim: number of points in the azimuthal direction - :type npt_azim: int - :param filename: output image (as edf format) - :type filename: str - :param correctSolidAngle: correct for solid angle of each pixel if True - :type correctSolidAngle: bool - :param variance: array containing the variance of the data. 
If not available, no error propagation is done - :type variance: ndarray - :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) - :type error_model: str - :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type radial_range: (float, float), optional - :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. - :type azimuth_range: (float, float), optional - :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels - :type mask: ndarray - :param dummy: value for dead/masked pixels - :type dummy: float - :param delta_dummy: precision for dummy value - :type delta_dummy: float - :param polarization_factor: polarization factor between -1 (vertical) - and +1 (horizontal). 0 for circular polarization or random, - None for no correction - :type polarization_factor: float - :param dark: dark noise image - :type dark: ndarray - :param flat: flat field image - :type flat: ndarray - :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) - :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now - :type unit: pyFAI.units.Unit - :param safe: Do some extra checks to ensure LUT is still valid. False is faster. - :type safe: bool - :param normalization_factor: Value of a normalization monitor - :type normalization_factor: float - :param all: if true, return many more intermediate results as a dict (deprecated, please refer to the documentation of Integrate2dResult). - :param metadata: JSON serializable object containing the metadata, usually a dictionary. - :type all: bool - :return: azimuthaly regrouped intensity, q/2theta/r pos. and chi pos. 
- :rtype: Integrate2dResult, dict - """ - method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D) - assert method.dimension == 2 - npt = (npt_rad, npt_azim) - unit = units.to_unit(unit) - pos0_scale = unit.scale - if mask is None: - has_mask = "from detector" - mask = self.mask - mask_crc = self.detector.get_mask_crc() - if mask is None: - has_mask = False - mask_crc = None - else: - has_mask = "provided" - mask = numpy.ascontiguousarray(mask) - mask_crc = crc32(mask) - - shape = data.shape - - if radial_range: - radial_range = tuple([i / pos0_scale for i in radial_range]) - - if variance is not None: - assert variance.size == data.size - elif error_model: - error_model = error_model.lower() - if error_model == "poisson": - variance = numpy.ascontiguousarray(data, numpy.float32) - - if azimuth_range is not None: - azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1)) - if azimuth_range[1] <= azimuth_range[0]: - azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi) - self.check_chi_disc(azimuth_range) - - if correctSolidAngle: - solidangle = self.solidAngleArray(shape, correctSolidAngle) - else: - solidangle = None - - if polarization_factor is None: - polarization = polarization_crc = None - else: - polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) - - if dark is None: - dark = self.detector.darkcurrent - if dark is None: - has_dark = False - else: - has_dark = "from detector" - else: - has_dark = "provided" - - if flat is None: - flat = self.detector.flatfield - if dark is None: - has_flat = False - else: - has_flat = "from detector" - else: - has_flat = "provided" - - I = None - sigma = None - sum_ = None - count = None - - if method.algo_lower == "lut": - if EXT_LUT_ENGINE not in self.engines: - engine = self.engines[EXT_LUT_ENGINE] = Engine() - else: - engine = self.engines[EXT_LUT_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and (not integr.check_mask): - reset = "mask but LUT was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but LUT has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if radial_range != integr.pos0_range: - reset = "radial_range changed" - if azimuth_range != integr.pos1_range: - reset = "azimuth_range changed" - error = False - if reset: - logger.info("ai.integrate2d: Resetting integrator because %s", reset) - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, algo="LUT", unit=unit, scale=False) - except MemoryError: - # LUT method is hungry im memory... 
- logger.warning("MemoryError: falling back on forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - engine.set_engine(integr) - if not error: - if method.impl_lower == "opencl": - if OCL_LUT_ENGINE in self.engines: - ocl_engine = self.engines[OCL_LUT_ENGINE] - else: - ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() - with ocl_engine.lock: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or \ - (ocl_integr.on_device["lut"] != integr.lut_checksum): - ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - - if (not error) and (ocl_integr is not None): - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - normalization_factor=normalization_factor, - safe=safe) - I.shape = npt - I = I.T - bins_rad = integr.bin_centers0 # this will be copied later - bins_azim = integr.bin_centers1 - else: - I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor - ) - - if method.algo_lower == "csr": - if EXT_CSR_ENGINE not in self.engines: - engine = self.engines[EXT_CSR_ENGINE] = Engine() - else: - engine = self.engines[EXT_CSR_ENGINE] - with engine.lock: - integr = engine.engine - reset = None - if integr is None: - reset = "of first initialization" - if (not reset) and safe: - if integr.unit != unit: - reset = "unit changed" - if integr.bins != npt: - reset = "number of points changed" - if integr.size != data.size: - reset = "input image size changed" - if (mask is not None) and (not integr.check_mask): - reset = "mask but CSR was without mask" - elif (mask is None) and (integr.check_mask): - reset = "no mask but CSR has mask" - elif (mask is not None) and (integr.mask_checksum != mask_crc): - reset = "mask changed" - if (radial_range is None) and (integr.pos0_range is not None): - reset = "radial_range was defined in CSR" - elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): - reset = "radial_range is defined but differs in CSR" - if (azimuth_range is None) and (integr.pos1_range is not None): - reset = "azimuth_range not defined and CSR had azimuth_range defined" - elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): - reset = "azimuth_range requested and CSR's azimuth_range don't match" - error = False - if reset: - logger.info("AI.integrate2d: Resetting integrator because %s", reset) - split = method.split_lower - if split == "pseudo": - split = "full" - try: - integr = self.setup_sparse_integrator(shape, npt, mask=mask, - pos0_range=radial_range, pos1_range=azimuth_range, - mask_checksum=mask_crc, - unit=unit, split=split, algo="CSR", - scale=False) - except MemoryError: - logger.warning("MemoryError: falling back on default forward implementation") - integr = None - self.reset_engines() - method = self.DEFAULT_METHOD_2D - error = True - else: - error = False - engine.set_engine(integr) - if not error: - if method.impl_lower == "opencl": - if OCL_CSR_ENGINE in self.engines: - 
ocl_engine = self.engines[OCL_CSR_ENGINE] - else: - ocl_engine = self.engines[OCL_CSR_ENGINE] = Engine() - with ocl_engine.lock: - platformid, deviceid = method.target - ocl_integr = ocl_engine.engine - if (ocl_integr is None) or (ocl_integr.on_device["data"] != integr.lut_checksum): - ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, - integr.size, - platformid=platformid, - deviceid=deviceid, - checksum=integr.lut_checksum) - ocl_engine.set_engine(ocl_integr) - if (not error) and (ocl_integr is not None): - I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, - solidangle=solidangle, - solidangle_checksum=self._dssa_crc, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - polarization_checksum=polarization_crc, - safe=safe, - normalization_factor=normalization_factor) - I.shape = npt - I = I.T - bins_rad = integr.bin_centers0 # this will be copied later - bins_azim = integr.bin_centers1 - else: - I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, - solidAngle=solidangle, - dummy=dummy, - delta_dummy=delta_dummy, - polarization=polarization, - normalization_factor=normalization_factor) - - if method.method[1:4] in (("pseudo", "histogram", "cython"), ("full", "histogram", "cython")): - logger.debug("integrate2d uses SplitPixel implementation") - pos = self.array_from_unit(shape, "corner", unit, scale=False) - I, bins_rad, bins_azim, sum_, count = splitPixel.fullSplit2D(pos=pos, - weights=data, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=dummy if dummy is not None else self._empty) - if method.method[1:4] == ("bbox", "histogram", "cython"): - logger.debug("integrate2d uses BBox implementation") - chi = self.chiArray(shape) - dchi = self.deltaChi(shape) - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) - I, bins_rad, bins_azim, sum_, count = splitBBox.histoBBox2d(weights=data, - pos0=pos0, - delta_pos0=dpos0, - pos1=chi, - delta_pos1=dchi, - bins=(npt_rad, npt_azim), - pos0_range=radial_range, - pos1_range=azimuth_range, - dummy=dummy, - delta_dummy=delta_dummy, - mask=mask, - dark=dark, - flat=flat, - solidangle=solidangle, - polarization=polarization, - normalization_factor=normalization_factor, - chiDiscAtPi=self.chiDiscAtPi, - empty=dummy if dummy is not None else self._empty) - - if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": - logger.debug("integrate2d uses numpy or cython implementation") - data = data.astype(numpy.float32) # it is important to make a copy see issue #88 - mask = self.create_mask(data, mask, dummy, delta_dummy, - unit=unit, - radial_range=radial_range, - azimuth_range=azimuth_range, - mode="where") - pos0 = self.array_from_unit(shape, "center", unit, scale=False) - pos1 = self.chiArray(shape) - - if radial_range is None: - radial_range = [pos0.min(), pos0.max() * EPS32] - - if azimuth_range is None: - azimuth_range = [pos1.min(), pos1.max() * EPS32] - - if variance is not None: - variance = variance[mask] - - if dark is not None: - data -= dark - - if flat is not None: - data /= flat - - if polarization is not None: - data /= polarization - - if solidangle is not None: - data /= solidangle - - data = data[mask] - 
pos0 = pos0[mask] - pos1 = pos1[mask] - if method.impl_lower == "cython": - I, bins_azim, bins_rad, sum_, count = histogram.histogram2d(pos0=pos1, - pos1=pos0, - weights=data, - bins=(npt_azim, npt_rad), - split=False, - empty=dummy if dummy is not None else self._empty, - normalization_factor=normalization_factor) - elif method.impl_lower == "python": - logger.debug("integrate2d uses Numpy implementation") - count, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), range=[azimuth_range, radial_range]) - bins_azim = (b[1:] + b[:-1]) / 2.0 - bins_rad = (c[1:] + c[:-1]) / 2.0 - count1 = numpy.maximum(1, count) - sum_, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), - weights=data, range=[azimuth_range, radial_range]) - I = sum_ / count1 / normalization_factor - I[count == 0] = dummy if dummy is not None else self._empty - # I know I make copies .... - bins_rad = bins_rad * pos0_scale - bins_azim = bins_azim * 180.0 / pi - - result = Integrate2dResult(I, bins_rad, bins_azim, sigma) - result._set_method_called("integrate2d") - result._set_compute_engine(str(method)) - result._set_unit(unit) - result._set_count(count) - result._set_sum(sum_) - result._set_has_dark_correction(has_dark) - result._set_has_flat_correction(has_flat) - result._set_has_mask_applied(has_mask) - result._set_polarization_factor(polarization_factor) - result._set_normalization_factor(normalization_factor) - result._set_metadata(metadata) - - if filename is not None: - save_integrate_result(filename, result) - - return result - - _integrate2d_legacy = integrate2d_legacy def integrate2d_ng(self, data, npt_rad, npt_azim=360, filename=None, correctSolidAngle=True, variance=None, @@ -2579,144 +1147,6 @@ def integrate2d_ng(self, data, npt_rad, npt_azim=360, integrate2d = _integrate2d_ng = integrate2d_ng - @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") - def save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): - """This method save the result of a 1D integration. - - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 1D integration - :type filename: str - :param dim1: the x coordinates of the integrated curve - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - self.__save1D(filename=filename, - dim1=dim1, - I=I, - error=error, - dim1_unit=dim1_unit, - has_dark=has_dark, - has_flat=has_flat, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor) - - def __save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None): - """This method save the result of a 1D integration. 
- - :param filename: the filename used to save the 1D integration - :type filename: str - :param dim1: the x coordinates of the integrated curve - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - if not filename: - return - writer = DefaultAiWriter(None, self) - writer.save1D(filename, dim1, I, error, dim1_unit, has_dark, has_flat, - polarization_factor, normalization_factor) - - @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter") - def save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, - polarization_factor=None, normalization_factor=None): - """This method save the result of a 2D integration. - - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 2D histogram - :type filename: str - :param dim1: the 1st coordinates of the histogram - :type dim1: numpy.ndarray - :param dim1: the 2nd coordinates of the histogram - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - self.__save2D(filename=filename, - I=I, - dim1=dim1, - dim2=dim2, - error=error, - dim1_unit=dim1_unit, - has_dark=has_dark, - has_flat=has_flat, - polarization_factor=polarization_factor, - normalization_factor=normalization_factor) - - def __save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH, - has_dark=False, has_flat=False, - polarization_factor=None, normalization_factor=None): - """This method save the result of a 2D integration. 
- - Deprecated on 13/06/2017 - - :param filename: the filename used to save the 2D histogram - :type filename: str - :param dim1: the 1st coordinates of the histogram - :type dim1: numpy.ndarray - :param dim1: the 2nd coordinates of the histogram - :type dim1: numpy.ndarray - :param I: The integrated intensity - :type I: numpy.mdarray - :param error: the error bar for each intensity - :type error: numpy.ndarray or None - :param dim1_unit: the unit of the dim1 array - :type dim1_unit: pyFAI.units.Unit - :param has_dark: save the darks filenames (default: no) - :type has_dark: bool - :param has_flat: save the flat filenames (default: no) - :type has_flat: bool - :param polarization_factor: the polarization factor - :type polarization_factor: float - :param normalization_factor: the monitor value - :type normalization_factor: float - """ - if not filename: - return - writer = DefaultAiWriter(None, self) - writer.save2D(filename, I, dim1, dim2, error, dim1_unit, has_dark, has_flat, - polarization_factor, normalization_factor) def medfilt1d(self, data, npt_rad=1024, npt_azim=512, correctSolidAngle=True, @@ -3014,28 +1444,28 @@ def sigma_clip_legacy(self, data, npt_rad=1024, npt_azim=512, _sigma_clip_legacy = sigma_clip_legacy - def sigma_clip_ng(self, data, - npt=1024, - correctSolidAngle=True, - polarization_factor=None, - variance=None, - error_model=ErrorModel.NO, - radial_range=None, - azimuth_range=None, - dark=None, - flat=None, - absorption=None, - method=("no", "csr", "cython"), - unit=units.Q, - thres=5.0, - max_iter=5, - dummy=None, - delta_dummy=None, - mask=None, - normalization_factor=1.0, - metadata=None, - safe=True, - **kwargs): + def sigma_clip(self, data, + npt=1024, + correctSolidAngle=True, + polarization_factor=None, + variance=None, + error_model=ErrorModel.NO, + radial_range=None, + azimuth_range=None, + dark=None, + flat=None, + absorption=None, + method=("no", "csr", "cython"), + unit=units.Q, + thres=5.0, + max_iter=5, + dummy=None, + delta_dummy=None, + mask=None, + normalization_factor=1.0, + metadata=None, + safe=True, + **kwargs): """Performs iteratively the 1D integration with variance propagation and performs a sigm-clipping at each iteration, i.e. all pixel which intensity differs more than thres*std is @@ -3315,7 +1745,7 @@ def sigma_clip_ng(self, data, result._set_error_model(error_model) return result - sigma_clip = sigma_clip_ng + sigma_clip_ng = sigma_clip def separate(self, data, npt_rad=1024, npt_azim=512, unit="2th_deg", method="splitpixel", percentile=50, mask=None, restore_mask=True): @@ -3534,108 +1964,3 @@ def guess_polarization(self, img, npt_rad=None, npt_azim=360, unit="2th_deg", opt = minimize_scalar(sfun, bounds=[-1, 1]) logger.info(str(opt)) return opt.x - -################################################################################ -# Some properties -################################################################################ - - def set_darkcurrent(self, dark): - self.detector.set_darkcurrent(dark) - - def get_darkcurrent(self): - return self.detector.get_darkcurrent() - - darkcurrent = property(get_darkcurrent, set_darkcurrent) - - def set_flatfield(self, flat): - self.detector.set_flatfield(flat) - - def get_flatfield(self): - return self.detector.get_flatfield() - - flatfield = property(get_flatfield, set_flatfield) - - @deprecated(reason="Not maintained", since_version="0.17") - def set_darkfiles(self, files=None, method="mean"): - """Set the dark current from one or mutliple files, avaraged - according to the method provided. 
- - Moved to Detector. - - :param files: file(s) used to compute the dark. - :type files: str or list(str) or None - :param method: method used to compute the dark, "mean" or "median" - :type method: str - """ - self.detector.set_darkfiles(files, method) - - @property - @deprecated(reason="Not maintained", since_version="0.17") - def darkfiles(self): - return self.detector.darkfiles - - @deprecated(reason="Not maintained", since_version="0.17") - def set_flatfiles(self, files, method="mean"): - """Set the flat field from one or mutliple files, averaged - according to the method provided. - - Moved to Detector. - - :param files: file(s) used to compute the flat-field. - :type files: str or list(str) or None - :param method: method used to compute the dark, "mean" or "median" - :type method: str - """ - self.detector.set_flatfiles(files, method) - - @property - @deprecated(reason="Not maintained", since_version="0.17") - def flatfiles(self): - return self.detector.flatfiles - - def get_empty(self): - return self._empty - - def set_empty(self, value): - self._empty = float(value) - # propagate empty values to integrators - for engine in self.engines.values(): - with engine.lock: - if engine.engine is not None: - try: - engine.engine.empty = self._empty - except Exception as exeption: - logger.error(exeption) - - empty = property(get_empty, set_empty) - - def __getnewargs_ex__(self): - "Helper function for pickling ai" - return (self.dist, self.poni1, self.poni2, - self.rot1, self.rot2, self.rot3, - self.pixel1, self.pixel2, - self.splineFile, self.detector, self.wavelength), {} - - def __getstate__(self): - """Helper function for pickling ai - - :return: the state of the object - """ - - state_blacklist = ('_lock', "engines") - state = Geometry.__getstate__(self) - for key in state_blacklist: - if key in state: - del state[key] - return state - - def __setstate__(self, state): - """Helper function for unpickling ai - - :param state: the state of the object - """ - for statekey, statevalue in state.items(): - setattr(self, statekey, statevalue) - self._sem = threading.Semaphore() - self._lock = threading.Semaphore() - self.engines = {} diff --git a/src/pyFAI/integrator/common.py b/src/pyFAI/integrator/common.py new file mode 100644 index 000000000..7262cfda2 --- /dev/null +++ b/src/pyFAI/integrator/common.py @@ -0,0 +1,1751 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Project: Azimuthal integration +# https://github.com/silx-kit/pyFAI +# +# Copyright (C) 2012-2024 European Synchrotron Radiation Facility, Grenoble, France +# +# Principal author: Jérôme Kieffer (Jerome.Kieffer@ESRF.eu) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# . +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# . +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+__author__ = "Jérôme Kieffer"
+__contact__ = "Jerome.Kieffer@ESRF.eu"
+__license__ = "MIT"
+__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
+__date__ = "10/10/2024"
+__status__ = "stable"
+__docformat__ = 'restructuredtext'
+
+import logging
+logger = logging.getLogger(__name__)
+import warnings
+import threading
+import gc
+from math import pi, log
+import numpy
+from ..geometry import Geometry
+from .. import units
+from ..utils import EPS32, deg2rad, crc32
+from ..utils.decorators import deprecated, deprecated_warning
+from ..containers import Integrate1dResult, Integrate2dResult, SeparateResult, ErrorModel
+from ..io import DefaultAiWriter, save_integrate_result
+from ..io.ponifile import PoniFile
+error = None
+from ..method_registry import IntegrationMethod
+from .load_engines import ocl_azim_csr, ocl_azim_lut, ocl_sort, histogram, splitBBox, \
+    splitPixel, splitBBoxCSR, splitBBoxLUT, splitPixelFullCSR, \
+    histogram_engine, splitPixelFullLUT, splitBBoxCSC, splitPixelFullCSC, \
+    PREFERED_METHODS_1D, PREFERED_METHODS_2D
+from ..engines import Engine
+
+# Few constants for engine names:
+OCL_CSR_ENGINE = "ocl_csr_integr"
+OCL_LUT_ENGINE = "ocl_lut_integr"
+OCL_HIST_ENGINE = "ocl_histogram"
+OCL_SORT_ENGINE = "ocl_sorter"
+EXT_LUT_ENGINE = "lut_integrator"
+EXT_CSR_ENGINE = "csr_integrator"
+
+
+class Integrator(Geometry):
+    """
+    This class is the base class for azimuthal or fiber integrators.
+    All geometry calculations are done in the parent Geometry class.
+
+    """
+
+    DEFAULT_METHOD_1D = PREFERED_METHODS_1D[0]
+    DEFAULT_METHOD_2D = PREFERED_METHODS_2D[0]
+    "Fail-safe low-memory integrator"
+
+    USE_LEGACY_MASK_NORMALIZATION = True
+    """If true, the Python engine integrator will normalize the mask to use the
+    most frequent value of the mask as the non-masking value.
+
+    This behaviour is not consistent with other engines and is now deprecated.
+    This flag will be turned off in the coming releases.
+
+    Turning off this flag forces the user to provide a mask with 0 as the
+    non-masking value and any non-zero value (negative or positive) as the
+    masking value. A boolean mask is also accepted (`True` is the masking value).
+    """
+
+    def __init__(self, dist=1, poni1=0, poni2=0,
+                 rot1=0, rot2=0, rot3=0,
+                 pixel1=None, pixel2=None,
+                 splineFile=None, detector=None, wavelength=None, orientation=0):
+        """
+        :param dist: distance sample - detector plane (orthogonal distance, not along the beam), in meter.
+        :type dist: float
+        :param poni1: coordinate of the point of normal incidence along the detector's first dimension, in meter
+        :type poni1: float
+        :param poni2: coordinate of the point of normal incidence along the detector's second dimension, in meter
+        :type poni2: float
+        :param rot1: first rotation from sample ref to detector's ref, in radians
+        :type rot1: float
+        :param rot2: second rotation from sample ref to detector's ref, in radians
+        :type rot2: float
+        :param rot3: third rotation from sample ref to detector's ref, in radians
+        :type rot3: float
+        :param pixel1: Deprecated. Pixel size of the first dimension of the detector, in meter.
+            If both pixel1 and pixel2 are not None, the detector pixel size is overwritten.
+ Prefer defining the detector pixel size on the provided detector + object (``detector.pixel1 = 5e-6``). + :type pixel1: float + :param pixel2: Deprecated. Pixel size of the second dimension of the detector, in meter. + If both pixel1 and pixel2 are not None, detector pixel size is overwritten. + Prefer defining the detector pixel size on the provided detector + object (``detector.pixel2 = 5e-6``). + :type pixel2: float + :param splineFile: Deprecated. File containing the geometric distortion of the detector. + If not None, pixel1 and pixel2 are ignored and detector spline is overwritten. + Prefer defining the detector spline manually + (``detector.splineFile = "file.spline"``). + :type splineFile: str + :param detector: name of the detector or Detector instance. String + description is deprecated. Prefer using the result of the detector + factory: ``pyFAI.detector_factory("eiger4m")`` + :type detector: str or pyFAI.Detector + :param float wavelength: Wave length used in meter + :param int orientation: orientation of the detector, see pyFAI.detectors.orientation.Orientation + """ + Geometry.__init__(self, dist, poni1, poni2, + rot1, rot2, rot3, + pixel1, pixel2, splineFile, detector, wavelength, orientation) + + # mask, maskfile, darkcurrent and flatfield are properties pointing to + # self.detector now (16/06/2017) + + self._lock = threading.Semaphore() + self.engines = {} # key: name of the engine, + + self._empty = 0.0 + + def reset(self, collect_garbage=True): + """Reset azimuthal integrator in addition to other arrays. + + :param collect_garbage: set to False to prevent garbage collection, faster + """ + Geometry.reset(self, collect_garbage=False) + self.reset_engines(collect_garbage) + + def reset_engines(self, collect_garbage=True): + """Urgently free memory by deleting all regrid-engines + + :param collect_garbage: set to False to prevent garbage collection, faster + """ + with self._lock: + for key in list(self.engines.keys()): # explicit copy + self.engines.pop(key).reset() + if collect_garbage: + gc.collect() + + def create_mask(self, data, mask=None, + dummy=None, delta_dummy=None, + unit=None, radial_range=None, + azimuth_range=None, + mode="normal"): + """ + Combines various masks into another one. + + :param data: input array of data + :type data: ndarray + :param mask: input mask (if none, self.mask is used) + :type mask: ndarray + :param dummy: value of dead pixels + :type dummy: float + :param delta_dumy: precision of dummy pixels + :type delta_dummy: float + :param mode: can be "normal" or "numpy" (inverted) or "where" applied to the mask + :type mode: str + + :return: the new mask + :rtype: ndarray of bool + + This method combine two masks (dynamic mask from *data & + dummy* and *mask*) to generate a new one with the 'or' binary + operation. One can adjust the level, with the *dummy* and + the *delta_dummy* parameter, when you consider the *data* + values needs to be masked out. + + This method can work in two different *mode*: + + * "normal": False for valid pixels, True for bad pixels + * "numpy": True for valid pixels, false for others + * "where": does a numpy.where on the "numpy" output + + This method tries to accomodate various types of masks (like + valid=0 & masked=-1, ...) + + Note for the developper: we use a lot of numpy.logical_or in this method, + the out= argument allows to recycle buffers and save considerable time in + allocating temporary arrays. + """ + logical_or = numpy.logical_or + shape = data.shape + # ^^^^ this is why data is mandatory ! 
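+        # Illustrative usage, not taken from the original source (``ai`` and ``img``
+        # stand for any Integrator instance and any image of matching shape):
+        # with mode="where" the returned tuple of indices selects the valid pixels
+        # directly,
+        #     msk = ai.create_mask(img, dummy=-1, delta_dummy=0.5, mode="where")
+        #     valid = img[msk]
+        # while the default mode="normal" returns a boolean array in which True
+        # marks the masked (bad) pixels.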
+ if mask is None: + mask = self.mask + if mask is None: + mask = numpy.zeros(shape, dtype=bool) + else: + mask = mask.astype(bool) + if self.USE_LEGACY_MASK_NORMALIZATION: + if mask.sum(dtype=int) > mask.size // 2: + reason = "The provided mask is not complient with other engines. "\ + "The feature which automatically invert it will be removed soon. "\ + "For more information see https://github.com/silx-kit/pyFAI/pull/868" + deprecated_warning(__name__, name="provided mask content", reason=reason) + numpy.logical_not(mask, mask) + if (mask.shape != shape): + try: + mask = mask[:shape[0],:shape[1]] + except Exception as error: # IGNORE:W0703 + logger.error("Mask provided has wrong shape:" + " expected: %s, got %s, error: %s", + shape, mask.shape, error) + mask = numpy.zeros(shape, dtype=bool) + if dummy is not None: + if delta_dummy is None: + logical_or(mask, (data == dummy), out=mask) + else: + logical_or(mask, abs(data - dummy) <= delta_dummy, out=mask) + + if radial_range is not None: + assert unit, "unit is needed when building a mask based on radial_range" + if isinstance(unit, (tuple, list)) and len(unit) == 2: + radial_unit = units.to_unit(unit[0]) + else: + radial_unit = units.to_unit(unit) + rad = self.array_from_unit(shape, "center", radial_unit, scale=False) + logical_or(mask, rad < radial_range[0], out=mask) + logical_or(mask, rad > radial_range[1], out=mask) + if azimuth_range is not None: + if isinstance(unit, (tuple, list)) and len(unit) == 2: + azimuth_unit = units.to_unit(unit[1]) + chi = self.array_from_unit(shape, "center", azimuth_unit, scale=False) + logical_or(mask, chi < azimuth_range[0], out=mask) + logical_or(mask, chi > azimuth_range[1], out=mask) + + # Prepare alternative representation for output: + if mode == "numpy": + numpy.logical_not(mask, mask) + elif mode == "where": + mask = numpy.where(numpy.logical_not(mask)) + return mask + + def dark_correction(self, data, dark=None): + """ + Correct for Dark-current effects. + If dark is not defined, correct for a dark set by "set_darkfiles" + + :param data: input ndarray with the image + :param dark: ndarray with dark noise or None + :return: 2tuple: corrected_data, dark_actually used (or None) + """ + dark = dark if dark is not None else self.detector.darkcurrent + if dark is not None: + return data - dark, dark + else: + return data, None + + def flat_correction(self, data, flat=None): + """ + Correct for flat field. + If flat is not defined, correct for a flat set by "set_flatfiles" + + :param data: input ndarray with the image + :param flat: ndarray with flatfield or None for no correction + :return: 2tuple: corrected_data, flat_actually used (or None) + """ + flat = flat if flat is not None else self.detector.flatfield + if flat is not None: + return data / flat, flat + else: + return data, None + + def _normalize_method(self, method, dim, default): + """ + :rtype: IntegrationMethod + """ + requested_method = method + method = IntegrationMethod.select_one_available(method, dim=dim, default=None, degradable=False) + if method is not None: + return method + method = IntegrationMethod.select_one_available(requested_method, dim=dim, default=default, degradable=True) + logger.warning("Method requested '%s' not available. 
Method '%s' will be used", requested_method, method)
+        return default
+
+    def setup_sparse_integrator(self,
+                                shape,
+                                npt,
+                                mask=None,
+                                pos0_range=None, pos1_range=None,
+                                mask_checksum=None, unit=units.TTH,
+                                split="bbox", algo="CSR",
+                                empty=None, scale=True):
+        """
+        Prepare a sparse-matrix integrator based on LUT, CSR or CSC format
+
+        :param shape: shape of the dataset
+        :type shape: (int, int)
+        :param npt: number of points in the output pattern
+        :type npt: int or (int, int)
+        :param mask: array with masked pixel (1=masked)
+        :type mask: ndarray
+        :param pos0_range: range in radial dimension
+        :type pos0_range: (float, float)
+        :param pos1_range: range in azimuthal dimension
+        :type pos1_range: (float, float)
+        :param mask_checksum: checksum of the mask buffer
+        :type mask_checksum: int (or anything else ...)
+        :param unit: used to propagate the LUT object for further checks
+        :type unit: pyFAI.units.Unit or 2-tuple of them for 2D integration
+        :param split: Splitting scheme: valid options are "no", "bbox", "full"
+        :param algo: Sparse matrix format to use: "LUT", "CSR" or "CSC"
+        :param empty: override the default empty value
+        :param scale: set to False for working in S.I. units for pos0_range,
+                      which is faster. By default assumes pos0_range has `units`.
+                      Note that pos1_range, the chi-angle, is expected in radians
+
+
+        This method is called when a look-up table needs to be set up.
+        The *shape* parameter corresponds to the shape of the original
+        dataset. It is possible to customize the number of points of
+        the output histogram with the *npt* parameter, which can be
+        either an integer for a 1D integration or a 2-tuple of
+        integers in the case of a 2D integration. The LUT will have a
+        different shape: (npt, lut_max_size), the latter parameter
+        being calculated during the instantiation of the splitBBoxLUT
+        class.
+
+        It is possible to prepare the LUT with a predefined
+        *mask*. This operation can speed up the computation of the
+        later integrations. Instead of applying the mask on the
+        dataset, it is taken into account during the histogram
+        computation. If provided, the *mask_checksum* prevents the
+        re-calculation of the mask. When the mask changes, its
+        checksum is used to reset (or not) the LUT (which is a very
+        time-consuming operation!)
+
+        It is also possible to restrict the range of the 1D or 2D
+        pattern with the *pos0_range* (radial) and *pos1_range* (azimuthal).
+
+        The *unit* parameter is just propagated to the LUT integrator
+        for further checks: the aim is to prevent an integration from
+        being performed in 2th-space when the LUT was set up in q-space.
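+
+        As an illustration (a sketch, not taken from the original code: the
+        names ``ai`` and ``img`` stand for any Integrator instance and any
+        image of the right shape), a CSR integrator for a 1000-bin 1D
+        integration in q could be prepared with::
+
+            engine = ai.setup_sparse_integrator(img.shape, 1000,
+                                                unit="q_nm^-1",
+                                                split="bbox", algo="CSR")
+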
+ Unit can also be a 2-tuple in the case of a 2D integration + """ + if isinstance(unit, (list, tuple)) and len(unit) == 2: + unit0, unit1 = tuple(units.to_unit(u) for u in unit) + else: + unit0 = units.to_unit(unit) + unit1 = units.CHI_DEG + if scale and pos0_range: + pos0_scale = unit0.scale + pos0_range = tuple(pos0_range[i] / pos0_scale for i in (0, -1)) + if "__len__" in dir(npt) and len(npt) == 2: + int2d = True + if scale and pos1_range: + pos1_scale = unit1.scale + pos1_range = tuple(pos1_range[i] / pos1_scale for i in (0, -1)) + else: + int2d = False + empty = self._empty if empty is None else empty + if split == "full": + pos = self.array_from_unit(shape, "corner", unit, scale=False) + else: + pos0 = self.array_from_unit(shape, "center", unit0, scale=False) + if split == "no": + dpos0 = None + else: + dpos0 = self.array_from_unit(shape, "delta", unit0, scale=False) + + pos1 = None + dpos1 = None + if int2d or pos1_range: + pos1 = self.array_from_unit(shape, "center", unit1, scale=False) + if split == "no": + dpos1 = None + else: + dpos1 = self.array_from_unit(shape, "delta", unit1, scale=False) + + if mask is None: + mask_checksum = None + else: + assert mask.shape == shape + algo = algo.upper() + if algo == "LUT": + if split == "full": + if int2d: + return splitPixelFullLUT.HistoLUT2dFullSplit(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period), + ) + else: + return splitPixelFullLUT.HistoLUT1dFullSplit(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxLUT.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period), + unit=unit, + empty=empty) + else: + return splitBBoxLUT.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + elif algo == "CSR": + if split == "full": + if int2d: + return splitPixelFullCSR.FullSplitCSR_2d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period), + ) + else: + return splitPixelFullCSR.FullSplitCSR_1d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxCSR.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + unit=unit, + empty=empty, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period) +) + else: + return splitBBoxCSR.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + elif algo == "CSC": + if split == "full": + if int2d: + return splitPixelFullCSC.FullSplitCSC_2d(pos, + bins=npt, + 
pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty, + chiDiscAtPi=self.chiDiscAtPi, + clip_pos1=bool(unit1.period) + ) + else: + return splitPixelFullCSC.FullSplitCSC_1d(pos, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + else: + if int2d: + return splitBBoxCSC.HistoBBox2d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + unit=unit, + empty=empty, + allow_pos0_neg=not unit0.positive, + clip_pos1=bool(unit1.period) +) + else: + return splitBBoxCSC.HistoBBox1d(pos0, dpos0, pos1, dpos1, + bins=npt, + pos0_range=pos0_range, + pos1_range=pos1_range, + mask=mask, + mask_checksum=mask_checksum, + allow_pos0_neg=not unit0.positive, + unit=unit, + empty=empty) + + @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") + def setup_LUT(self, shape, npt, mask=None, + pos0_range=None, pos1_range=None, + mask_checksum=None, unit=units.TTH, + split="bbox", empty=None, scale=True): + """See documentation of setup_sparse_integrator where algo=LUT""" + return self.setup_sparse_integrator(shape, npt, mask, + pos0_range, pos1_range, + mask_checksum, unit, + split=split, algo="LUT", + empty=empty, scale=scale) + + @deprecated(since_version="0.22", only_once=True, replacement="setup_sparse_integrator", deprecated_since="0.22.0") + def setup_CSR(self, shape, npt, mask=None, + pos0_range=None, pos1_range=None, + mask_checksum=None, unit=units.TTH, + split="bbox", empty=None, scale=True): + """See documentation of setup_sparse_integrator where algo=CSR""" + return self.setup_sparse_integrator(shape, npt, mask, + pos0_range, pos1_range, + mask_checksum, unit, + split=split, algo="CSR", + empty=empty, scale=scale) + + @deprecated(since_version="0.20", only_once=True, replacement="integrate1d_ng", deprecated_since="0.20.0") + def integrate1d_legacy(self, data, npt, filename=None, + correctSolidAngle=True, + variance=None, error_model=None, + radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method="csr", unit=units.Q, safe=True, + normalization_factor=1.0, + block_size=None, profile=False, metadata=None): + """Calculate the azimuthal integrated Saxs curve in q(nm^-1) by default + + Multi algorithm implementation (tries to be bullet proof), suitable for SAXS, WAXS, ... and much more + + + + :param data: 2D array from the Detector/CCD camera + :type data: ndarray + :param npt: number of points in the output pattern + :type npt: int + :param filename: output filename in 2/3 column ascii format + :type filename: str + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param variance: array containing the variance of the data. If not available, no error propagation is done + :type variance: ndarray + :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :type error_model: str + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. 
+ :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :type mask: ndarray + :param dummy: value for dead/masked pixels + :type dummy: float + :param delta_dummy: precision for dummy value + :type delta_dummy: float + :param polarization_factor: polarization factor between -1 (vertical) and +1 (horizontal). + 0 for circular polarization or random, + None for no correction, + True for using the former correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :type method: can be Method named tuple, IntegrationMethod instance or str to be parsed + :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now + :type unit: pyFAI.units.Unit + :param safe: Do some extra checks to ensure LUT/CSR is still valid. False is faster. + :type safe: bool + :param normalization_factor: Value of a normalization monitor + :type normalization_factor: float + :param block_size: size of the block for OpenCL integration (unused?) + :param profile: set to True to enable profiling in OpenCL + :param all: if true return a dictionary with many more parameters (deprecated, please refer to the documentation of Integrate1dResult). + :type all: bool + :param metadata: JSON serializable object containing the metadata, usually a dictionary. 
+ :return: q/2th/r bins center positions and regrouped intensity (and error array if variance or variance model provided) + :rtype: Integrate1dResult, dict + """ + method = self._normalize_method(method, dim=1, default=self.DEFAULT_METHOD_1D) + assert method.dimension == 1 + unit = units.to_unit(unit) + + if mask is None: + has_mask = "from detector" + mask = self.mask + mask_crc = self.detector.get_mask_crc() + if mask is None: + has_mask = False + mask_crc = None + else: + has_mask = "provided" + mask = numpy.ascontiguousarray(mask) + mask_crc = crc32(mask) + + shape = data.shape + pos0_scale = unit.scale + + if radial_range: + radial_range = tuple(radial_range[i] / pos0_scale for i in (0, -1)) + if azimuth_range is not None: + azimuth_range = self.normalize_azimuth_range(azimuth_range) + + if variance is not None: + assert variance.size == data.size + elif error_model: + error_model = error_model.lower() + if error_model == "poisson": + variance = numpy.ascontiguousarray(data, numpy.float32) + + if correctSolidAngle: + solidangle = self.solidAngleArray(shape, correctSolidAngle) + else: + solidangle = None + + if polarization_factor is None: + polarization = polarization_crc = None + else: + polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True) + + if dark is None: + dark = self.detector.darkcurrent + if dark is None: + has_dark = False + else: + has_dark = "from detector" + else: + has_dark = "provided" + + if flat is None: + flat = self.detector.flatfield + if dark is None: + has_flat = False + else: + has_flat = "from detector" + else: + has_flat = "provided" + + I = None + sigma = None + count = None + sum_ = None + + if method.algo_lower == "lut": + if EXT_LUT_ENGINE not in self.engines: + engine = self.engines[EXT_LUT_ENGINE] = Engine() + else: + engine = self.engines[EXT_LUT_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and\ + (not integr.check_mask): + reset = "mask but LUT was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but LUT has mask" + elif (mask is not None) and\ + (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and\ + (integr.pos0_range is not None): + reset = "radial_range was defined in LUT" + elif (radial_range is not None) and\ + (integr.pos0_range != radial_range): + reset = ("radial_range is defined" + " but not the same as in LUT") + if (azimuth_range is None) and\ + (integr.pos1_range is not None): + reset = ("azimuth_range not defined and" + " LUT had azimuth_range defined") + elif (azimuth_range is not None) and\ + (integr.pos1_range != azimuth_range[0]): + reset = ("azimuth_range requested and" + " LUT's azimuth_range don't match") + if reset: + logger.info("AI.integrate1d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="LUT", + scale=False) + + except MemoryError: + # LUT method is hungry... 
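+                        # A dense LUT for a large detector may not fit in RAM:
+                        # every cached engine is dropped (reset_engines below)
+                        # and the integration degrades to the fail-safe
+                        # DEFAULT_METHOD_1D defined on the class.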
+ logger.warning("MemoryError: falling back on default forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + if integr: + if method.impl_lower == "opencl": + # TODO: manage the target + if OCL_LUT_ENGINE in self.engines: + ocl_engine = self.engines[OCL_LUT_ENGINE] + else: + ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() + with ocl_engine.lock: + if method.target is not None: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["lut"] != integr.lut_checksum): + ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + if ocl_integr is not None: + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor) + qAxis = integr.bin_centers # this will be copied later + if error_model == "azimuthal": + + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + var1d, a, b = ocl_integr.integrate_legacy(variance, + solidangle=None, + dummy=dummy, + delta_dummy=delta_dummy, + normalization_factor=1.0, + coef_power=2) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + else: + qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = integr.integrate_legacy(variance, + solidAngle=None, + dummy=dummy, + delta_dummy=delta_dummy, + coef_power=2, + normalization_factor=1.0) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.algo_lower == "csr": + if EXT_CSR_ENGINE not in self.engines: + engine = self.engines[EXT_CSR_ENGINE] = Engine() + else: + engine = self.engines[EXT_CSR_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and\ + (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and\ + (integr.mask_checksum != mask_crc): + reset = "mask changed" + if radial_range != integr.pos0_range: + reset = "radial_range changed" + if azimuth_range != integr.pos1_range: + reset = "azimuth_range changed" + if reset: + logger.info("AI.integrate1d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + 
pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="CSR", + scale=False) + except MemoryError: # CSR method is hungry... + logger.warning("MemoryError: falling back on forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_1D + else: + engine.set_engine(integr) + if integr: + if method.impl_lower == "opencl": + # TODO: manage OpenCL targets + if OCL_CSR_ENGINE not in self.engines: + self.engines[OCL_CSR_ENGINE] = Engine() + ocl_engine = self.engines[OCL_CSR_ENGINE] + with ocl_engine.lock: + if method.target is not None: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["data"] != integr.lut_checksum): + ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum, + block_size=block_size, + profile=profile) + ocl_engine.set_engine(ocl_integr) + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor) + qAxis = integr.bin_centers # this will be copied later + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + var1d, a, b = ocl_integr.integrate_legacy(variance, + solidangle=None, + dummy=dummy, + delta_dummy=delta_dummy) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + else: + qAxis, I, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = integr.integrate_legacy(variance, + solidAngle=None, + dummy=dummy, + delta_dummy=delta_dummy, + normalization_factor=1.0) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.method[1:4] == ("full", "histogram", "cython"): + logger.debug("integrate1d uses SplitPixel implementation") + pos = self.array_from_unit(shape, "corner", unit, scale=False) + qAxis, I, sum_, count = splitPixel.fullSplit1D(pos=pos, + weights=data, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor + ) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = splitPixel.fullSplit1D(pos=pos, + weights=variance, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + normalization_factor=1.0 + ) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not 
None else self._empty + + if method.method[1:4] == ("bbox", "histogram", "cython"): + logger.debug("integrate1d uses BBox implementation") + if azimuth_range is not None: + chi = self.chiArray(shape) + dchi = self.deltaChi(shape) + else: + chi = None + dchi = None + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) + qAxis, I, sum_, count = splitBBox.histoBBox1d(weights=data, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)) ** 2 + if variance is not None: + _, var1d, a, b = splitBBox.histoBBox1d(weights=variance, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=npt, + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + ) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + + if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": + # Common part for Numpy and Cython + data = data.astype(numpy.float32) + mask = self.create_mask(data, mask, dummy, delta_dummy, + unit=unit, + radial_range=radial_range, + azimuth_range=azimuth_range, + mode="where") + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + if radial_range is None: + radial_range = (pos0.min(), pos0.max()) + pos0 = pos0[mask] + if dark is not None: + data -= dark + if flat is not None: + data /= flat + if polarization is not None: + data /= polarization + if solidangle is not None: + data /= solidangle + data = data[mask] + if variance is not None: + variance = variance[mask] + + if method.impl_lower == "cython": + logger.debug("integrate1d uses cython implementation") + qAxis, I, sum_, count = histogram.histogram(pos=pos0, + weights=data, + bins=npt, + bin_range=radial_range, + pixelSize_in_Pos=0, + empty=dummy if dummy is not None else self._empty, + normalization_factor=normalization_factor) + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 + if variance is not None: + _, var1d, a, b = histogram.histogram(pos=pos0, + weights=variance, + bins=npt, + bin_range=radial_range, + pixelSize_in_Pos=1, + empty=dummy if dummy is not None else self._empty) + with numpy.errstate(divide='ignore', invalid='ignore'): + sigma = numpy.sqrt(a) / (b * normalization_factor) + sigma[b == 0] = dummy if dummy is not None else self._empty + elif method.impl_lower == "python": + logger.debug("integrate1d uses Numpy implementation") + count, b = numpy.histogram(pos0, npt, range=radial_range) + qAxis = (b[1:] + b[:-1]) / 2.0 + sum_, b = numpy.histogram(pos0, npt, weights=data, range=radial_range) + with numpy.errstate(divide='ignore', invalid='ignore'): + if error_model == "azimuthal": + variance = (data - self.calcfrom1d(qAxis * pos0_scale, I, dim1_unit=unit, shape=shape)[mask]) ** 2 + if variance is not None: + var1d, b = numpy.histogram(pos0, npt, weights=variance, range=radial_range) + sigma = numpy.sqrt(var1d) / (count * normalization_factor) + sigma[count == 0] = dummy if 
dummy is not None else self._empty + with numpy.errstate(divide='ignore', invalid='ignore'): + I = sum_ / count / normalization_factor + I[count == 0] = dummy if dummy is not None else self._empty + + if pos0_scale: + # not in place to make a copy + qAxis = qAxis * pos0_scale + + result = Integrate1dResult(qAxis, I, sigma) + result._set_method_called("integrate1d") + result._set_method(method) + result._set_compute_engine(str(method)) + result._set_unit(unit) + result._set_sum(sum_) + result._set_count(count) + result._set_has_dark_correction(has_dark) + result._set_has_flat_correction(has_flat) + result._set_has_mask_applied(has_mask) + result._set_polarization_factor(polarization_factor) + result._set_normalization_factor(normalization_factor) + result._set_metadata(metadata) + + if filename is not None: + save_integrate_result(filename, result) + + return result + + _integrate1d_legacy = integrate1d_legacy + + @deprecated(since_version="0.21", only_once=True, deprecated_since="0.21.0") + def integrate2d_legacy(self, data, npt_rad, npt_azim=360, + filename=None, correctSolidAngle=True, variance=None, + error_model=None, radial_range=None, azimuth_range=None, + mask=None, dummy=None, delta_dummy=None, + polarization_factor=None, dark=None, flat=None, + method=None, unit=units.Q, safe=True, + normalization_factor=1.0, metadata=None): + """ + Calculate the azimuthal regrouped 2d image in q(nm^-1)/chi(deg) by default + + Multi algorithm implementation (tries to be bullet proof) + + :param data: 2D array from the Detector/CCD camera + :type data: ndarray + :param npt_rad: number of points in the radial direction + :type npt_rad: int + :param npt_azim: number of points in the azimuthal direction + :type npt_azim: int + :param filename: output image (as edf format) + :type filename: str + :param correctSolidAngle: correct for solid angle of each pixel if True + :type correctSolidAngle: bool + :param variance: array containing the variance of the data. If not available, no error propagation is done + :type variance: ndarray + :param error_model: When the variance is unknown, an error model can be given: "poisson" (variance = I), "azimuthal" (variance = (I-)^2) + :type error_model: str + :param radial_range: The lower and upper range of the radial unit. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type radial_range: (float, float), optional + :param azimuth_range: The lower and upper range of the azimuthal angle in degree. If not provided, range is simply (data.min(), data.max()). Values outside the range are ignored. + :type azimuth_range: (float, float), optional + :param mask: array (same size as image) with 1 for masked pixels, and 0 for valid pixels + :type mask: ndarray + :param dummy: value for dead/masked pixels + :type dummy: float + :param delta_dummy: precision for dummy value + :type delta_dummy: float + :param polarization_factor: polarization factor between -1 (vertical) + and +1 (horizontal). 0 for circular polarization or random, + None for no correction + :type polarization_factor: float + :param dark: dark noise image + :type dark: ndarray + :param flat: flat field image + :type flat: ndarray + :param IntegrationMethod method: IntegrationMethod instance or 3-tuple with (splitting, algorithm, implementation) + :param unit: Output units, can be "q_nm^-1", "q_A^-1", "2th_deg", "2th_rad", "r_mm" for now + :type unit: pyFAI.units.Unit + :param safe: Do some extra checks to ensure LUT is still valid. False is faster. 
+        :type safe: bool
+        :param normalization_factor: Value of a normalization monitor
+        :type normalization_factor: float
+        :param all: if true, return many more intermediate results as a dict (deprecated, please refer to the documentation of Integrate2dResult).
+        :type all: bool
+        :param metadata: JSON serializable object containing the metadata, usually a dictionary.
+        :return: azimuthally regrouped intensity, q/2theta/r pos. and chi pos.
+        :rtype: Integrate2dResult, dict
+        """
+        method = self._normalize_method(method, dim=2, default=self.DEFAULT_METHOD_2D)
+        assert method.dimension == 2
+        npt = (npt_rad, npt_azim)
+        unit = units.to_unit(unit)
+        pos0_scale = unit.scale
+        if mask is None:
+            has_mask = "from detector"
+            mask = self.mask
+            mask_crc = self.detector.get_mask_crc()
+            if mask is None:
+                has_mask = False
+                mask_crc = None
+        else:
+            has_mask = "provided"
+            mask = numpy.ascontiguousarray(mask)
+            mask_crc = crc32(mask)
+
+        shape = data.shape
+
+        if radial_range:
+            radial_range = tuple([i / pos0_scale for i in radial_range])
+
+        if variance is not None:
+            assert variance.size == data.size
+        elif error_model:
+            error_model = error_model.lower()
+            if error_model == "poisson":
+                variance = numpy.ascontiguousarray(data, numpy.float32)
+
+        if azimuth_range is not None:
+            azimuth_range = tuple(deg2rad(azimuth_range[i], self.chiDiscAtPi) for i in (0, -1))
+            if azimuth_range[1] <= azimuth_range[0]:
+                azimuth_range = (azimuth_range[0], azimuth_range[1] + 2 * pi)
+            self.check_chi_disc(azimuth_range)
+
+        if correctSolidAngle:
+            solidangle = self.solidAngleArray(shape, correctSolidAngle)
+        else:
+            solidangle = None
+
+        if polarization_factor is None:
+            polarization = polarization_crc = None
+        else:
+            polarization, polarization_crc = self.polarization(shape, polarization_factor, with_checksum=True)
+
+        if dark is None:
+            dark = self.detector.darkcurrent
+            if dark is None:
+                has_dark = False
+            else:
+                has_dark = "from detector"
+        else:
+            has_dark = "provided"
+
+        if flat is None:
+            flat = self.detector.flatfield
+            if flat is None:
+                has_flat = False
+            else:
+                has_flat = "from detector"
+        else:
+            has_flat = "provided"
+
+        I = None
+        sigma = None
+        sum_ = None
+        count = None
+
+        if method.algo_lower == "lut":
+            if EXT_LUT_ENGINE not in self.engines:
+                engine = self.engines[EXT_LUT_ENGINE] = Engine()
+            else:
+                engine = self.engines[EXT_LUT_ENGINE]
+            with engine.lock:
+                integr = engine.engine
+                reset = None
+                if integr is None:
+                    reset = "of first initialization"
+                if (not reset) and safe:
+                    if integr.unit != unit:
+                        reset = "unit changed"
+                    if integr.bins != npt:
+                        reset = "number of points changed"
+                    if integr.size != data.size:
+                        reset = "input image size changed"
+                    if (mask is not None) and (not integr.check_mask):
+                        reset = "mask but LUT was without mask"
+                    elif (mask is None) and (integr.check_mask):
+                        reset = "no mask but LUT has mask"
+                    elif (mask is not None) and (integr.mask_checksum != mask_crc):
+                        reset = "mask changed"
+                    if radial_range != integr.pos0_range:
+                        reset = "radial_range changed"
+                    if azimuth_range != integr.pos1_range:
+                        reset = "azimuth_range changed"
+                error = False
+                if reset:
+                    logger.info("ai.integrate2d: Resetting integrator because %s", reset)
+                    try:
+                        integr = self.setup_sparse_integrator(shape, npt, mask=mask,
+                                                              pos0_range=radial_range, pos1_range=azimuth_range,
+                                                              mask_checksum=mask_crc, algo="LUT", unit=unit, scale=False)
+                    except MemoryError:
+                        # LUT method is hungry in memory...
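+                        # on memory shortage, every cached engine is cleared and the fail-safe DEFAULT_METHOD_2D is used instead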
+ logger.warning("MemoryError: falling back on forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + engine.set_engine(integr) + if not error: + if method.impl_lower == "opencl": + if OCL_LUT_ENGINE in self.engines: + ocl_engine = self.engines[OCL_LUT_ENGINE] + else: + ocl_engine = self.engines[OCL_LUT_ENGINE] = Engine() + with ocl_engine.lock: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or \ + (ocl_integr.on_device["lut"] != integr.lut_checksum): + ocl_integr = ocl_azim_lut.OCL_LUT_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + + if (not error) and (ocl_integr is not None): + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + normalization_factor=normalization_factor, + safe=safe) + I.shape = npt + I = I.T + bins_rad = integr.bin_centers0 # this will be copied later + bins_azim = integr.bin_centers1 + else: + I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor + ) + + if method.algo_lower == "csr": + if EXT_CSR_ENGINE not in self.engines: + engine = self.engines[EXT_CSR_ENGINE] = Engine() + else: + engine = self.engines[EXT_CSR_ENGINE] + with engine.lock: + integr = engine.engine + reset = None + if integr is None: + reset = "of first initialization" + if (not reset) and safe: + if integr.unit != unit: + reset = "unit changed" + if integr.bins != npt: + reset = "number of points changed" + if integr.size != data.size: + reset = "input image size changed" + if (mask is not None) and (not integr.check_mask): + reset = "mask but CSR was without mask" + elif (mask is None) and (integr.check_mask): + reset = "no mask but CSR has mask" + elif (mask is not None) and (integr.mask_checksum != mask_crc): + reset = "mask changed" + if (radial_range is None) and (integr.pos0_range is not None): + reset = "radial_range was defined in CSR" + elif (radial_range is not None) and integr.pos0_range != (min(radial_range), max(radial_range)): + reset = "radial_range is defined but differs in CSR" + if (azimuth_range is None) and (integr.pos1_range is not None): + reset = "azimuth_range not defined and CSR had azimuth_range defined" + elif (azimuth_range is not None) and integr.pos1_range != (min(azimuth_range), max(azimuth_range)): + reset = "azimuth_range requested and CSR's azimuth_range don't match" + error = False + if reset: + logger.info("AI.integrate2d: Resetting integrator because %s", reset) + split = method.split_lower + if split == "pseudo": + split = "full" + try: + integr = self.setup_sparse_integrator(shape, npt, mask=mask, + pos0_range=radial_range, pos1_range=azimuth_range, + mask_checksum=mask_crc, + unit=unit, split=split, algo="CSR", + scale=False) + except MemoryError: + logger.warning("MemoryError: falling back on default forward implementation") + integr = None + self.reset_engines() + method = self.DEFAULT_METHOD_2D + error = True + else: + error = False + engine.set_engine(integr) + if not error: + if method.impl_lower == "opencl": + if OCL_CSR_ENGINE in self.engines: + 
ocl_engine = self.engines[OCL_CSR_ENGINE] + else: + ocl_engine = self.engines[OCL_CSR_ENGINE] = Engine() + with ocl_engine.lock: + platformid, deviceid = method.target + ocl_integr = ocl_engine.engine + if (ocl_integr is None) or (ocl_integr.on_device["data"] != integr.lut_checksum): + ocl_integr = ocl_azim_csr.OCL_CSR_Integrator(integr.lut, + integr.size, + platformid=platformid, + deviceid=deviceid, + checksum=integr.lut_checksum) + ocl_engine.set_engine(ocl_integr) + if (not error) and (ocl_integr is not None): + I, sum_, count = ocl_integr.integrate_legacy(data, dark=dark, flat=flat, + solidangle=solidangle, + solidangle_checksum=self._dssa_crc, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + polarization_checksum=polarization_crc, + safe=safe, + normalization_factor=normalization_factor) + I.shape = npt + I = I.T + bins_rad = integr.bin_centers0 # this will be copied later + bins_azim = integr.bin_centers1 + else: + I, bins_rad, bins_azim, sum_, count = integr.integrate_legacy(data, dark=dark, flat=flat, + solidAngle=solidangle, + dummy=dummy, + delta_dummy=delta_dummy, + polarization=polarization, + normalization_factor=normalization_factor) + + if method.method[1:4] in (("pseudo", "histogram", "cython"), ("full", "histogram", "cython")): + logger.debug("integrate2d uses SplitPixel implementation") + pos = self.array_from_unit(shape, "corner", unit, scale=False) + I, bins_rad, bins_azim, sum_, count = splitPixel.fullSplit2D(pos=pos, + weights=data, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=dummy if dummy is not None else self._empty) + if method.method[1:4] == ("bbox", "histogram", "cython"): + logger.debug("integrate2d uses BBox implementation") + chi = self.chiArray(shape) + dchi = self.deltaChi(shape) + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + dpos0 = self.array_from_unit(shape, "delta", unit, scale=False) + I, bins_rad, bins_azim, sum_, count = splitBBox.histoBBox2d(weights=data, + pos0=pos0, + delta_pos0=dpos0, + pos1=chi, + delta_pos1=dchi, + bins=(npt_rad, npt_azim), + pos0_range=radial_range, + pos1_range=azimuth_range, + dummy=dummy, + delta_dummy=delta_dummy, + mask=mask, + dark=dark, + flat=flat, + solidangle=solidangle, + polarization=polarization, + normalization_factor=normalization_factor, + chiDiscAtPi=self.chiDiscAtPi, + empty=dummy if dummy is not None else self._empty) + + if method.method[1:3] == ("no", "histogram") and method.impl_lower != "opencl": + logger.debug("integrate2d uses numpy or cython implementation") + data = data.astype(numpy.float32) # it is important to make a copy see issue #88 + mask = self.create_mask(data, mask, dummy, delta_dummy, + unit=unit, + radial_range=radial_range, + azimuth_range=azimuth_range, + mode="where") + pos0 = self.array_from_unit(shape, "center", unit, scale=False) + pos1 = self.chiArray(shape) + + if radial_range is None: + radial_range = [pos0.min(), pos0.max() * EPS32] + + if azimuth_range is None: + azimuth_range = [pos1.min(), pos1.max() * EPS32] + + if variance is not None: + variance = variance[mask] + + if dark is not None: + data -= dark + + if flat is not None: + data /= flat + + if polarization is not None: + data /= polarization + + if solidangle is not None: + data /= solidangle + + data = data[mask] + 
pos0 = pos0[mask]
+            pos1 = pos1[mask]
+            if method.impl_lower == "cython":
+                I, bins_azim, bins_rad, sum_, count = histogram.histogram2d(pos0=pos1,
+                                                                            pos1=pos0,
+                                                                            weights=data,
+                                                                            bins=(npt_azim, npt_rad),
+                                                                            split=False,
+                                                                            empty=dummy if dummy is not None else self._empty,
+                                                                            normalization_factor=normalization_factor)
+            elif method.impl_lower == "python":
+                logger.debug("integrate2d uses Numpy implementation")
+                count, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad), range=[azimuth_range, radial_range])
+                bins_azim = (b[1:] + b[:-1]) / 2.0
+                bins_rad = (c[1:] + c[:-1]) / 2.0
+                count1 = numpy.maximum(1, count)
+                sum_, b, c = numpy.histogram2d(pos1, pos0, (npt_azim, npt_rad),
+                                               weights=data, range=[azimuth_range, radial_range])
+                I = sum_ / count1 / normalization_factor
+                I[count == 0] = dummy if dummy is not None else self._empty
+        # I know I make copies ....
+        bins_rad = bins_rad * pos0_scale
+        bins_azim = bins_azim * 180.0 / pi
+
+        result = Integrate2dResult(I, bins_rad, bins_azim, sigma)
+        result._set_method_called("integrate2d")
+        result._set_compute_engine(str(method))
+        result._set_unit(unit)
+        result._set_count(count)
+        result._set_sum(sum_)
+        result._set_has_dark_correction(has_dark)
+        result._set_has_flat_correction(has_flat)
+        result._set_has_mask_applied(has_mask)
+        result._set_polarization_factor(polarization_factor)
+        result._set_normalization_factor(normalization_factor)
+        result._set_metadata(metadata)
+
+        if filename is not None:
+            save_integrate_result(filename, result)
+
+        return result
+
+    _integrate2d_legacy = integrate2d_legacy
+
+    @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter")
+    def save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH,
+               has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None):
+        """This method saves the result of a 1D integration.
+
+        Deprecated on 13/06/2017
+
+        :param filename: the filename used to save the 1D integration
+        :type filename: str
+        :param dim1: the x coordinates of the integrated curve
+        :type dim1: numpy.ndarray
+        :param I: The integrated intensity
+        :type I: numpy.ndarray
+        :param error: the error bar for each intensity
+        :type error: numpy.ndarray or None
+        :param dim1_unit: the unit of the dim1 array
+        :type dim1_unit: pyFAI.units.Unit
+        :param has_dark: save the darks filenames (default: no)
+        :type has_dark: bool
+        :param has_flat: save the flat filenames (default: no)
+        :type has_flat: bool
+        :param polarization_factor: the polarization factor
+        :type polarization_factor: float
+        :param normalization_factor: the monitor value
+        :type normalization_factor: float
+        """
+        self.__save1D(filename=filename,
+                      dim1=dim1,
+                      I=I,
+                      error=error,
+                      dim1_unit=dim1_unit,
+                      has_dark=has_dark,
+                      has_flat=has_flat,
+                      polarization_factor=polarization_factor,
+                      normalization_factor=normalization_factor)
+
+    def __save1D(self, filename, dim1, I, error=None, dim1_unit=units.TTH,
+                 has_dark=False, has_flat=False, polarization_factor=None, normalization_factor=None):
+        """This method saves the result of a 1D integration.
+
+        :param filename: the filename used to save the 1D integration
+        :type filename: str
+        :param dim1: the x coordinates of the integrated curve
+        :type dim1: numpy.ndarray
+        :param I: The integrated intensity
+        :type I: numpy.ndarray
+        :param error: the error bar for each intensity
+        :type error: numpy.ndarray or None
+        :param dim1_unit: the unit of the dim1 array
+        :type dim1_unit: pyFAI.units.Unit
+        :param has_dark: save the darks filenames (default: no)
+        :type has_dark: bool
+        :param has_flat: save the flat filenames (default: no)
+        :type has_flat: bool
+        :param polarization_factor: the polarization factor
+        :type polarization_factor: float
+        :param normalization_factor: the monitor value
+        :type normalization_factor: float
+        """
+        if not filename:
+            return
+        writer = DefaultAiWriter(None, self)
+        writer.save1D(filename, dim1, I, error, dim1_unit, has_dark, has_flat,
+                      polarization_factor, normalization_factor)
+
+    @deprecated(since_version="0.14", reason="Use the class DefaultAiWriter")
+    def save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH,
+               has_dark=False, has_flat=False,
+               polarization_factor=None, normalization_factor=None):
+        """This method saves the result of a 2D integration.
+
+        Deprecated on 13/06/2017
+
+        :param filename: the filename used to save the 2D histogram
+        :type filename: str
+        :param dim1: the 1st coordinates of the histogram
+        :type dim1: numpy.ndarray
+        :param dim2: the 2nd coordinates of the histogram
+        :type dim2: numpy.ndarray
+        :param I: The integrated intensity
+        :type I: numpy.ndarray
+        :param error: the error bar for each intensity
+        :type error: numpy.ndarray or None
+        :param dim1_unit: the unit of the dim1 array
+        :type dim1_unit: pyFAI.units.Unit
+        :param has_dark: save the darks filenames (default: no)
+        :type has_dark: bool
+        :param has_flat: save the flat filenames (default: no)
+        :type has_flat: bool
+        :param polarization_factor: the polarization factor
+        :type polarization_factor: float
+        :param normalization_factor: the monitor value
+        :type normalization_factor: float
+        """
+        self.__save2D(filename=filename,
+                      I=I,
+                      dim1=dim1,
+                      dim2=dim2,
+                      error=error,
+                      dim1_unit=dim1_unit,
+                      has_dark=has_dark,
+                      has_flat=has_flat,
+                      polarization_factor=polarization_factor,
+                      normalization_factor=normalization_factor)
+
+    def __save2D(self, filename, I, dim1, dim2, error=None, dim1_unit=units.TTH,
+                 has_dark=False, has_flat=False,
+                 polarization_factor=None, normalization_factor=None):
+        """This method saves the result of a 2D integration.
+
+        Deprecated on 13/06/2017
+
+        :param filename: the filename used to save the 2D histogram
+        :type filename: str
+        :param dim1: the 1st coordinates of the histogram
+        :type dim1: numpy.ndarray
+        :param dim2: the 2nd coordinates of the histogram
+        :type dim2: numpy.ndarray
+        :param I: The integrated intensity
+        :type I: numpy.ndarray
+        :param error: the error bar for each intensity
+        :type error: numpy.ndarray or None
+        :param dim1_unit: the unit of the dim1 array
+        :type dim1_unit: pyFAI.units.Unit
+        :param has_dark: save the darks filenames (default: no)
+        :type has_dark: bool
+        :param has_flat: save the flat filenames (default: no)
+        :type has_flat: bool
+        :param polarization_factor: the polarization factor
+        :type polarization_factor: float
+        :param normalization_factor: the monitor value
+        :type normalization_factor: float
+        """
+        if not filename:
+            return
+        writer = DefaultAiWriter(None, self)
+        writer.save2D(filename, I, dim1, dim2, error, dim1_unit, has_dark, has_flat,
+                      polarization_factor, normalization_factor)
+
+
+################################################################################
+# Some properties
+################################################################################
+
+    def set_darkcurrent(self, dark):
+        self.detector.set_darkcurrent(dark)
+
+    def get_darkcurrent(self):
+        return self.detector.get_darkcurrent()
+
+    darkcurrent = property(get_darkcurrent, set_darkcurrent)
+
+    def set_flatfield(self, flat):
+        self.detector.set_flatfield(flat)
+
+    def get_flatfield(self):
+        return self.detector.get_flatfield()
+
+    flatfield = property(get_flatfield, set_flatfield)
+
+    @deprecated(reason="Not maintained", since_version="0.17")
+    def set_darkfiles(self, files=None, method="mean"):
+        """Set the dark current from one or multiple files, averaged
+        according to the method provided.
+
+        Moved to Detector.
+
+        :param files: file(s) used to compute the dark.
+        :type files: str or list(str) or None
+        :param method: method used to compute the dark, "mean" or "median"
+        :type method: str
+        """
+        self.detector.set_darkfiles(files, method)
+
+    @property
+    @deprecated(reason="Not maintained", since_version="0.17")
+    def darkfiles(self):
+        return self.detector.darkfiles
+
+    @deprecated(reason="Not maintained", since_version="0.17")
+    def set_flatfiles(self, files, method="mean"):
+        """Set the flat field from one or multiple files, averaged
+        according to the method provided.
+
+        Moved to Detector.
+
+        :param files: file(s) used to compute the flat-field.
+ :type files: str or list(str) or None + :param method: method used to compute the dark, "mean" or "median" + :type method: str + """ + self.detector.set_flatfiles(files, method) + + @property + @deprecated(reason="Not maintained", since_version="0.17") + def flatfiles(self): + return self.detector.flatfiles + + def get_empty(self): + return self._empty + + def set_empty(self, value): + self._empty = float(value) + # propagate empty values to integrators + for engine in self.engines.values(): + with engine.lock: + if engine.engine is not None: + try: + engine.engine.empty = self._empty + except Exception as exeption: + logger.error(exeption) + + empty = property(get_empty, set_empty) + + def __getnewargs_ex__(self): + "Helper function for pickling ai" + return (self.dist, self.poni1, self.poni2, + self.rot1, self.rot2, self.rot3, + self.pixel1, self.pixel2, + self.splineFile, self.detector, self.wavelength), {} + + def __getstate__(self): + """Helper function for pickling ai + + :return: the state of the object + """ + + state_blacklist = ('_lock', "engines") + state = Geometry.__getstate__(self) + for key in state_blacklist: + if key in state: + del state[key] + return state + + def __setstate__(self, state): + """Helper function for unpickling ai + + :param state: the state of the object + """ + for statekey, statevalue in state.items(): + setattr(self, statekey, statevalue) + self._sem = threading.Semaphore() + self._lock = threading.Semaphore() + self.engines = {} diff --git a/src/pyFAI/integrator/meson.build b/src/pyFAI/integrator/meson.build index 2573c18a2..35f3c81bf 100644 --- a/src/pyFAI/integrator/meson.build +++ b/src/pyFAI/integrator/meson.build @@ -1,6 +1,7 @@ py.install_sources( ['__init__.py', 'azimuthal.py', + 'common.py', 'fiber.py', 'load_engines.py'], pure: false, # Will be installed next to binaries diff --git a/src/pyFAI/load_integrators.py b/src/pyFAI/load_integrators.py index 508d4df1b..98cfd6b02 100644 --- a/src/pyFAI/load_integrators.py +++ b/src/pyFAI/load_integrators.py @@ -30,7 +30,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "09/10/2024" +__date__ = "10/10/2024" __status__ = "stable" __docformat__ = 'restructuredtext' @@ -40,4 +40,8 @@ 09/10/2024 """ +# TODO put in place a deprecation warning when importing this module + from .integrator.load_engines import * +from .utils.decorators import deprecated_warning +deprecated_warning("Module", "pyFAI.load_integrators", replacement="pyFAI.integrator.load_engines", since_version="2024.10", only_once=False) diff --git a/src/pyFAI/multi_geometry.py b/src/pyFAI/multi_geometry.py index b80cca437..daba12b6e 100644 --- a/src/pyFAI/multi_geometry.py +++ b/src/pyFAI/multi_geometry.py @@ -39,7 +39,7 @@ import gc import logging logger = logging.getLogger(__name__) -from .azimuthalIntegrator import AzimuthalIntegrator +from .integrator.azimuthal import AzimuthalIntegrator from .containers import Integrate1dResult from .containers import Integrate2dResult from . 
import units diff --git a/src/pyFAI/opencl/test/test_ocl_azim_csr.py b/src/pyFAI/opencl/test/test_ocl_azim_csr.py index 23fa07e67..cf2bce570 100644 --- a/src/pyFAI/opencl/test/test_ocl_azim_csr.py +++ b/src/pyFAI/opencl/test/test_ocl_azim_csr.py @@ -44,7 +44,7 @@ import pyopencl.array from ...test.utilstest import UtilsTest from silx.opencl.common import _measure_workgroup_size -from ...azimuthalIntegrator import AzimuthalIntegrator +from ...integrator.azimuthal import AzimuthalIntegrator from ...method_registry import IntegrationMethod from scipy.ndimage import gaussian_filter1d logger = logging.getLogger(__name__) diff --git a/src/pyFAI/opencl/test/test_ocl_azim_lut.py b/src/pyFAI/opencl/test/test_ocl_azim_lut.py index 45a33d4a5..a4831af07 100644 --- a/src/pyFAI/opencl/test/test_ocl_azim_lut.py +++ b/src/pyFAI/opencl/test/test_ocl_azim_lut.py @@ -44,7 +44,7 @@ import pyopencl.array from ...test.utilstest import UtilsTest from silx.opencl.common import _measure_workgroup_size -from ...azimuthalIntegrator import AzimuthalIntegrator +from ...integrator.azimuthal import AzimuthalIntegrator from ...method_registry import IntegrationMethod from scipy.ndimage import gaussian_filter1d logger = logging.getLogger(__name__) diff --git a/src/pyFAI/opencl/test/test_ocl_histo.py b/src/pyFAI/opencl/test/test_ocl_histo.py index c6039f738..28787d6ab 100644 --- a/src/pyFAI/opencl/test/test_ocl_histo.py +++ b/src/pyFAI/opencl/test/test_ocl_histo.py @@ -45,7 +45,7 @@ import pyopencl.array from ...test.utilstest import UtilsTest from silx.opencl.common import _measure_workgroup_size -from ...azimuthalIntegrator import AzimuthalIntegrator +from ...integrator.azimuthal import AzimuthalIntegrator from ...containers import ErrorModel from scipy.ndimage import gaussian_filter1d logger = logging.getLogger(__name__) diff --git a/src/pyFAI/opencl/test/test_peak_finder.py b/src/pyFAI/opencl/test/test_peak_finder.py index e9a90295a..651f8395f 100644 --- a/src/pyFAI/opencl/test/test_peak_finder.py +++ b/src/pyFAI/opencl/test/test_peak_finder.py @@ -42,7 +42,7 @@ from .. 
import ocl import fabio from ...test.utilstest import UtilsTest -from ...azimuthalIntegrator import AzimuthalIntegrator +from ...integrator.azimuthal import AzimuthalIntegrator if ocl: from ..peak_finder import OCL_SimplePeakFinder, OCL_PeakFinder, densify logger = logging.getLogger(__name__) diff --git a/src/pyFAI/test/test_azimuthal_integrator.py b/src/pyFAI/test/test_azimuthal_integrator.py index c4d8e993a..3fb39dad8 100644 --- a/src/pyFAI/test/test_azimuthal_integrator.py +++ b/src/pyFAI/test/test_azimuthal_integrator.py @@ -45,7 +45,7 @@ import gc from .utilstest import UtilsTest logger = logging.getLogger(__name__) -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..method_registry import IntegrationMethod from ..containers import ErrorModel from ..detectors import Detector, detector_factory diff --git a/src/pyFAI/test/test_blob_detection.py b/src/pyFAI/test/test_blob_detection.py index b7ac5da13..638cfeeaf 100644 --- a/src/pyFAI/test/test_blob_detection.py +++ b/src/pyFAI/test/test_blob_detection.py @@ -39,7 +39,7 @@ import logging logger = logging.getLogger(__name__) from ..detectors import detector_factory -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..blob_detection import BlobDetection, local_max from ..ext import _blob from ..ext import morphology diff --git a/src/pyFAI/test/test_bug_regression.py b/src/pyFAI/test/test_bug_regression.py index 199905a69..ffcd2d154 100644 --- a/src/pyFAI/test/test_bug_regression.py +++ b/src/pyFAI/test/test_bug_regression.py @@ -50,7 +50,7 @@ from ..utils import mathutil import fabio from .. import load -from ..azimuthalIntegrator import AzimuthalIntegrator, logger as ai_logger +from ..integrator.azimuthal import AzimuthalIntegrator, logger as ai_logger from .. import detectors from .. 
import units from math import pi diff --git a/src/pyFAI/test/test_calibrant.py b/src/pyFAI/test/test_calibrant.py index e4542ca0a..df1f90426 100644 --- a/src/pyFAI/test/test_calibrant.py +++ b/src/pyFAI/test/test_calibrant.py @@ -43,7 +43,7 @@ logger = logging.getLogger(__name__) from ..calibrant import Calibrant, get_calibrant, Cell, CALIBRANT_FACTORY from ..detectors import ALL_DETECTORS -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator class TestCalibrant(unittest.TestCase): diff --git a/src/pyFAI/test/test_detector.py b/src/pyFAI/test/test_detector.py index 6eda2cc30..768e597e9 100644 --- a/src/pyFAI/test/test_detector.py +++ b/src/pyFAI/test/test_detector.py @@ -357,7 +357,7 @@ def test_hexagonal_detector(self): wl = 1e-10 from ..calibrant import ALL_CALIBRANTS - from ..azimuthalIntegrator import AzimuthalIntegrator + from ..integrator.azimuthal import AzimuthalIntegrator AgBh = ALL_CALIBRANTS("AgBh") AgBh.wavelength = 1e-10 ai = AzimuthalIntegrator(detector=pix, wavelength=wl) diff --git a/src/pyFAI/test/test_error_model.py b/src/pyFAI/test/test_error_model.py index c4882d546..a7ba50c04 100644 --- a/src/pyFAI/test/test_error_model.py +++ b/src/pyFAI/test/test_error_model.py @@ -42,7 +42,7 @@ import numpy from ..utils.mathutil import cormap from ..detectors import Detector -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from .utilstest import UtilsTest diff --git a/src/pyFAI/test/test_export.py b/src/pyFAI/test/test_export.py index c97930c33..4af9b28b1 100644 --- a/src/pyFAI/test/test_export.py +++ b/src/pyFAI/test/test_export.py @@ -38,7 +38,7 @@ import logging import numpy from .utilstest import UtilsTest -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator logger = logging.getLogger(__name__) diff --git a/src/pyFAI/test/test_flat.py b/src/pyFAI/test/test_flat.py index 10972517f..4a7569760 100644 --- a/src/pyFAI/test/test_flat.py +++ b/src/pyFAI/test/test_flat.py @@ -42,7 +42,7 @@ pyFAI = sys.modules["pyFAI"] from ..opencl import ocl from .utilstest import UtilsTest -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..method_registry import IntegrationMethod diff --git a/src/pyFAI/test/test_geometry.py b/src/pyFAI/test/test_geometry.py index f106ed947..b08d5d603 100644 --- a/src/pyFAI/test/test_geometry.py +++ b/src/pyFAI/test/test_geometry.py @@ -49,7 +49,7 @@ from . import utilstest from ..io.ponifile import PoniFile from .. import geometry -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from .. 
import units from ..detectors import detector_factory from ..third_party import transformations diff --git a/src/pyFAI/test/test_integrate.py b/src/pyFAI/test/test_integrate.py index 322f56ce6..c85e364c1 100644 --- a/src/pyFAI/test/test_integrate.py +++ b/src/pyFAI/test/test_integrate.py @@ -41,7 +41,7 @@ import logging from .utilstest import UtilsTest logger = logging.getLogger(__name__) -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..containers import Integrate1dResult, Integrate2dResult from ..io import DefaultAiWriter from ..detectors import Pilatus1M diff --git a/src/pyFAI/test/test_invert_geometry.py b/src/pyFAI/test/test_invert_geometry.py index 8a2ebb137..03ae7fb8c 100644 --- a/src/pyFAI/test/test_invert_geometry.py +++ b/src/pyFAI/test/test_invert_geometry.py @@ -35,7 +35,7 @@ import unittest import numpy -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..ext.invert_geometry import InvertGeometry diff --git a/src/pyFAI/test/test_mask.py b/src/pyFAI/test/test_mask.py index 24b69d623..d008b40e3 100644 --- a/src/pyFAI/test/test_mask.py +++ b/src/pyFAI/test/test_mask.py @@ -44,7 +44,7 @@ if logger.getEffectiveLevel() <= logging.INFO: import pylab from .. import load, detectors -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator class TestMask(unittest.TestCase): diff --git a/src/pyFAI/test/test_multi_geometry.py b/src/pyFAI/test/test_multi_geometry.py index f7643e71e..36fc5098b 100644 --- a/src/pyFAI/test/test_multi_geometry.py +++ b/src/pyFAI/test/test_multi_geometry.py @@ -39,7 +39,7 @@ from .utilstest import UtilsTest logger = logging.getLogger(__name__) import numpy -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..multi_geometry import MultiGeometry from ..detectors import Detector diff --git a/src/pyFAI/test/test_pickle.py b/src/pyFAI/test/test_pickle.py index c887eddb3..dadf9a226 100644 --- a/src/pyFAI/test/test_pickle.py +++ b/src/pyFAI/test/test_pickle.py @@ -35,7 +35,7 @@ __date__ = "05/09/2023" import numpy -from pyFAI.azimuthalIntegrator import AzimuthalIntegrator +from pyFAI.integrator.azimuthal import AzimuthalIntegrator from pyFAI.detectors import detector_factory from pickle import dumps, loads import unittest diff --git a/src/pyFAI/test/test_polarization.py b/src/pyFAI/test/test_polarization.py index 3ef6711c3..e4eaf91fc 100644 --- a/src/pyFAI/test/test_polarization.py +++ b/src/pyFAI/test/test_polarization.py @@ -38,7 +38,7 @@ import numpy import logging logger = logging.getLogger(__name__) -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator class TestPolarization(unittest.TestCase): diff --git a/src/pyFAI/test/test_saxs.py b/src/pyFAI/test/test_saxs.py index 3b6a33558..99cdec1b0 100644 --- a/src/pyFAI/test/test_saxs.py +++ b/src/pyFAI/test/test_saxs.py @@ -39,7 +39,7 @@ import fabio from .utilstest import UtilsTest logger = logging.getLogger(__name__) -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..detectors import Pilatus1M if logger.getEffectiveLevel() <= logging.INFO: import pylab diff --git a/src/pyFAI/test/test_sparse.py b/src/pyFAI/test/test_sparse.py index c1f373dea..682a6bb93 100644 --- a/src/pyFAI/test/test_sparse.py +++ b/src/pyFAI/test/test_sparse.py @@ -41,7 +41,7 @@ 
logger = logging.getLogger(__name__) from .. import load from ..detectors import detector_factory -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..ext import sparse_utils from ..utils.mathutil import rwp import fabio diff --git a/src/pyFAI/test/test_split_pixel.py b/src/pyFAI/test/test_split_pixel.py index 93e680822..f4a77cabc 100644 --- a/src/pyFAI/test/test_split_pixel.py +++ b/src/pyFAI/test/test_split_pixel.py @@ -43,7 +43,7 @@ import logging from .utilstest import UtilsTest logger = logging.getLogger(__name__) -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..detectors import Detector from ..utils import mathutil from ..ext import splitBBox, splitPixel diff --git a/src/pyFAI/test/test_worker.py b/src/pyFAI/test/test_worker.py index 09d85e54b..7f9d400f7 100644 --- a/src/pyFAI/test/test_worker.py +++ b/src/pyFAI/test/test_worker.py @@ -43,7 +43,7 @@ from .. import units from .. import worker as worker_mdl from ..worker import Worker, PixelwiseWorker -from ..azimuthalIntegrator import AzimuthalIntegrator +from ..integrator.azimuthal import AzimuthalIntegrator from ..containers import Integrate1dResult from ..containers import Integrate2dResult from ..io.integration_config import ConfigurationReader diff --git a/src/pyFAI/worker.py b/src/pyFAI/worker.py index 08365aeec..76193ac63 100644 --- a/src/pyFAI/worker.py +++ b/src/pyFAI/worker.py @@ -95,7 +95,7 @@ from . import average from . import method_registry -from .azimuthalIntegrator import AzimuthalIntegrator +from .integrator.azimuthal import AzimuthalIntegrator from .containers import ErrorModel from .method_registry import IntegrationMethod from .distortion import Distortion From 795747404169738a6aba59769b91044f14bf36d7 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 10:54:37 +0200 Subject: [PATCH 06/11] Be more verbose about deprecation --- src/pyFAI/azimuthalIntegrator.py | 3 ++- src/pyFAI/geometryRefinement.py | 13 ++++++------- src/pyFAI/load_integrators.py | 3 ++- src/pyFAI/utils/decorators.py | 6 ++++-- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/pyFAI/azimuthalIntegrator.py b/src/pyFAI/azimuthalIntegrator.py index a106e2c05..559d248f8 100644 --- a/src/pyFAI/azimuthalIntegrator.py +++ b/src/pyFAI/azimuthalIntegrator.py @@ -41,4 +41,5 @@ from .integrator.azimuthal import AzimuthalIntegrator, logger from .utils.decorators import deprecated_warning -deprecated_warning("Module", "pyFAI.azimuthalIntegrator", replacement="pyFAI.integrator.azimuthal", since_version="2024.10", only_once=False) +deprecated_warning("Module", "pyFAI.azimuthalIntegrator", replacement="pyFAI.integrator.azimuthal", + since_version="2024.10", only_once=False, skip_backtrace_count=1) diff --git a/src/pyFAI/geometryRefinement.py b/src/pyFAI/geometryRefinement.py index a09ba81d3..dfdf6f7f9 100644 --- a/src/pyFAI/geometryRefinement.py +++ b/src/pyFAI/geometryRefinement.py @@ -32,7 +32,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "21/11/2023" +__date__ = "10/10/2024" __status__ = "development" import os @@ -43,11 +43,10 @@ import numpy import math from math import pi -from . 
import azimuthalIntegrator +from .integrator.azimuthal import AzimuthalIntegrator from .calibrant import Calibrant, CALIBRANT_FACTORY from .utils.ellipse import fit_ellipse from .utils.decorators import deprecated -AzimuthalIntegrator = azimuthalIntegrator.AzimuthalIntegrator from scipy.optimize import fmin, leastsq, fmin_slsqp logger = logging.getLogger(__name__) @@ -111,10 +110,10 @@ def __init__(self, data=None, calibrant=None, if (pixel1 is None) and (pixel2 is None) and (splineFile is None) and (detector is None): raise RuntimeError("Setting up the geometry refinement without knowing the detector makes little sense") - AzimuthalIntegrator.__init__(self, dist, 0, 0, - rot1, rot2, rot3, - pixel1, pixel2, splineFile, detector, - wavelength=wavelength, **kwargs) + super().__init__(dist, 0, 0, + rot1, rot2, rot3, + pixel1, pixel2, splineFile, detector, + wavelength=wavelength, **kwargs) if calibrant is None: self.calibrant = Calibrant() diff --git a/src/pyFAI/load_integrators.py b/src/pyFAI/load_integrators.py index 98cfd6b02..26b364238 100644 --- a/src/pyFAI/load_integrators.py +++ b/src/pyFAI/load_integrators.py @@ -44,4 +44,5 @@ from .integrator.load_engines import * from .utils.decorators import deprecated_warning -deprecated_warning("Module", "pyFAI.load_integrators", replacement="pyFAI.integrator.load_engines", since_version="2024.10", only_once=False) +deprecated_warning("Module", "pyFAI.load_integrators", replacement="pyFAI.integrator.load_engines", + since_version="2024.10", only_once=False, skip_backtrace_count=1) diff --git a/src/pyFAI/utils/decorators.py b/src/pyFAI/utils/decorators.py index 81676031c..42cfcf80b 100644 --- a/src/pyFAI/utils/decorators.py +++ b/src/pyFAI/utils/decorators.py @@ -30,7 +30,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "18/12/2023" +__date__ = "10/10/2024" __status__ = "development" __docformat__ = 'restructuredtext' @@ -128,7 +128,9 @@ def deprecated_warning(type_, name, reason=None, replacement=None, msg += f" Use '{replacement}' instead." 
msg += "\n%s" limit = 2 + skip_backtrace_count - backtrace = "".join(traceback.format_stack(limit=limit)[0]) + # print(limit) + # print(traceback.format_stack(limit=limit)) + backtrace = "".join(traceback.format_stack()[:-limit]) backtrace = backtrace.rstrip() if only_once: data = (msg, type_, name, backtrace) From e1416fd114ea9a7eb6a9835890c1472218e16ca8 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 10:56:44 +0200 Subject: [PATCH 07/11] This is now the new location of AzimuthalIntegrator --- src/pyFAI/geometry/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyFAI/geometry/core.py b/src/pyFAI/geometry/core.py index ffe72f3f3..85b0a302c 100644 --- a/src/pyFAI/geometry/core.py +++ b/src/pyFAI/geometry/core.py @@ -2088,7 +2088,7 @@ def calcfrom2d(self, I, tth, chi, shape=None, mask=None, calcimage[numpy.where(mask)] = dummy return calcimage - def promote(self, klass_name="pyFAI.azimuthalIntegrator.AzimuthalIntegrator"): + def promote(self, klass_name="pyFAI.integrator.azimuthal.AzimuthalIntegrator"): """Promote this instance into one of its derived class (deep copy like) :param klass: Fully qualified name of the class to promote to From 393c3e61d66d6575bbf527123561771508a634aa Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 10:56:54 +0200 Subject: [PATCH 08/11] remove deprec warning --- src/pyFAI/test/test_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pyFAI/test/test_utils.py b/src/pyFAI/test/test_utils.py index e3278f6c0..6bc33e7e8 100644 --- a/src/pyFAI/test/test_utils.py +++ b/src/pyFAI/test/test_utils.py @@ -32,7 +32,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "18/12/2023" +__date__ = "10/10/2024" import os import unittest @@ -43,7 +43,7 @@ from .. import utils from ..version import calc_hexversion from ..method_registry import IntegrationMethod -from .. import azimuthalIntegrator +# from .. import azimuthalIntegrator from ..detectors import detector_factory # to increase test coverage of missing files: from .. import directories From 285caea0ce19996183bb07700117f3d9875cdcc6 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 11:02:21 +0200 Subject: [PATCH 09/11] Remove deprecation warnings --- src/pyFAI/gui/utils/unitutils.py | 6 +++--- src/pyFAI/test/test_csr.py | 2 +- src/pyFAI/test/test_io.py | 4 ++-- src/pyFAI/test/test_utils.py | 1 - src/pyFAI/test/utilstest.py | 10 +++++----- 5 files changed, 11 insertions(+), 12 deletions(-) diff --git a/src/pyFAI/gui/utils/unitutils.py b/src/pyFAI/gui/utils/unitutils.py index 17d28b224..83fb8638c 100644 --- a/src/pyFAI/gui/utils/unitutils.py +++ b/src/pyFAI/gui/utils/unitutils.py @@ -24,7 +24,7 @@ __authors__ = ["V. 
Valls"] __license__ = "MIT" -__date__ = "05/10/2023" +__date__ = "10/10/2024" import numpy import collections.abc @@ -51,7 +51,7 @@ def tthToRad( :param unit: instance of pyFAI.units.Unit :param wavelength: wavelength in m :param directDist: distance from sample to beam-center on the detector in _mm_ - :param ai: instance of pyFAI.azimuthalIntegrator.AzimuthalIntegrator + :param ai: instance of pyFAI.integrator.azimuthal.AzimuthalIntegrator """ if isinstance(twoTheta, numpy.ndarray): pass @@ -98,7 +98,7 @@ def from2ThRad(twoTheta, unit, wavelength=None, directDist=None, ai=None): :param unit: instance of pyFAI.units.Unit :param wavelength: wavelength in m :param directDist: distance from sample to beam-center on the detector in _mm_ - :param ai: instance of pyFAI.azimuthalIntegrator.AzimuthalIntegrator + :param ai: instance of pyFAI.integrator.azimuthal.AzimuthalIntegrator """ if isinstance(twoTheta, numpy.ndarray): pass diff --git a/src/pyFAI/test/test_csr.py b/src/pyFAI/test/test_csr.py index 5453bdd5e..c26098653 100644 --- a/src/pyFAI/test/test_csr.py +++ b/src/pyFAI/test/test_csr.py @@ -42,7 +42,7 @@ from ..ext import splitBBoxCSR from ..engines.CSR_engine import CsrIntegrator2d, CsrIntegrator1d from ..method_registry import IntegrationMethod -from .. import azimuthalIntegrator +from ..integrator import azimuthal as azimuthalIntegrator from ..containers import ErrorModel if opencl.ocl: from ..opencl import azim_csr as ocl_azim_csr diff --git a/src/pyFAI/test/test_io.py b/src/pyFAI/test/test_io.py index 5512555c8..8318286a2 100644 --- a/src/pyFAI/test/test_io.py +++ b/src/pyFAI/test/test_io.py @@ -32,7 +32,7 @@ __contact__ = "Jerome.Kieffer@ESRF.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "03/06/2024" +__date__ = "10/10/2024" import unittest import os @@ -51,7 +51,7 @@ from ..io.ponifile import PoniFile import h5py import fabio -import pyFAI.azimuthalIntegrator +import ..integrator.azimuthal as azimuthalIntegrator class TestPoniFile(unittest.TestCase): diff --git a/src/pyFAI/test/test_utils.py b/src/pyFAI/test/test_utils.py index 6bc33e7e8..439458787 100644 --- a/src/pyFAI/test/test_utils.py +++ b/src/pyFAI/test/test_utils.py @@ -43,7 +43,6 @@ from .. import utils from ..version import calc_hexversion from ..method_registry import IntegrationMethod -# from .. import azimuthalIntegrator from ..detectors import detector_factory # to increase test coverage of missing files: from .. import directories diff --git a/src/pyFAI/test/utilstest.py b/src/pyFAI/test/utilstest.py index 857962da6..f879d883c 100644 --- a/src/pyFAI/test/utilstest.py +++ b/src/pyFAI/test/utilstest.py @@ -28,7 +28,7 @@ __contact__ = "jerome.kieffer@esrf.eu" __license__ = "MIT" __copyright__ = "European Synchrotron Radiation Facility, Grenoble, France" -__date__ = "10/10/2023" +__date__ = "10/10/2024" PACKAGE = "pyFAI" @@ -508,12 +508,12 @@ def create_fake_data(dist=1, poni1=0, poni2=0, rot1=0, rot2=0, rot3=0, :return: image, azimuthalIngtegrator """ from .. import calibrant as pyFAI_calibrant - from .. 
import azimuthalIntegrator + from ..integrator.azimuthal import AzimuthalIntegrator cal = pyFAI_calibrant.get_calibrant(calibrant) cal.wavelength = wavelength - ai = azimuthalIntegrator.AzimuthalIntegrator(dist, poni1, poni2, - rot1, rot2, rot3, - detector=detector, wavelength=wavelength) + ai = AzimuthalIntegrator(dist, poni1, poni2, + rot1, rot2, rot3, + detector=detector, wavelength=wavelength) img = cal.fake_calibration_image(ai, Imax=Imax) + offset if poissonian and test_options.TEST_RANDOM: rng = test_options.get_rng() From 8381a296ab7b4e60c438a096daf127b3f5dfcab3 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 14:33:56 +0200 Subject: [PATCH 10/11] typo --- src/pyFAI/test/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyFAI/test/test_io.py b/src/pyFAI/test/test_io.py index 8318286a2..5620eb75d 100644 --- a/src/pyFAI/test/test_io.py +++ b/src/pyFAI/test/test_io.py @@ -51,7 +51,7 @@ from ..io.ponifile import PoniFile import h5py import fabio -import ..integrator.azimuthal as azimuthalIntegrator +from ..integrator import azimuthal as azimuthalIntegrator class TestPoniFile(unittest.TestCase): From b7f7420aa9d46afaeb86be4b37b27363702ab0d6 Mon Sep 17 00:00:00 2001 From: Jerome Kieffer Date: Thu, 10 Oct 2024 15:30:41 +0200 Subject: [PATCH 11/11] Update test_io.py --- src/pyFAI/test/test_io.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyFAI/test/test_io.py b/src/pyFAI/test/test_io.py index 5620eb75d..a1ad1fe2f 100644 --- a/src/pyFAI/test/test_io.py +++ b/src/pyFAI/test/test_io.py @@ -294,7 +294,7 @@ def setUp(self): unittest.TestCase.setUp(self) detector = pyFAI.detector_factory("pilatus300k") - self.ai = pyFAI.azimuthalIntegrator.AzimuthalIntegrator(detector=detector) + self.ai = azimuthalIntegrator.AzimuthalIntegrator(detector=detector) nframes = 100 nspots = UtilsTest.get_rng().uniform(1, nframes, size=nframes).astype(numpy.int64) self.spots = [numpy.empty(count, dtype=[("index", numpy.int32),