diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 5aa620cdd..bbc1afa08 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.24.dev +current_version = 3.26.dev commit = True tag = False parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<release>[a-z]+)?(?P<patch>\d+)? @@ -8,9 +8,9 @@ serialize = {major}.{minor}.{release} {major}.{minor}.{patch} -[bumpversion:file:setup.py] -search = __version_tag__ = "{current_version}" -replace = __version_tag__ = "{new_version}" +[bumpversion:file:pyproject.toml] +search = version = "{current_version}" +replace = version = "{new_version}" [bumpversion:part:patch] diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 59da8a915..868edb9c6 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -14,7 +14,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macOS, windows] - python: ["3.9", "3.12"] + python: ["3.11", "3.12", "3.13"] runs-on: ${{ matrix.os }}-latest # Micromamba needs a login shell to activate defaults: @@ -45,6 +45,7 @@ jobs: run: echo "CMAKE_GENERATOR=Ninja" >> $GITHUB_ENV - name: Build run: | + pip3 install "nxmx>=0.0.5" mkdir build cd build cmake ../dxtbx -DCMAKE_UNITY_BUILD=true -DPython_ROOT_DIR="${CONDA_PREFIX}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5c52d77ef..88e826b7e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,10 @@ +ci: + autoupdate_schedule: quarterly + repos: # Syntax validation and some basic sanity checks - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: check-merge-conflict - id: check-ast @@ -16,7 +19,7 @@ repos: name: "Don't commit to 'main' directly" - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.4.8 + rev: v0.11.12 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix, --show-fixes] @@ -25,7 +28,7 @@ repos: types: [file] - repo: https://github.com/pre-commit/mirrors-clang-format - rev: 
v14.0.6 + rev: v20.1.5 hooks: - id: clang-format files: \.c(c|pp|xx)?$|\.h(pp)?$ diff --git a/AUTHORS b/AUTHORS index b3edf888c..4a46d621f 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,6 +1,7 @@ Contributors include: Aaron Brewster +Aaron Finke Asmit Bhowmick Ben Williams Billy K. Poon diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 83793fb62..e883f6204 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,108 @@ +dxtbx 3.25.0 (2025-06-20) +========================= + +Features +-------- + +- DXTBX is now compatible with numpy 2. (`#751 `_) +- Allow the creation of a simple ``Crystal`` model from PHIL parameters. (`#819 `_) +- Python 3.13 compatibility. (`#826 `_) + + +Bugfixes +-------- + +- Convert tests to use ``dials-data``, rather than ``dials_regression``. This means they can be run outside of Diamond. (`#812 `_) +- Increase displayed precision of crystal U, B and A matrices. (`#823 `_) +- Add DECTRIS Singla to detectors.lib. (`#824 `_) +- Avoid reading h5 imagesets as having a single image, when a format class is not available (`#825 `_) +- ``dials.modify_experiments``: Bug fixes for ``CrystalFactory.from_phil``. (`#829 `_) + + +Deprecations and Removals +------------------------- + +- The ``dials_regression`` and ``dials_regression_path`` fixtures are removed, as no tests use this repository any more. (`#815 `_) + + +Misc +---- + +- `#778 `_, `#807 `_, `#810 `_, `#817 `_, `#818 `_, `#827 `_ + + +DIALS 3.24.1 (2025-05-13) +========================= + +Bugfixes +-------- + +- ``dxtbx.install_format``: Fix for modern setuptools versions which drop legacy ``setup.py`` features. (`#808 `_) + + +dxtbx 3.24.0 (2025-04-29) +========================= + +Features +-------- + +- ``FormatROD_Arc``: Support for Rigaku HyPix-Arc 100° and 150° curved detectors. (`#787 `_) +- ``FormatBrukerELA``: Add support for the DECTRIS-ELA detector images in Bruker SFRM format. (`#802 `_) +- ``dxtbx.any2nexus``: Add ``sensor_material=`` and ``sensor_thickness=`` options. 
(`#803 `_) +- ``FormatXTC``: New features for managing wavelength calibration and an adjustment to the minimum trusted range for the ePix. (`#804 `_) + + +Bugfixes +-------- + +- Fix issue where attempting to group experiments would fail if the source images are currently inaccessible. This caused failures in downstream tooling such as ``xia2.ssx_reduce``. (`#772 `_) +- ``dials.import``: Reduce excessive memory usage when importing many (>100s) FormatNXMX files. (`#789 `_) +- ``FormatNXmxEigerFilewriter``: Use a lookup table when deciding whether to swap image dimensions, rather than relying on a firmware version check. (`#793 `_) +- ``FormatROD``: Use the weighted average of K-alpha1 and K-alpha2 as the monochromatic wavelength for the beam model. (`#800 `_) +- ``FormatRAXIS``: Allow the possibility of reading compressed files. (`#801 `_) + + +Improved Documentation +---------------------- + +- The user support mailing list is now ``dials-user-group@jiscmail.net`` (`#795 `_) + + +Misc +---- + +- `#197 `_, `#788 `_, `#796 `_, `#797 `_, `#805 `_, `#978 `_ + + +dxtbx 3.23.0 (2025-01-08) +========================= + +Features +-------- + +- Nexus support: Handle reading new scale_factor fields (used for detector gain). (`#756 `_) +- ``dials.import``: Add a progress bar, so that it doesn't look like progress has stopped with large collections of images. (`#768 `_) +- Add ``FormatSMVADSCCetaD`` to allow easier processing of 3DED images from the Ceta-D detector, which have been converted to SMV. 
(`#770 `_) + + +Bugfixes +-------- + +- ``dials.show``: Hide progress bar if DIALS_NOBANNER (`#774 `_) + + +Deprecations and Removals +------------------------- + +- Python 3.10 is now the minimum required (`#769 `_) + + +Misc +---- + +- `#767 `_, `#773 `_, `#775 `_ + + dxtbx 3.22.0 (2024-10-15) ========================= diff --git a/CMakeLists.txt b/CMakeLists.txt index c356a050e..867cc5fb7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -11,6 +11,11 @@ include(AlwaysColourCompilation) # Always show coloured compiler output set(CMAKE_EXPORT_COMPILE_COMMANDS ON) # Generate compile_commands.json set(CMAKE_CXX_STANDARD 14) +# Handle unity build on Windows +if (CMAKE_UNITY_BUILD AND MSVC) + add_compile_options(/bigobj) +endif() + find_package(Python REQUIRED COMPONENTS Interpreter Development) find_package(CCTBX COMPONENTS scitbx cctbx REQUIRED) find_package(pybind11 REQUIRED) diff --git a/SConscript b/SConscript index 47c27e746..966fdf0b2 100644 --- a/SConscript +++ b/SConscript @@ -215,6 +215,7 @@ if not env_etc.no_boost_python and hasattr(env_etc, "boost_adaptbx_include"): "src/dxtbx/model/boost_python/crystal.cc", "src/dxtbx/model/boost_python/parallax_correction.cc", "src/dxtbx/model/boost_python/pixel_to_millimeter.cc", + "src/dxtbx/model/boost_python/history.cc", "src/dxtbx/model/boost_python/experiment.cc", "src/dxtbx/model/boost_python/experiment_list.cc", "src/dxtbx/model/boost_python/model_ext.cc", diff --git a/build.py b/build.py deleted file mode 100644 index 21682eaa2..000000000 --- a/build.py +++ /dev/null @@ -1,113 +0,0 @@ -""" -Handle dynamic aspects of setup.py and building. - -This is separate because the non-dynamic stuff can generally be moved -out of a setup.py, but mainly because at the moment it's how poetry -offloads the unresolved build phases. 
-""" - -from __future__ import annotations - -import ast -import itertools -import re -import sys -from pathlib import Path -from typing import Any - - -def get_entry_point(filename: Path, prefix: str, import_path: str) -> list[str]: - """Returns the entry point string for a given path. - - This looks for LIBTBX_SET_DISPATCHER_NAME, and a root function - named 'run'. It can return multiple results for each file, if more - than one dispatcher name is bound. - - Args: - filename: - The python file to parse. Will look for a run() function - and any number of LIBTBX_SET_DISPATCHER_NAME. - prefix: The prefix to output the entry point console script with - import_path: The import path to get to the package the file is in - - Returns: - A list of entry_point specifications - """ - contents = filename.read_text() - tree = ast.parse(contents) - # Find root functions named "run" - has_run = any( - x - for x in tree.body - if (isinstance(x, ast.FunctionDef) and x.name == "run") - or (isinstance(x, ast.ImportFrom) and "run" in [a.name for a in x.names]) - ) - if not has_run: - return [] - # Find if we need an alternate name via LIBTBX_SET_DISPATCHER_NAME - alternate_names = re.findall( - r"^#\s*LIBTBX_SET_DISPATCHER_NAME\s+(.*)$", contents, re.M - ) - if alternate_names: - return [f"{name}={import_path}.{filename.stem}:run" for name in alternate_names] - - return [f"{prefix}.{filename.stem}={import_path}.{filename.stem}:run"] - - -def enumerate_format_classes(path: Path) -> list[str]: - """Find all Format*.py files and contained Format classes in a path""" - format_classes = [] - for filename in path.glob("Format*.py"): - content = filename.read_bytes() - try: - parsetree = ast.parse(content) - except SyntaxError: - print(f" *** Could not parse {filename.name}") - continue - for top_level_def in parsetree.body: - if not isinstance(top_level_def, ast.ClassDef): - continue - base_names = [ - baseclass.id - for baseclass in top_level_def.bases - if isinstance(baseclass, ast.Name) 
and baseclass.id.startswith("Format") - ] - if base_names: - classname = top_level_def.name - format_classes.append( - f"{classname}:{','.join(base_names)} = dxtbx.format.{filename.stem}:{classname}" - ) - # print(" found", classname, " based on ", str(base_names)) - return format_classes - - -def build(setup_kwargs: dict[str, Any]) -> None: - """Called by setup.py to inject any dynamic configuration""" - package_path = Path(__file__).parent / "src" / "dxtbx" - entry_points = setup_kwargs.setdefault("entry_points", {}) - console_scripts = entry_points.setdefault("console_scripts", []) - # Work out what dispatchers to add - all_dispatchers = sorted( - itertools.chain( - *[ - get_entry_point(f, "dxtbx", "dxtbx.command_line") - for f in (package_path / "command_line").glob("*.py") - ] - ) - ) - console_scripts.extend(x for x in all_dispatchers if x not in console_scripts) - libtbx_dispatchers = entry_points.setdefault("libtbx.dispatcher.script", []) - libtbx_dispatchers.extend( - "{name}={name}".format(name=x.split("=")[0]) for x in console_scripts - ) - - dxtbx_format = entry_points.setdefault("dxtbx.format", []) - format_classes = sorted(enumerate_format_classes(package_path / "format")) - dxtbx_format.extend([x for x in format_classes if x not in dxtbx_format]) - - print(f"Found {len(entry_points['console_scripts'])} dxtbx dispatchers") - print(f"Found {len(entry_points['dxtbx.format'])} Format classes") - - -if __name__ == "__main__": - sys.exit("Cannot call build.py directly, please use setup.py instead") diff --git a/conftest.py b/conftest.py index 3e2491ea5..22d528e7f 100644 --- a/conftest.py +++ b/conftest.py @@ -5,50 +5,11 @@ from __future__ import annotations -import os -import socket - import pytest collect_ignore = [] -def dials_regression_path(): - """Return the absolute path to the dials_regression module as a string. 
- This function is used directly by tests/test_regression_images.py""" - - if "DIALS_REGRESSION" in os.environ: - return os.environ["DIALS_REGRESSION"] - - try: - import dials_regression as dr - - return os.path.abspath(os.path.dirname(dr.__file__)) - except ImportError: - pass # dials_regression not configured - - # Check if we are in a known location - reference_copy = ( - "/dls/science/groups/scisoft/DIALS/repositories/git-reference/dials_regression" - ) - if ( - os.name == "posix" - and socket.gethostname().endswith(".diamond.ac.uk") - and os.path.exists(reference_copy) - ): - return reference_copy - - -@pytest.fixture(scope="session") -def dials_regression(): - """Return the absolute path to the dials_regression module as a string. - Skip the test if dials_regression is not available.""" - d_r = dials_regression_path() - if d_r: - return d_r - pytest.skip("dials_regression required for this test") - - def pytest_addoption(parser): """Add '--regression' options to pytest.""" try: diff --git a/dependencies.yaml b/dependencies.yaml index 28aa16884..6ce22991c 100644 --- a/dependencies.yaml +++ b/dependencies.yaml @@ -55,12 +55,11 @@ build: host: - cctbx-base # [prebuilt_cctbx and bootstrap] - - cctbx-base >=2024 # [not bootstrap] + - cctbx-base >=2025 # [not bootstrap] - hdf5 - libboost-devel - libboost-python-devel - - numpy >=1.21.5,<2 #[bootstrap] - - numpy # [not bootstrap] + - numpy - pip - pybind11 - python @@ -74,7 +73,7 @@ run: - mrcfile - natsort - {{ pin_compatible('numpy') }} # [not bootstrap] - - nxmx >=0.0.4 + - nxmx >=0.0.5 - ordered-set - pint - pycbf # [prebuilt_cctbx] @@ -86,7 +85,7 @@ run: test: - dials-data - pip - - pytest + - pytest >6 - pytest-mock - pytest-nunit # [win] - pytest-xdist diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 000000000..1d66d8f9d --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,98 @@ +""" +Dynamically generate the list of console_scripts dxtbx.format entry-points. 
+""" + +from __future__ import annotations + +import ast +import re +from pathlib import Path + +from hatchling.metadata.plugin.interface import MetadataHookInterface + + +def get_entry_point( + filename: Path, prefix: str, import_path: str +) -> list[tuple[str, str]]: + """Returns any entry point strings for a given path. + + This looks for LIBTBX_SET_DISPATCHER_NAME, and a root function + named 'run'. It can return multiple results for each file, if more + than one dispatcher name is bound. + + Args: + filename: + The python file to parse. Will look for a run() function + and any number of LIBTBX_SET_DISPATCHER_NAME. + prefix: The prefix to output the entry point console script with + import_path: The import path to get to the package the file is in + + Returns: + A list of entry_point specifications + """ + contents = filename.read_text() + tree = ast.parse(contents) + # Find root functions named "run" + has_run = any( + x + for x in tree.body + if (isinstance(x, ast.FunctionDef) and x.name == "run") + or (isinstance(x, ast.ImportFrom) and "run" in [a.name for a in x.names]) + ) + if not has_run: + return [] + # Find if we need an alternate name via LIBTBX_SET_DISPATCHER_NAME + alternate_names = re.findall( + r"^#\s*LIBTBX_SET_DISPATCHER_NAME\s+(.*)$", contents, re.M + ) + if alternate_names: + return [ + (name, f"{import_path}.{filename.stem}:run") for name in alternate_names + ] + + return [(f"{prefix}.{filename.stem}", f"{import_path}.{filename.stem}:run")] + + +def enumerate_format_classes(path: Path) -> list[tuple[str, str]]: + """Find all Format*.py files and contained Format classes in a path""" + format_classes = [] + for filename in path.glob("Format*.py"): + content = filename.read_bytes() + try: + parsetree = ast.parse(content) + except SyntaxError: + print(f" *** Could not parse {filename.name}") + continue + for top_level_def in parsetree.body: + if not isinstance(top_level_def, ast.ClassDef): + continue + base_names = [ + baseclass.id + for 
baseclass in top_level_def.bases + if isinstance(baseclass, ast.Name) and baseclass.id.startswith("Format") + ] + if base_names: + classname = top_level_def.name + format_classes.append( + ( + f"{classname}:{','.join(base_names)}", + f"dxtbx.format.{filename.stem}:{classname}", + ) + ) + return format_classes + + +class CustomMetadataHook(MetadataHookInterface): + def update(self, metadata): + scripts = metadata.setdefault("scripts", {}) + package_path = Path(self.root) / "src" / "dxtbx" + for file in package_path.joinpath("command_line").glob("*.py"): + for name, symbol in get_entry_point(file, "dxtbx", "dxtbx.command_line"): + if name not in scripts: + scripts[name] = symbol + + plugins = metadata.setdefault("entry-points", {}) + formats = plugins.setdefault("dxtbx.format", {}) + for name, symbol in sorted(enumerate_format_classes(package_path / "format")): + if name not in formats: + formats[name] = symbol diff --git a/libtbx_refresh.py b/libtbx_refresh.py index eef867e0f..f8088aff1 100644 --- a/libtbx_refresh.py +++ b/libtbx_refresh.py @@ -7,6 +7,7 @@ import os import subprocess import sys +from pathlib import Path import libtbx import libtbx.pkg_utils @@ -19,6 +20,25 @@ pass +def _find_site_packages_with_metadata(package_name: str, build_path: Path): + """ + Find the site-packages directory containing the package metadata. + Returns the site-packages directory if metadata is found, None otherwise. 
+ """ + # Look for Python site-packages directories in the build path + for python_dir in build_path.glob("lib/python*"): + site_packages = python_dir / "site-packages" + if site_packages.exists(): + # Look for both .dist-info and .egg-info directories + for pattern in [f"{package_name}*.dist-info", f"{package_name}*.egg-info"]: + for metadata_dir in site_packages.glob(pattern): + if metadata_dir.exists(): + # Return site-packages only if we actually found metadata + return site_packages + # If no metadata found in this site-packages, continue searching + return None + + def _install_setup_readonly_fallback(package_name: str): """ Partially install package in the libtbx build folder. @@ -51,14 +71,15 @@ def _install_setup_readonly_fallback(package_name: str): # Get the actual environment being configured (NOT libtbx.env) env = _get_real_env_hack_hack_hack() - # Update the libtbx environment pythonpaths to point to the source - # location which now has an .egg-info folder; this will mean that - # the PYTHONPATH is written into the libtbx dispatchers - rel_path = libtbx.env.as_relocatable_path(import_path) - if rel_path not in env.pythonpath: - env.pythonpath.insert(0, rel_path) + # As of PEP 660, the package metadata (dist-info) goes in the install dir, + # not the source dir. Add this location to the python path. 
+ metadata_dir = _find_site_packages_with_metadata(package_name, Path(build_path)) + if metadata_dir and metadata_dir not in sys.path: + metadata_rel = libtbx.env.as_relocatable_path(str(metadata_dir)) + if metadata_rel not in env.pythonpath: + env.pythonpath.insert(0, metadata_rel) - # Update the sys.path so that we can find the .egg-info in this process + # Update the sys.path so we can find the package in this process # if we do a full reconstruction of the working set if import_path not in sys.path: sys.path.insert(0, import_path) diff --git a/newsfragments/197.misc b/newsfragments/197.misc deleted file mode 100644 index 675881082..000000000 --- a/newsfragments/197.misc +++ /dev/null @@ -1 +0,0 @@ -XTC support: add wavelength_fallback parameter diff --git a/newsfragments/756.feature b/newsfragments/756.feature deleted file mode 100644 index 3b3f6e056..000000000 --- a/newsfragments/756.feature +++ /dev/null @@ -1 +0,0 @@ -Add support for reading the detector gain for nexus files diff --git a/newsfragments/767.misc b/newsfragments/767.misc deleted file mode 100644 index 42225ae11..000000000 --- a/newsfragments/767.misc +++ /dev/null @@ -1 +0,0 @@ -Add overloads for panel resolution methods to use beam objects instead of s0 directly. 
diff --git a/newsfragments/768.feature b/newsfragments/768.feature deleted file mode 100644 index 9ac92167c..000000000 --- a/newsfragments/768.feature +++ /dev/null @@ -1,2 +0,0 @@ -``dials.import``: add a progress bar for the import process, particularly -useful when importing thousands of files diff --git a/newsfragments/769.removal b/newsfragments/769.removal deleted file mode 100644 index 4a9b23023..000000000 --- a/newsfragments/769.removal +++ /dev/null @@ -1 +0,0 @@ -Python 3.10 is now the minimum required diff --git a/newsfragments/770.feature b/newsfragments/770.feature deleted file mode 100644 index 25c7bbc79..000000000 --- a/newsfragments/770.feature +++ /dev/null @@ -1 +0,0 @@ -Add ``FormatSMVADSCCetaD`` to allow easier processing of 3DED images from the Ceta-D detector, which have been converted to SMV. diff --git a/newsfragments/772.bugfix b/newsfragments/772.bugfix deleted file mode 100644 index be6889d36..000000000 --- a/newsfragments/772.bugfix +++ /dev/null @@ -1 +0,0 @@ -Do not try to resolve file paths if they are not accessible. 
diff --git a/newsfragments/773.bugfix b/newsfragments/773.bugfix deleted file mode 100644 index a9df61b72..000000000 --- a/newsfragments/773.bugfix +++ /dev/null @@ -1 +0,0 @@ -``dials.import``: force tqdm output to stdout not stderr as default \ No newline at end of file diff --git a/newsfragments/774.bugfix b/newsfragments/774.bugfix deleted file mode 100644 index 6f355b5f5..000000000 --- a/newsfragments/774.bugfix +++ /dev/null @@ -1 +0,0 @@ -``dials.show``: hide tqdm if DIALS_NOBANNER diff --git a/newsfragments/775.bugfix b/newsfragments/775.bugfix deleted file mode 100644 index 450b4bd4a..000000000 --- a/newsfragments/775.bugfix +++ /dev/null @@ -1 +0,0 @@ -``dials.import``: set tqdm output to stdout (again) diff --git a/newsfragments/780.feature b/newsfragments/780.feature new file mode 100644 index 000000000..7c6a90fa1 --- /dev/null +++ b/newsfragments/780.feature @@ -0,0 +1 @@ +modified `` FormatESSNMX.py `` for processing simulated NMX data from McStas. \ No newline at end of file diff --git a/newsfragments/787.feature b/newsfragments/787.feature deleted file mode 100644 index 243de412e..000000000 --- a/newsfragments/787.feature +++ /dev/null @@ -1 +0,0 @@ -``FormatROD_Arc``: Support for Rigaku HyPix-Arc 100° and 150° curved detectors diff --git a/newsfragments/788.bugfix b/newsfragments/788.bugfix deleted file mode 100644 index fcfe8992c..000000000 --- a/newsfragments/788.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix ``DeprecationWarning``s raised during tests diff --git a/newsfragments/789.bugfix b/newsfragments/789.bugfix deleted file mode 100644 index 800f48062..000000000 --- a/newsfragments/789.bugfix +++ /dev/null @@ -1 +0,0 @@ -``dials.import``: Reduce excessive memory usage when importing many (>100s) FormatNXMX files. 
diff --git a/newsfragments/793.bugfix b/newsfragments/793.bugfix deleted file mode 100644 index 138b10797..000000000 --- a/newsfragments/793.bugfix +++ /dev/null @@ -1 +0,0 @@ -``FormatNXmxEigerFilewriter``: rather than relying on a firmware version check to decide whether to swap image dimensions (which can break), do this by a lookup table. diff --git a/newsfragments/795.doc b/newsfragments/795.doc deleted file mode 100644 index 62353f016..000000000 --- a/newsfragments/795.doc +++ /dev/null @@ -1 +0,0 @@ -The user support mailing list is now dials-user-group@jiscmail.net diff --git a/newsfragments/796.bugfix b/newsfragments/796.bugfix deleted file mode 100644 index 2f821dd86..000000000 --- a/newsfragments/796.bugfix +++ /dev/null @@ -1 +0,0 @@ -Switch from using unmaintained ``orderedset`` to ``ordered_set`` diff --git a/newsfragments/797.misc b/newsfragments/797.misc deleted file mode 100644 index feeb2785b..000000000 --- a/newsfragments/797.misc +++ /dev/null @@ -1 +0,0 @@ -Update FormatISISSXD panel positions and add time-of-flight bin widths to scan. diff --git a/newsfragments/800.bugfix b/newsfragments/800.bugfix deleted file mode 100644 index 6b3a826ef..000000000 --- a/newsfragments/800.bugfix +++ /dev/null @@ -1 +0,0 @@ -``FormatROD``: Use the weighted average of K-alpha1 and K-alpha2 as the monochromatic wavelength for the beam model. diff --git a/newsfragments/801.bugfix b/newsfragments/801.bugfix deleted file mode 100644 index b7bfa54f3..000000000 --- a/newsfragments/801.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -- Change dxtbx tests to use publicly-available data on dials-data -- ``FormatRAXIS``: Allow the possibility of reading compressed files diff --git a/newsfragments/816.feature b/newsfragments/816.feature new file mode 100644 index 000000000..867027709 --- /dev/null +++ b/newsfragments/816.feature @@ -0,0 +1 @@ +Serialization history information is now stored with each ``Experiment``, and added to each time an ``ExperimentList`` is saved to disk. 
diff --git a/newsfragments/830.feature b/newsfragments/830.feature new file mode 100644 index 000000000..efdb416c0 --- /dev/null +++ b/newsfragments/830.feature @@ -0,0 +1 @@ +Support for the Bruker Photon-IV detector. diff --git a/newsfragments/831.feature b/newsfragments/831.feature new file mode 100644 index 000000000..8af6e5339 --- /dev/null +++ b/newsfragments/831.feature @@ -0,0 +1 @@ +``FormatTRPX``: adds support for images in compressed ``.trpx`` format, when ``pyterse`` is installed. diff --git a/newsfragments/833.feature b/newsfragments/833.feature new file mode 100644 index 000000000..ce506cb19 --- /dev/null +++ b/newsfragments/833.feature @@ -0,0 +1 @@ +Add support for treating data from MANDI as Laue data. diff --git a/newsfragments/835.feature b/newsfragments/835.feature new file mode 100644 index 000000000..dc5665e27 --- /dev/null +++ b/newsfragments/835.feature @@ -0,0 +1 @@ +Update known bad-module detector masks for Diamond I23's Pilatus 12M. diff --git a/newsfragments/837.misc b/newsfragments/837.misc new file mode 100644 index 000000000..d6e39419d --- /dev/null +++ b/newsfragments/837.misc @@ -0,0 +1 @@ +Allow nxmx_writer to handle trusted range values beyond the previous limit of 2e31. diff --git a/newsfragments/839.misc b/newsfragments/839.misc new file mode 100644 index 000000000..68add6a04 --- /dev/null +++ b/newsfragments/839.misc @@ -0,0 +1 @@ +Improve support for Windows compilers in non-bootstrap builds diff --git a/newsfragments/840.bugfix b/newsfragments/840.bugfix new file mode 100644 index 000000000..60e1760e6 --- /dev/null +++ b/newsfragments/840.bugfix @@ -0,0 +1 @@ +Add missing index param for `FormatMANDI.get_detector`. diff --git a/newsfragments/841.bugfix b/newsfragments/841.bugfix new file mode 100644 index 000000000..cf96b9aa8 --- /dev/null +++ b/newsfragments/841.bugfix @@ -0,0 +1 @@ +Fix `generate_laue_data_for_panel` typo in `FormatMANDILaue`. 
diff --git a/newsfragments/842.misc b/newsfragments/842.misc new file mode 100644 index 000000000..5c6811fba --- /dev/null +++ b/newsfragments/842.misc @@ -0,0 +1 @@ +Add `tof_helpers.InstrumentDefinitionReader` to read instrument information directly from xml data, and update FormatMANDI to use this by default. diff --git a/newsfragments/843.bugfix b/newsfragments/843.bugfix new file mode 100644 index 000000000..eaf908535 --- /dev/null +++ b/newsfragments/843.bugfix @@ -0,0 +1 @@ +Fix typo in tof_helpers.InstrumentDefinitionReader swapping panel axes. diff --git a/newsfragments/844.misc b/newsfragments/844.misc new file mode 100644 index 000000000..0c7e918e1 --- /dev/null +++ b/newsfragments/844.misc @@ -0,0 +1 @@ +Modify the panel hierarchy of FormatMANDI, FormatISISSXD, and FormatESSNMX to enable individual panel positions to be optimised in refinement. diff --git a/newsfragments/849.bugfix b/newsfragments/849.bugfix new file mode 100644 index 000000000..c5293d557 --- /dev/null +++ b/newsfragments/849.bugfix @@ -0,0 +1 @@ +Ensure imageset reject lists are preserved in multiprocessing (for https://github.com/dials/dials/issues/2998) diff --git a/newsfragments/851.bugfix b/newsfragments/851.bugfix new file mode 100644 index 000000000..d07612b43 --- /dev/null +++ b/newsfragments/851.bugfix @@ -0,0 +1 @@ +Support for miniCBF images with inverted rotation axis from SSRF beamlines BL18U1, BL19U1, and BL17U1. diff --git a/newsfragments/978.misc b/newsfragments/978.misc deleted file mode 100644 index 797504ced..000000000 --- a/newsfragments/978.misc +++ /dev/null @@ -1 +0,0 @@ -Only run actions on push for main branch, to avoid running twice. diff --git a/newsfragments/misc.XXX b/newsfragments/misc.XXX new file mode 100644 index 000000000..021dd2091 --- /dev/null +++ b/newsfragments/misc.XXX @@ -0,0 +1 @@ +Add overloads for Detector.get_ray_intersection to allow for arrays of s1 vectors. 
diff --git a/pyproject.toml b/pyproject.toml index 2f5bf55a8..2607eb744 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,34 @@ -[tool.black] -include = '\.pyi?$|/SConscript$|/libtbx_config$' +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "dxtbx" +version = "3.26.dev" +description = "Diffraction Experiment Toolkit" +authors = [ + { name = "Diamond Light Source", email = "dials-user-group@jiscmail.net" }, +] +license = { file = "LICENSE.txt" } +readme = "README.md" +requires-python = ">=3.11" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: MacOS", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", +] +dynamic = ["entry-points", "scripts"] + +[project.urls] +Homepage = "https://dials.github.io" +Repository = "https://github.com/cctbx/dxtbx" + +[tool.hatch.metadata.hooks.custom.entry-points] [tool.towncrier] package = "dxtbx" @@ -67,3 +96,12 @@ section-order = [ [tool.mypy] no_implicit_optional = true + +[tool.pytest.ini_options] +addopts = "-rsxX" +filterwarnings = [ + "ignore:the matrix subclass is not the recommended way:PendingDeprecationWarning", + "ignore:numpy.dtype size changed:RuntimeWarning", + "ignore:Deprecated call to `pkg_resources.declare_namespace:DeprecationWarning", + "ignore:`product` is deprecated as of NumPy:DeprecationWarning:h5py|numpy", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 5d6319d5e..000000000 --- a/pytest.ini +++ /dev/null @@ -1,10 +0,0 @@ -[pytest] -addopts = -rsxX -filterwarnings = - ignore:the matrix subclass is not the recommended way:PendingDeprecationWarning - ignore:numpy.dtype size changed:RuntimeWarning - ignore:Deprecated call to `pkg_resources.declare_namespace:DeprecationWarning - ignore:`product` is deprecated 
as of NumPy:DeprecationWarning:h5py|numpy -junit_family = legacy -markers = - regression: dxtbx regression test diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index ae760f5f5..000000000 --- a/setup.cfg +++ /dev/null @@ -1,13 +0,0 @@ -[metadata] -classifiers = - Development Status :: 5 - Production/Stable - Environment :: Console - Intended Audience :: Science/Research - License :: OSI Approved :: BSD License - Operating System :: MacOS - Operating System :: Microsoft :: Windows - Operating System :: POSIX :: Linux - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 diff --git a/setup.py b/setup.py deleted file mode 100644 index fba51a7f2..000000000 --- a/setup.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import annotations - -from pathlib import Path - -import setuptools - -from build import build - -# Static version number which is updated by bump2version -# Do not change this manually - use 'bump2version ' -__version_tag__ = "3.24.dev" - -setup_kwargs = { - "name": "dxtbx", - "version": __version_tag__, - "long_description": Path(__file__).parent.joinpath("README.md").read_text(), - "description": "Diffraction Experiment Toolbox", - "author": "Diamond Light Source", - "license": "BSD-3-Clause", - "author_email": "dials-user-group@jiscmail.net", - "project_urls": { - "homepage": "https://dials.github.io", - "repository": "https://github.com/cctbx/dxtbx", - }, - "packages": setuptools.find_packages(where="src"), - "package_dir": {"": "src"}, - "package_data": { - "": ["*"], - "dxtbx": ["array_family/*", "boost_python/*", "example/*", "py.typed"], - "dxtbx.format": ["boost_python/*"], - "dxtbx.masking": ["boost_python/*"], - "dxtbx.model": ["boost_python/*"], - }, - "entry_points": { - "libtbx.precommit": ["dxtbx=dxtbx"], - "libtbx.dispatcher.script": ["pytest=pytest"], - }, -} - -build(setup_kwargs) -setuptools.setup(**setup_kwargs) diff 
--git a/src/dxtbx/CMakeLists.txt b/src/dxtbx/CMakeLists.txt index 495615812..dc8d2fdfc 100644 --- a/src/dxtbx/CMakeLists.txt +++ b/src/dxtbx/CMakeLists.txt @@ -39,6 +39,7 @@ Python_add_library( dxtbx_model_ext model/boost_python/crystal.cc model/boost_python/parallax_correction.cc model/boost_python/pixel_to_millimeter.cc + model/boost_python/history.cc model/boost_python/experiment.cc model/boost_python/experiment_list.cc model/boost_python/model_ext.cc ) diff --git a/src/dxtbx/boost_python/flumpy.cc b/src/dxtbx/boost_python/flumpy.cc index d7ce34b20..e8cb4eab6 100644 --- a/src/dxtbx/boost_python/flumpy.cc +++ b/src/dxtbx/boost_python/flumpy.cc @@ -587,7 +587,7 @@ py::object miller_index_from_numpy(py::array np_array) { if (accepted_types.find(dtype) == std::string::npos) { throw std::invalid_argument( std::string("miller_index only supports int32 or intc types - cannot convert '") - + std::to_string(dtype) + "'"); + + dtype + "'"); } return vec_from_numpy(np_array); } diff --git a/src/dxtbx/boost_python/imageset_ext.cc b/src/dxtbx/boost_python/imageset_ext.cc index 858f7b372..495731a78 100644 --- a/src/dxtbx/boost_python/imageset_ext.cc +++ b/src/dxtbx/boost_python/imageset_ext.cc @@ -121,7 +121,7 @@ namespace dxtbx { namespace boost_python { } return std::shared_ptr(new ImageSet( - data, boost::python::extract >(indices)())); + data, boost::python::extract>(indices)())); } /** @@ -153,7 +153,7 @@ namespace dxtbx { namespace boost_python { template static boost::python::tuple get_model_list(ImageSetData obj, Func get) { // Create a list of models and a list of indices - std::vector > model_list; + std::vector> model_list; std::vector index_list; for (std::size_t i = 0; i < obj.size(); ++i) { std::shared_ptr m = get(obj, i); @@ -214,7 +214,8 @@ namespace dxtbx { namespace boost_python { obj.get_template(), obj.get_vendor(), detail::bytes_from_std_string(obj.get_params()), - detail::bytes_from_std_string(obj.get_format())); + 
detail::bytes_from_std_string(obj.get_format()), + obj.get_reject_list()); } template @@ -226,11 +227,11 @@ namespace dxtbx { namespace boost_python { boost::python::extract(data[1])(); // Convert to c++ vectors - std::vector > model_list; + std::vector> model_list; std::vector index_list; for (std::size_t i = 0; i < boost::python::len(models); ++i) { model_list.push_back( - boost::python::extract >(models[i])()); + boost::python::extract>(models[i])()); } for (std::size_t i = 0; i < boost::python::len(indices); ++i) { index_list.push_back(boost::python::extract(indices[i])()); @@ -281,30 +282,30 @@ namespace dxtbx { namespace boost_python { static void set_lookup_tuple(ImageSetData &obj, boost::python::tuple lookup) { DXTBX_ASSERT(boost::python::len(lookup) == 5); - ImageSetDataPickleSuite::set_lookup_item >( + ImageSetDataPickleSuite::set_lookup_item>( obj, boost::python::extract(lookup[0])(), &ExternalLookup::mask); - ImageSetDataPickleSuite::set_lookup_item >( + ImageSetDataPickleSuite::set_lookup_item>( obj, boost::python::extract(lookup[1])(), &ExternalLookup::gain); - ImageSetDataPickleSuite::set_lookup_item >( + ImageSetDataPickleSuite::set_lookup_item>( obj, boost::python::extract(lookup[2])(), &ExternalLookup::pedestal); - ImageSetDataPickleSuite::set_lookup_item >( + ImageSetDataPickleSuite::set_lookup_item>( obj, boost::python::extract(lookup[3])(), &ExternalLookup::dx); - ImageSetDataPickleSuite::set_lookup_item >( + ImageSetDataPickleSuite::set_lookup_item>( obj, boost::python::extract(lookup[4])(), &ExternalLookup::dy); } static void setstate(ImageSetData &obj, boost::python::tuple state) { - DXTBX_ASSERT(boost::python::len(state) == 6); + DXTBX_ASSERT(boost::python::len(state) == 7); // Set the models ImageSetDataPickleSuite::set_model_tuple( @@ -319,6 +320,10 @@ namespace dxtbx { namespace boost_python { obj.set_vendor(boost::python::extract(state[3])()); obj.set_params(boost::python::extract(state[4])()); 
obj.set_format(boost::python::extract(state[5])()); + + // Set the reject list + obj.set_reject_list( + boost::python::extract>(state[6])()); } }; @@ -400,18 +405,18 @@ namespace dxtbx { namespace boost_python { // If we have a tuple then add items of tuple to image data for (std::size_t i = 0; i < boost::python::len(item); ++i) { flex_type a = boost::python::extract(item)(); - data.push_back(ImageTile(scitbx::af::versa >( + data.push_back(ImageTile(scitbx::af::versa>( a.handle(), scitbx::af::c_grid<2>(a.accessor())))); } } else { try { // If we have a single array then add flex_type a = boost::python::extract(item)(); - data.push_back(ImageTile(scitbx::af::versa >( + data.push_back(ImageTile(scitbx::af::versa>( a.handle(), scitbx::af::c_grid<2>(a.accessor())))); } catch (boost::python::error_already_set) { - data = boost::python::extract >(item)(); + data = boost::python::extract>(item)(); boost::python::handle_exception(); } } @@ -468,7 +473,7 @@ namespace dxtbx { namespace boost_python { void external_lookup_item_wrapper(const char *name) { using namespace boost::python; - class_ >(name) + class_>(name) .add_property("filename", &ExternalLookupItem::get_filename, &ExternalLookupItem::set_filename) @@ -568,7 +573,7 @@ namespace dxtbx { namespace boost_python { .add_property("dy", make_function(&ExternalLookup::dy, return_internal_reference<>())); - class_ >("ImageSetData", no_init) + class_>("ImageSetData", no_init) .def("__init__", make_constructor(&make_imageset_data1, default_call_policies(), @@ -652,7 +657,7 @@ namespace dxtbx { namespace boost_python { make_function(&ImageSet::external_lookup, return_internal_reference<>())) .def_pickle(ImageSetPickleSuite()); - class_ >("ImageGrid", no_init) + class_>("ImageGrid", no_init) .def(init((arg("data"), arg("grid_size")))) .def(init &, int2>( (arg("data"), arg("indices"), arg("grid_size")))) @@ -661,7 +666,7 @@ namespace dxtbx { namespace boost_python { .staticmethod("from_imageset") 
.def_pickle(ImageGridPickleSuite()); - class_ >("ImageSequence", no_init) + class_>("ImageSequence", no_init) .def(init= 3 - ), "When using centers as target for superpose, detector needs at least 3 panels" + assert len(panel_ids) >= 3, ( + "When using centers as target for superpose, detector needs at least 3 panels" + ) def rmsd_from_centers(a, b): assert len(a) == len(b) diff --git a/src/dxtbx/command_line/image_average.py b/src/dxtbx/command_line/image_average.py index 52e287027..b0ba1898b 100644 --- a/src/dxtbx/command_line/image_average.py +++ b/src/dxtbx/command_line/image_average.py @@ -135,9 +135,9 @@ def read(self, path): print("Processing %s" % path) format_class = dxtbx.format.Registry.get_format_class_for_file(path) - assert not issubclass( - format_class, FormatMultiImage - ), "Average container files separately" + assert not issubclass(format_class, FormatMultiImage), ( + "Average container files separately" + ) img_instance = format_class(path) beam = img_instance.get_beam() @@ -281,7 +281,7 @@ def run(argv=None): if command_line.options.mpi: try: - from mpi4py import MPI + from libtbx.mpi4py import MPI except ImportError: raise Sorry("MPI not found") comm = MPI.COMM_WORLD diff --git a/src/dxtbx/command_line/install_format.py b/src/dxtbx/command_line/install_format.py index 4eaee5f2b..20c7c3bf6 100644 --- a/src/dxtbx/command_line/install_format.py +++ b/src/dxtbx/command_line/install_format.py @@ -97,7 +97,7 @@ def install_package(home_location: Path, format_classes: list[str]): if not custom_folder.exists(): os.symlink(home_location, custom_folder) subprocess.run( - ["libtbx.python", str(home_location / "setup.py"), "develop", "--user"], + [sys.executable, "-m", "pip", "install", "-e", str(home_location), "--user"], cwd=home_location, capture_output=True, check=True, diff --git a/src/dxtbx/command_line/radial_average.py b/src/dxtbx/command_line/radial_average.py index 70e950557..cae568ea9 100644 --- a/src/dxtbx/command_line/radial_average.py +++ 
b/src/dxtbx/command_line/radial_average.py @@ -135,9 +135,9 @@ def run(args=None, imageset=None): ref_expts = ExperimentListFactory.from_json_file( params.reference_geometry, check_format=None ) - assert ( - len(ref_expts.detectors()) == 1 - ), "Provide only one detector in the reference geometry file" + assert len(ref_expts.detectors()) == 1, ( + "Provide only one detector in the reference geometry file" + ) detector = ref_expts.detectors()[0] # Allow writing to a file instead of stdout diff --git a/src/dxtbx/command_line/read_sequence.py b/src/dxtbx/command_line/read_sequence.py index 8bef785c2..3eac39b23 100644 --- a/src/dxtbx/command_line/read_sequence.py +++ b/src/dxtbx/command_line/read_sequence.py @@ -25,7 +25,7 @@ def read_sequence(images: list[str]): sequence.get_raw_data(i) t1 = time.time() - print(f"Reading {len(indices)} frames took {t1-t0:.2f}s") + print(f"Reading {len(indices)} frames took {t1 - t0:.2f}s") def run(args=None): diff --git a/src/dxtbx/data/detectors.lib b/src/dxtbx/data/detectors.lib index f05fd5afe..14a305680 100644 --- a/src/dxtbx/data/detectors.lib +++ b/src/dxtbx/data/detectors.lib @@ -46,3 +46,4 @@ CCD 1024 1024 90 90 rigaku-saturn-92-2x2-binned CCD 2084 2084 45 45 rigaku-saturn-944 CCD 1042 1042 90 90 rigaku-saturn-944-2x2-binned CCD 2048 2048 100 100 rigaku-saturn-a200 +PAD 1028 1062 75 75 singla diff --git a/src/dxtbx/format/FormatBrukerELA.py b/src/dxtbx/format/FormatBrukerELA.py new file mode 100644 index 000000000..20a1c61b1 --- /dev/null +++ b/src/dxtbx/format/FormatBrukerELA.py @@ -0,0 +1,271 @@ +from __future__ import annotations + +from boost_adaptbx.boost.python import streambuf +from scitbx import matrix +from scitbx.array_family import flex + +from dxtbx import IncorrectFormatError +from dxtbx.ext import ( + is_big_endian, + read_uint8, + read_uint16, + read_uint16_bs, + read_uint32, + read_uint32_bs, +) +from dxtbx.format.FormatBruker import FormatBruker +from dxtbx.model import SimplePxMmStrategy +from 
dxtbx.model.beam import Probe + + +class FormatBrukerELA(FormatBruker): + @staticmethod + def understand(image_file): + try: + header_lines = FormatBruker.read_header_lines(image_file) + except OSError: + return False + + header_dic = FormatBruker.parse_header(header_lines) + + dettype = header_dic.get("DETTYPE").upper() + if dettype is None: + return False + + if "DECTRIS-ELA" not in dettype: + return False + + return True + + def _start(self): + try: + header_lines = FormatBruker.read_header_lines(self._image_file) + except OSError: + return False + + self.header_dict = FormatBrukerELA.parse_header(header_lines) + + # The ELA format can't currently use BrukerImage, for the same reason + # as for the Photon-II/III (hardcoded image size is one factor) + # https://github.com/cctbx/cctbx_project/issues/65 + # from iotbx.detectors.bruker import BrukerImage + # self.detectorbase = BrukerImage(self._image_file) + + def _goniometer(self): + # goniometer angles in ANGLES are 2-theta, omega, phi, chi (FIXED) + # AXIS indexes into this list to define the scan axis (in FORTRAN counting) + # START and RANGE define the start and step size for each image + + _, omega, phi, chi = map(float, self.header_dict["ANGLES"].split()) + scan_axis = ["NONE", "2THETA", "OMEGA", "PHI", "CHI", "X", "Y", "Z"] + scan_axis = scan_axis[int(self.header_dict["AXIS"])] + names = flex.std_string(("PHI", "CHI", "OMEGA")) + scan_axis = flex.first_index(names, scan_axis) + if scan_axis is None: + scan_axis = 2 # "OMEGA" default + + # Axes here may be incorrect + axes = flex.vec3_double(((1, 0, 0), (0, 0, -1), (0, 1, 0))) + omega -= 180 + angles = flex.double((phi, chi, omega)) + + g = self._goniometer_factory.make_multi_axis_goniometer( + axes, angles, names, scan_axis + ) + + # It is preferable to return a single axis goniometer, as this can be + # further optimised by dials.find_rotation_axis + return self._goniometer_factory.known_axis(g.get_rotation_axis()) + + def _calculate_gain(self, 
wavelength): + """The CCDPARM header item contains 5 items. For an X-ray detector + are described by: + 1. readnoise + 2. e/ADU + 3. e/photon + 4. bias + 5. full scale + and the gain in ADU/X-ray is given by (e/photon) / (e/ADU). + It is not exactly clear what the gain is for this electron detector, but + these files seem to set it to 1 + """ + ccdparm = self.header_dict["CCDPARM"].split() + e_ADU = float(ccdparm[1]) + e_photon = float(ccdparm[2]) + if e_ADU == 0: + return 1.0 + gain = e_photon / e_ADU + return gain + + def _detector(self): + # goniometer angles in ANGLES are 2-theta, omega, phi, chi (FIXED) + two_theta = float(self.header_dict["ANGLES"].split()[0]) + + # Assume Bruker full_scale value means saturation + full_scale = float(self.header_dict["CCDPARM"].split()[-1]) + min_trusted_value = 0 + + fast = matrix.col((1, 0, 0)) + slow = matrix.col((0, -1, 0)) + beam = matrix.col((0, 0, 1)) + pixel_mm = 0.075 + beam_pixel = [float(bp) for bp in self.header_dict["CENTER"].split()[:-3:-1]] + distance_mm = 10.0 * float(self.header_dict["DISTANC"].split()[1]) + origin = ( + -distance_mm * beam + - fast * pixel_mm * beam_pixel[1] + - slow * pixel_mm * beam_pixel[0] + ) + # 2theta rotation appears to be around the slow axis + origin = origin.rotate_around_origin(slow, two_theta, deg=True) + fast = fast.rotate_around_origin(slow, two_theta, deg=True) + pixel_size = pixel_mm, pixel_mm + # ncols is nfast, nrows is nslow + image_size = ( + int(self.header_dict["NCOLS"].split()[0]), + int(self.header_dict["NROWS"].split()[0]), + ) + + gain = self._calculate_gain(float(self.header_dict["WAVELEN"].split()[0])) + detector = self._detector_factory.complex( + "PAD", + origin.elems, + fast.elems, + slow.elems, + pixel_size, + image_size, + (min_trusted_value, full_scale), + ) + + # Here we set specifics, notably parallax correction and + # QE correction are effectively disabled by setting the simple + # pixel-to-millimetre strategy and a very high mu value. 
+ for panel in detector: + panel.set_gain(gain) + panel.set_thickness(0.450) # Assume same as Eiger + panel.set_material("Si") + panel.set_px_mm_strategy(SimplePxMmStrategy()) + panel.set_mu(1e10) + + return detector + + def _beam(self): + """Make unpolarized beam""" + wavelength = float(self.header_dict["WAVELEN"].split()[0]) + + return self._beam_factory.make_polarized_beam( + sample_to_source=(0.0, 0.0, 1.0), + wavelength=wavelength, + polarization=(0, 1, 0), + polarization_fraction=0.5, + probe=Probe.electron, + ) + + def _scan(self): + start = float(self.header_dict["START"].split()[0]) + incr = float(self.header_dict["INCREME"].split()[0]) + if incr < 0: + start *= -1 + incr *= -1 + + return self._scan_factory.single_file( + filename=self._image_file, + exposure_times=1, + osc_start=start, + osc_width=incr, + epoch=None, + ) + + def get_raw_data(self): + """Get the pixel intensities (i.e. read the image and return as a + flex array of integers.)""" + + # It is better to catch FORMAT 86 here and fail with a sensible error msg + # as soon as something is attempted with the image data rather than in + # the understand method. Otherwise the user gets FormatBruker reading the + # image improperly but without failing + if self.header_dict["FORMAT"] != "100": + raise NotImplementedError("Only FORMAT 100 images are currently supported") + + f = self.open_file(self._image_file, "rb") + header_size = int(self.header_dict["HDRBLKS"]) * 512 + f.read(header_size) + + if is_big_endian(): + read_2b = read_uint16_bs + read_4b = read_uint32_bs + else: + read_2b = read_uint16 + read_4b = read_uint32 + + # NPIXELB stores the number of bytes/pixel for the data and the underflow + # table. 
We expect 1 byte for underflows and either 2 or 1 byte per pixel + # for the data + npixelb = [int(e) for e in self.header_dict["NPIXELB"].split()] + assert npixelb[1] == 1 + + if npixelb[0] == 1: + read_data = read_uint8 + elif npixelb[0] == 2: + read_data = read_2b + else: + raise IncorrectFormatError(f"{npixelb[0]} bytes per pixel is not supported") + + nrows = int(self.header_dict["NROWS"].split()[0]) + ncols = int(self.header_dict["NCOLS"].split()[0]) + + raw_data = read_data(streambuf(f), nrows * ncols) + + image_size = (nrows, ncols) + raw_data.reshape(flex.grid(*image_size)) + + (num_underflows, num_2b_overflows, num_4b_overflows) = ( + int(e) for e in self.header_dict["NOVERFL"].split() + ) + + # read underflows + if num_underflows > 0: + # stored values are padded to a multiple of 16 bytes + nbytes = num_underflows + 15 & ~(15) + underflow_vals = read_uint8(streambuf(f), nbytes)[:num_underflows] + else: + underflow_vals = None + + # handle 2 byte overflows + if num_2b_overflows > 0: + # stored values are padded to a multiple of 16 bytes + nbytes = num_2b_overflows * 2 + 15 & ~(15) + overflow_vals = read_2b(streambuf(f), nbytes // 2)[:num_2b_overflows] + overflow = flex.int(nrows * ncols, 0) + sel = (raw_data == 255).as_1d() + overflow.set_selected(sel, overflow_vals - 255) + overflow.reshape(flex.grid(*image_size)) + raw_data += overflow + + # handle 4 byte overflows + if num_4b_overflows > 0: + # stored values are padded to a multiple of 16 bytes + nbytes = num_4b_overflows * 4 + 15 & ~(15) + overflow_vals = read_4b(streambuf(f), nbytes // 4)[:num_4b_overflows] + overflow = flex.int(nrows * ncols, 0) + sel = (raw_data == 65535).as_1d() + overflow.set_selected(sel, overflow_vals - 65535) + overflow.reshape(flex.grid(*image_size)) + raw_data += overflow + + # handle underflows + if underflow_vals is not None: + sel = (raw_data == 0).as_1d() + underflow = flex.int(nrows * ncols, 0) + underflow.set_selected(sel, underflow_vals) + 
underflow.reshape(flex.grid(*image_size)) + raw_data += underflow + + # handle baseline. num_underflows == -1 means no baseline subtraction. See + # https://github.com/cctbx/cctbx_project/files/1262952/BISFrameFileFormats.zip + if num_underflows != -1: + num_exposures = [int(e) for e in self.header_dict["NEXP"].split()] + baseline = num_exposures[2] + raw_data += baseline + + return raw_data diff --git a/src/dxtbx/format/FormatBrukerPhoton.py b/src/dxtbx/format/FormatBrukerPhoton.py index a0f8e4300..0568d3b7e 100644 --- a/src/dxtbx/format/FormatBrukerPhoton.py +++ b/src/dxtbx/format/FormatBrukerPhoton.py @@ -31,8 +31,8 @@ def understand(image_file): dettype = header_dic.get("DETTYPE") if dettype is None: return False - # We support Photon-II and Photon-III detectors - if not dettype.startswith("CMOS-PHOTONII"): + # We support Photon-II, -III and -IV detectors. The earlier generation was Photon100 + if not dettype.startswith("CMOS-PHOTONI"): return False return True diff --git a/src/dxtbx/format/FormatCBF.py b/src/dxtbx/format/FormatCBF.py index a9c1a6a21..79ffed613 100644 --- a/src/dxtbx/format/FormatCBF.py +++ b/src/dxtbx/format/FormatCBF.py @@ -40,8 +40,7 @@ def get_cbf_header(image_file): add = fin.read(4096) if not add: # If the marker is not contained in the file then we return the - # entire file. This behaviour is enforced by test involving - # dials_regression/image_examples/ADSC_CBF/thaumatin_die_M1S5_1_asc_0041.cbf + # entire file. marker_index = None break header += add diff --git a/src/dxtbx/format/FormatCBFMiniEigerReverse.py b/src/dxtbx/format/FormatCBFMiniEigerReverse.py new file mode 100644 index 000000000..91535f44c --- /dev/null +++ b/src/dxtbx/format/FormatCBFMiniEigerReverse.py @@ -0,0 +1,42 @@ +""" +An implementation of the CBF image reader for Eiger images for beamlines +with a horizontal, but reversed rotation axis. 
+""" + +from __future__ import annotations + +import sys + +from dxtbx.format.FormatCBFMiniEiger import FormatCBFMiniEiger + + +class FormatCBFMiniEigerReverse(FormatCBFMiniEiger): + """A class for reading mini CBF format Eiger images for beamlines with + a horizontal reversed rotation axis.""" + + @staticmethod + def understand(image_file): + """Check to see if this looks like an Eiger mini CBF format image, + i.e. we can make sense of it.""" + + header = FormatCBFMiniEiger.get_cbf_header(image_file) + + for record in header.split("\n"): + if "# Detector" in record and "Eiger" in record: + if "S/N E-32-0111" in record: + # S/N E-32-0111: SSRF BL17U1 + return True + + return False + + def _goniometer(self): + """Return a model for a simple single-axis goniometer. This should + probably be checked against the image header, though for miniCBF + there are limited options for this.""" + + return self._goniometer_factory.single_axis_reverse() + + +if __name__ == "__main__": + for arg in sys.argv[1:]: + print(FormatCBFMiniEigerReverse.understand(arg)) diff --git a/src/dxtbx/format/FormatCBFMiniPilatusDLS12M.py b/src/dxtbx/format/FormatCBFMiniPilatusDLS12M.py index d1ec06052..0df314b29 100644 --- a/src/dxtbx/format/FormatCBFMiniPilatusDLS12M.py +++ b/src/dxtbx/format/FormatCBFMiniPilatusDLS12M.py @@ -170,7 +170,10 @@ def _mask_bad_modules(self, detector): cy = 97 # chip pixels y dx = 7 # module gap size - if timestamp > calendar.timegm((2024, 9, 1, 0, 0, 0)): + if timestamp > calendar.timegm((2024, 11, 25, 0, 0, 0)): + # Detector serviced by DECTRIS + pass + elif timestamp > calendar.timegm((2024, 9, 1, 0, 0, 0)): # 2024 run 4 # modules @ row 22 column 1...5 if self._multi_panel: diff --git a/src/dxtbx/format/FormatCBFMiniPilatusXXX.py b/src/dxtbx/format/FormatCBFMiniPilatusReverse.py similarity index 55% rename from src/dxtbx/format/FormatCBFMiniPilatusXXX.py rename to src/dxtbx/format/FormatCBFMiniPilatusReverse.py index 42382bb7a..cec81868e 100644 --- 
a/src/dxtbx/format/FormatCBFMiniPilatusXXX.py +++ b/src/dxtbx/format/FormatCBFMiniPilatusReverse.py @@ -1,6 +1,6 @@ """ -An implementation of the CBF image reader for Pilatus images, from the Pilatus -6M SN 100 currently on Diamond I04. +An implementation of the CBF image reader for Pilatus images for beamlines +with a horizontal, but reversed rotation axis. """ from __future__ import annotations @@ -8,8 +8,9 @@ from dxtbx.format.FormatCBFMiniPilatus import FormatCBFMiniPilatus -class FormatCBFMiniPilatusXXX(FormatCBFMiniPilatus): - """A class for reading mini CBF format Pilatus images for 6M SN XXX.""" +class FormatCBFMiniPilatusReverse(FormatCBFMiniPilatus): + """A class for reading mini CBF format Pilatus images for beamlines with + a horizontal reversed rotation axis.""" @staticmethod def understand(image_file): @@ -19,12 +20,11 @@ def understand(image_file): header = FormatCBFMiniPilatus.get_cbf_header(image_file) for record in header.split("\n"): - if ( - "# Detector" in record - and "PILATUS" in record - and "S/N XX-XXX" in header - ): - return True + if "# Detector" in record and "PILATUS" in record: + if "S/N XX-XXX" in record or "S/N 60-0123" in record: + # S/N 60-0123: SSRF BL18U1 + # S/N XX-XXX: SSRF BL19U1 + return True return False diff --git a/src/dxtbx/format/FormatESSNMX.py b/src/dxtbx/format/FormatESSNMX.py index 8747cb1e6..2234aae3a 100644 --- a/src/dxtbx/format/FormatESSNMX.py +++ b/src/dxtbx/format/FormatESSNMX.py @@ -38,6 +38,276 @@ def understand(image_file: str) -> bool: except (OSError, KeyError): return False + @staticmethod + def is_nmx_file(image_file: str) -> bool: + def get_name(image_file): + try: + with h5py.File(image_file, "r") as handle: + return handle["entry/instrument/name"][...].item().decode() + except (OSError, KeyError, AttributeError): + return "" + + return get_name(image_file) == "NMX" + + def get_instrument_name(self) -> str: + return "NMX" + + def get_experiment_description(self) -> str: + return "Simulated data" + + 
def _load_raw_data(self) -> None: + raw_data = [] + for panel in self._get_panels(): + spectra = panel["data"][...] + raw_data.append(flumpy.from_numpy(np.ascontiguousarray(spectra))) + + self._raw_data = tuple(raw_data) + + def get_raw_data( + self, index: int, use_loaded_data: bool = False + ) -> tuple[flex.int]: + raw_data = [] + # image_size = self._get_image_size() + # total_pixels = image_size[0] * image_size[1] + + if use_loaded_data: + if self._raw_data is None: + self._load_raw_data() + for panel in self._raw_data: + data = panel[:, :, index : index + 1].astype(np.int32) + data.reshape(flex.grid(panel.all()[0], panel.all()[1])) + data.matrix_transpose_in_place() + raw_data.append(data) + + else: + for panel in self._get_panels(): + spectra = panel["data"][:, :, index].astype(np.int32) + raw_data.append(flumpy.from_numpy(np.ascontiguousarray(spectra))) + + return tuple(raw_data) + + def _get_time_channel_bins(self) -> list[float]: + # (usec) + # the tofs are recorded separately per panel but they + # should all be the same + for panel in self._get_panels(): + return panel["time_of_flight"][...] 
* 1e6 + + def _get_time_of_flight(self) -> list[float]: + # (usec) + bins = self._get_time_channel_bins() + return [float((bins[i] + bins[i + 1]) * 0.5) for i in range(len(bins) - 1)] + + def get_num_images(self) -> int: + return len(self._get_time_of_flight()) + + def get_detector(self, index: int = None) -> Detector: + panel_names = self._get_panel_names() + panel_type = self._get_panel_type() + trusted_range = self._get_panel_trusted_range() + pixel_size = self._get_pixel_size() + gain = self._get_panel_gain() + detector = Detector() + root = detector.hierarchy() + panels = self._get_panels() + panel_projections = self._get_panel_projections_2d(panels) + for panel_dset, panel_name in zip(panels, panel_names): + panel = root.add_panel() + panel.set_type(panel_type) + panel.set_name(panel_name) + panel.set_image_size( + self._get_image_size(panel_dset) + ) # XXX fix to include detectors possibly not being the same size + panel.set_trusted_range(trusted_range) + panel.set_pixel_size(pixel_size) + fast_axis = self._get_panel_fast_axes(panel_dset) + slow_axis = self._get_panel_slow_axes(panel_dset) + panel_origin = self._get_panel_origins(panel_dset) + panel.set_local_frame(fast_axis, slow_axis, panel_origin) + + panel.set_gain(gain) + i = int(panel_name[-1]) + r, t = panel_projections[i] + r = tuple(map(int, r)) + t = tuple(map(int, t)) + panel.set_projection_2d(r, t) + + return detector + + def _get_num_panels(self) -> int: + return len(self._get_panels()) + + def _get_panels(self) -> list[h5py._hl.group.Group]: + """get the detector panel locations in file""" + panels = [] + inst_dset = self._nxs_file["/entry/instrument/"] + for _, dset in inst_dset.items(): + if dset.attrs.get("NX_class") == "NXdetector": + panels.append(dset) + return panels + + def _get_panel_names(self) -> list[str]: + panel_names = [] + inst_dset = self._nxs_file["/entry/instrument/"] + for name, dset in inst_dset.items(): + if dset.attrs.get("NX_class") == "NXdetector": + 
panel_names.append(name) + return panel_names + + def _get_panel_name(self, panel) -> str: + return panel.name.split("/")[-1] + + def _get_panel_type(self) -> str: + return "Triple_GEM_Gd" + + def _get_image_size(self, panel) -> tuple[int, int]: + # (px) + dset = panel["data"] + return dset[:, :, 0].shape + + def _get_panel_trusted_range(self) -> tuple[int, int]: + # 4 * 1280**2 plus buffer + return (-1, 7000000) + + def _get_pixel_size(self) -> tuple[float, float]: + # (mm) + return (0.4, 0.4) + + def _get_panel_fast_axes(self, panel) -> tuple[float, float, float]: + # return ((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, -1.0)) + fast_axis = panel["fast_axis"][...] + return tuple(fast_axis) + + def _get_panel_slow_axes(self, panel) -> tuple[float, float, float]: + # return ((0.0, 1.0, 0.0), (0.0, 1.0, 0.0), (0.0, 1.0, 0.0)) + slow_axis = panel["slow_axis"][...] + return tuple(slow_axis) + + def _get_panel_origins(self, panel) -> tuple[float, float, float]: + # (mm) + # return ((-250, -250.0, -292.0), (290, -250.0, -250), (-290, -250.0, 250.0)) + + origin = panel["origin"][...] 
+ panelnum = int(panel.name[-1]) + origin *= 1000 # convert to mm + corrfact = self._get_correction_factor()[panelnum] + + corrorg = origin + corrfact + return tuple(corrorg) + + def _get_correction_factor(self): + return np.array([[0, -256, -256], [-256.0, -256.0, 0], [0, -256, 256]]) + + def _get_panel_projections_2d(self, panels) -> dict[int : tuple[tuple, tuple]]: + p_w, p_h = self._get_image_size(panels[0]) # XXX fix later + p_w += 10 + p_h += 10 + panel_pos = { + int(panels[0].name[-1]): ((-1, 0, 0, -1), (p_h, 0)), + int(panels[1].name[-1]): ((-1, 0, 0, -1), (p_h, p_w)), + int(panels[2].name[-1]): ((-1, 0, 0, -1), (p_h, -p_w)), + } + + return panel_pos + + def get_beam(self, index: int = None) -> PolychromaticBeam: + direction = self._get_sample_to_source_direction() + distance = self._get_sample_to_source_distance() + wavelength_range = self._get_wavelength_range() + return BeamFactory.make_polychromatic_beam( + direction=direction, + sample_to_source_distance=distance, + probe=Probe.neutron, + wavelength_range=wavelength_range, + ) + + def _get_sample_to_source_direction(self) -> tuple[float, float, float]: + return (0, 0, -1) + + def _get_wavelength_range(self) -> tuple[float, float]: + # (A) + tofs = np.array(self._get_time_of_flight()) / 1e6 + tof_low = tofs[0] + tof_high = tofs[-1] + lambda_low = self._tof_to_lambda(tof_low) + lambda_high = self._tof_to_lambda(tof_high) + return (round(lambda_low, 2), round(lambda_high, 2)) + + def _tof_to_lambda(self, tof): + """given tof in s, return lambda in Angstrom""" + neutron_mass = 1.67492749804e-27 + h = 6.62607015e-34 + distance = self._get_sample_to_source_distance() / 1000 # convert to m + return h / (neutron_mass * (distance / tof)) * 1e10 + + def _get_sample_to_source_distance(self) -> float: + """get sample to source distance in mm""" + try: + dist = abs(self._nxs_file["entry/instrument/source/distance"][...]) * 1000 + return dist + except (KeyError, ValueError): + logger.warning("sample to 
moderator_distance not found, using dummy value") + return 156714 + + def _get_panel_gain(self) -> float: + return 1.0 + + def get_goniometer_phi_angle(self) -> float: + return self.get_goniometer_orientations()[1] + + def get_goniometer(self, index: int = None) -> Goniometer: + rotation_axis = (0.0, 1.0, 0.0) + fixed_rotation = (1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) + goniometer = GoniometerFactory.make_goniometer(rotation_axis, fixed_rotation) + try: + angles = self.get_goniometer_orientations() + except KeyError: + logger.warning("crystal_rotation not found, using default") + return goniometer + axes = ((1, 0, 0), (0, 1, 0), (0, 0, 1)) + for idx, angle in enumerate(angles): + goniometer.rotate_around_origin(axes[idx], angle) + return goniometer + + def get_goniometer_orientations(self) -> tuple[float, float, float]: + # Angles in deg along x, y, z + return self._nxs_file["entry/sample/crystal_rotation"][...] + + def get_scan(self, index=None) -> Scan: + image_range = (1, self.get_num_images()) + properties = {"time_of_flight": tuple(self._get_time_of_flight())} + return ScanFactory.make_scan_from_properties( + image_range=image_range, properties=properties + ) + + def get_proton_charge(self) -> float: + """McStas Simulations don't have a proton charge + so this is a calculated value""" + return self._nxs_file["entry/metadata/mcstas_weight2count_scale_factor"][ + ... 
+ ].item() + + +class FormatESSNMX_old(FormatHDF5): + """ + Class to read files from NMX + https://europeanspallationsource.se/instruments/nmx + preprocessed files in scipp to obtain binned data + """ + + def __init__(self, image_file, **kwargs) -> None: + if not FormatESSNMX_old.understand(image_file): + raise IncorrectFormatError(self, image_file) + self._nxs_file = h5py.File(image_file, "r") + self._raw_data = None + + @staticmethod + def understand(image_file: str) -> bool: + try: + return FormatESSNMX_old.is_nmx_file(image_file) + except (OSError, KeyError): + return False + @staticmethod def is_nmx_file(image_file: str) -> bool: def get_name(image_file): @@ -58,14 +328,12 @@ def get_experiment_description(self) -> str: def _load_raw_data(self) -> None: raw_data = [] image_size = self._get_image_size() - total_pixels = image_size[0] * image_size[1] + # total_pixels = image_size[0] * image_size[1] num_images = self.get_num_images() for i in range(self._get_num_panels()): - spectra = self._nxs_file["NMX_data"]["detector_1"]["counts"][ - 0, total_pixels * i : total_pixels * (i + 1), : - ] - spectra = np.reshape(spectra, (image_size[0], image_size[1], num_images)) - raw_data.append(flumpy.from_numpy(spectra)) + spectra = self._nxs_file["NMX_data"]["detector_1"]["counts"][i, :, :] + spectra = np.reshape(spectra, (image_size, num_images)) + raw_data.append(flumpy.from_numpy(np.ascontiguousarray(spectra))) self._raw_data = tuple(raw_data) @@ -74,7 +342,7 @@ def get_raw_data( ) -> tuple[flex.int]: raw_data = [] image_size = self._get_image_size() - total_pixels = image_size[0] * image_size[1] + # total_pixels = image_size[0] * image_size[1] if use_loaded_data: if self._raw_data is None: @@ -87,8 +355,11 @@ def get_raw_data( else: for i in range(self._get_num_panels()): + # spectra = self._nxs_file["NMX_data"]["detector_1"]["counts"][ + # 0, total_pixels * i : total_pixels * (i + 1), index : index + 1 + # ] spectra = self._nxs_file["NMX_data"]["detector_1"]["counts"][ 
- 0, total_pixels * i : total_pixels * (i + 1), index : index + 1 + i, :, index ] spectra = np.reshape(spectra, image_size) raw_data.append(flumpy.from_numpy(np.ascontiguousarray(spectra))) @@ -97,7 +368,10 @@ def get_raw_data( def _get_time_channel_bins(self) -> list[float]: # (usec) - return self._nxs_file["NMX_data"]["detector_1"]["t_bin"][:] * 10**6 + if np.ndim(self._nxs_file["NMX_data"]["detector_1"]["t_bin"][:]) == 1: + return self._nxs_file["NMX_data"]["detector_1"]["t_bin"][:] * 10**6 + else: + return self._nxs_file["NMX_data"]["detector_1"]["t_bin"][1] * 10**6 def _get_time_of_flight(self) -> list[float]: # (usec) @@ -123,13 +397,22 @@ def get_detector(self, index: int = None) -> Detector: root = detector.hierarchy() for i in range(num_panels): - panel = root.add_panel() + group = root.add_group() + panel = group.add_panel() panel.set_type(panel_type) panel.set_name(panel_names[i]) panel.set_image_size(image_size) panel.set_trusted_range(trusted_range) panel.set_pixel_size(pixel_size) + # old_origin = np.array(panel_origins[i]) + # fast_axis = np.array(fast_axes[i]) + # slow_axis = np.array(slow_axes[i]) + # New_origin = tuple(old_origin + slow_axis* 0.4 * 1280 + fast_axis* 0.4 * 1280) + # fast_axis = tuple(fast_axis) + # slow_axis = tuple(slow_axis) + # panel.set_local_frame(fast_axis, slow_axis, New_origin) panel.set_local_frame(fast_axes[i], slow_axes[i], panel_origins[i]) + panel.set_gain(gain) r, t = panel_projections[i] r = tuple(map(int, r)) @@ -139,13 +422,11 @@ def get_detector(self, index: int = None) -> Detector: return detector def _get_num_panels(self) -> int: - return self._nxs_file["NMX_data/instrument"].attrs["nr_detector"] + num_rows = self._nxs_file["NMX_data"]["NXdetector"]["fast_axis"].shape + return num_rows[0] def _get_panel_names(self) -> list[str]: - return [ - "%02d" % (i + 1) - for i in range(self._nxs_file["NMX_data/instrument"].attrs["nr_detector"]) - ] + return ["%02d" % (i + 1) for i in range(self._get_num_panels())] def 
_get_panel_type(self) -> str: return "SENSOR_PAD" @@ -163,14 +444,25 @@ def _get_pixel_size(self) -> tuple[float, float]: return (0.4, 0.4) def _get_panel_fast_axes(self) -> tuple[tuple[float, float, float]]: - return ((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, -1.0)) + # return ((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 0.0, -1.0)) + fast_axes = self._nxs_file["NMX_data/NXdetector"]["fast_axis"][:] + return (tuple(fast_axes[0]), tuple(fast_axes[1]), tuple(fast_axes[2])) def _get_panel_slow_axes(self) -> tuple[tuple[float, float, float]]: - return ((0.0, 1.0, 0.0), (0.0, 1.0, 0.0), (0.0, 1.0, 0.0)) + # return ((0.0, 1.0, 0.0), (0.0, 1.0, 0.0), (0.0, 1.0, 0.0)) + slow_axes = self._nxs_file["NMX_data/NXdetector"]["slow_axis"][:] + return (tuple(slow_axes[0]), tuple(slow_axes[1]), tuple(slow_axes[2])) def _get_panel_origins(self) -> tuple[tuple[float, float, float]]: # (mm) - return ((-250, -250.0, -292.0), (290, -250.0, -250), (-290, -250.0, 250.0)) + # return ((-250, -250.0, -292.0), (290, -250.0, -250), (-290, -250.0, 250.0)) + + origin = self._nxs_file["NMX_data/NXdetector"]["origin"][:] * 1000 + corrfact = np.array([[0, -256, -256], [-256.0, -256.0, 0], [0, -256, 256]]) + + corrorg = origin + corrfact + return (tuple(corrorg[0]), tuple(corrorg[1]), tuple(corrorg[2])) + # return (tuple(origin[0]),tuple(origin[1]),tuple(origin[2])) def _get_panel_projections_2d(self) -> dict[int : tuple[tuple, tuple]]: p_w, p_h = self._get_image_size() @@ -196,13 +488,14 @@ def get_beam(self, index: int = None) -> PolychromaticBeam: ) def _get_sample_to_source_direction(self) -> tuple[float, float, float]: - return (0, 0, 1) + return (0, 0, -1) def _get_wavelength_range(self) -> tuple[float, float]: # (A) - return (1.8, 2.55) + return (1.8, 3.55) def _get_sample_to_source_distance(self) -> float: + """get sample to source distance in mm""" try: dist = abs(self._nxs_file["NMX_data/NXsource/distance"][...]) * 1000 return dist @@ -214,7 +507,7 @@ def _get_panel_gain(self) -> float: 
return 1.0 def get_goniometer_phi_angle(self) -> float: - return self.get_goniometer_orientations()[0] + return self.get_goniometer_orientations()[1] def get_goniometer(self, index: int = None) -> Goniometer: rotation_axis = (0.0, 1.0, 0.0) @@ -223,15 +516,16 @@ def get_goniometer(self, index: int = None) -> Goniometer: try: angles = self.get_goniometer_orientations() except KeyError: + logger.warning("crystal_rotation not found, using default") return goniometer axes = ((1, 0, 0), (0, 1, 0), (0, 0, 1)) for idx, angle in enumerate(angles): - goniometer.rotate_around_origin(axes[idx], -angle) + goniometer.rotate_around_origin(axes[idx], angle) return goniometer def get_goniometer_orientations(self) -> tuple[float, float, float]: # Angles in deg along x, y, z - return self._nxs_file["NMX_data/crystal_orientation"][...] + return self._nxs_file["NMX_data/NXsample/crystal_rotation"][...] def get_scan(self, index=None) -> Scan: image_range = (1, self.get_num_images()) @@ -260,12 +554,12 @@ def get_flattened_data( panel_data, (panel_size[0], panel_size[1], num_tof_bins) ) if image_range is not None: - assert ( - len(image_range) == 2 - ), "expected image_range to be only two values" - assert ( - image_range[0] >= 0 and image_range[0] < image_range[1] - ), "image_range[0] out of range" + assert len(image_range) == 2, ( + "expected image_range to be only two values" + ) + assert image_range[0] >= 0 and image_range[0] < image_range[1], ( + "image_range[0] out of range" + ) assert image_range[1] <= num_tof_bins, "image_range[1] out of range" panel_data = np.sum( panel_data[:, :, image_range[0] : image_range[1]], axis=2 diff --git a/src/dxtbx/format/FormatHDF5EigerNearlyNexus.py b/src/dxtbx/format/FormatHDF5EigerNearlyNexus.py index 25d7a4cb4..77af1fd5a 100644 --- a/src/dxtbx/format/FormatHDF5EigerNearlyNexus.py +++ b/src/dxtbx/format/FormatHDF5EigerNearlyNexus.py @@ -32,7 +32,7 @@ def find_entries(nx_file): if "entry" in nx_file: entry = nx_file["entry"] if "NX_class" in 
entry.attrs: - if entry.attrs["NX_class"] == np.string_("NXentry"): + if entry.attrs["NX_class"] == np.bytes_("NXentry"): if "definition" not in entry: return entry return None @@ -49,7 +49,7 @@ def is_eiger_nearly_nexus_file(filename): if entry is not None: try: return ( - np.string_("dectris eiger") + np.bytes_("dectris eiger") in entry["instrument"]["detector"]["description"][()].lower() ) except KeyError: @@ -76,7 +76,7 @@ def create_scalar(handle, path, dtype, value): dataset[()] = value # Add NXmx definition - create_scalar(handle["entry"], "definition", "S4", np.string_("NXmx")) + create_scalar(handle["entry"], "definition", "S4", np.bytes_("NXmx")) # Add saturation value try: @@ -100,7 +100,7 @@ def create_scalar(handle, path, dtype, value): # Add detector type create_scalar( - handle["entry/instrument/detector"], "type", "S5", np.string_("PIXEL") + handle["entry/instrument/detector"], "type", "S5", np.bytes_("PIXEL") ) # Move the beam @@ -111,7 +111,7 @@ def create_scalar(handle, path, dtype, value): module_path = "/entry/instrument/detector/module" # print "Creating detector module %s" % (module_path) group = handle.create_group(module_path) - group.attrs["NX_class"] = np.string_("NXdetector_module") + group.attrs["NX_class"] = np.bytes_("NXdetector_module") # Add a module index create_scalar(group, "module_index", "int64", 0) @@ -174,13 +174,13 @@ def create_scalar(handle, path, dtype, value): "float32", handle["/entry/instrument/detector/x_pixel_size"][()], ) - group["fast_pixel_direction"].attrs["transformation_type"] = np.string_( + group["fast_pixel_direction"].attrs["transformation_type"] = np.bytes_( "translation" ) group["fast_pixel_direction"].attrs["vector"] = fast_axis group["fast_pixel_direction"].attrs["offset"] = (0, 0, 0) - group["fast_pixel_direction"].attrs["units"] = np.string_("m") - group["fast_pixel_direction"].attrs["depends_on"] = np.string_(depends_on) + group["fast_pixel_direction"].attrs["units"] = np.bytes_("m") + 
group["fast_pixel_direction"].attrs["depends_on"] = np.bytes_(depends_on) # Add slow_pixel_size dataset create_scalar( @@ -189,29 +189,29 @@ def create_scalar(handle, path, dtype, value): "float32", handle["/entry/instrument/detector/y_pixel_size"][()], ) - group["slow_pixel_direction"].attrs["transformation_type"] = np.string_( + group["slow_pixel_direction"].attrs["transformation_type"] = np.bytes_( "translation" ) group["slow_pixel_direction"].attrs["vector"] = slow_axis group["slow_pixel_direction"].attrs["offset"] = (0, 0, 0) - group["slow_pixel_direction"].attrs["units"] = np.string_("m") - group["slow_pixel_direction"].attrs["depends_on"] = np.string_(depends_on) + group["slow_pixel_direction"].attrs["units"] = np.bytes_("m") + group["slow_pixel_direction"].attrs["depends_on"] = np.bytes_(depends_on) # Add module offset dataset # print "Set module offset to be zero relative to detector" create_scalar(group, "module_offset", "float32", 0) - group["module_offset"].attrs["transformation_type"] = np.string_("translation") + group["module_offset"].attrs["transformation_type"] = np.bytes_("translation") group["module_offset"].attrs["vector"] = (0, 0, 0) group["module_offset"].attrs["offset"] = (0, 0, 0) - group["module_offset"].attrs["units"] = np.string_("m") - group["module_offset"].attrs["depends_on"] = np.string_(depends_on) + group["module_offset"].attrs["units"] = np.bytes_("m") + group["module_offset"].attrs["depends_on"] = np.bytes_(depends_on) # Create detector depends_on create_scalar( handle["/entry/instrument/detector"], "depends_on", "S%d" % len(depends_on), - np.string_(depends_on), + np.bytes_(depends_on), ) # Add detector position @@ -228,22 +228,22 @@ def create_scalar(handle, path, dtype, value): ) ) group = handle.create_group("/entry/instrument/detector/transformations") - group.attrs["NX_class"] = np.string_("NXtransformations") + group.attrs["NX_class"] = np.bytes_("NXtransformations") create_scalar(group, "translation", "float32", 
detector_offset_vector.length()) - group["translation"].attrs["transformation_type"] = np.string_("translation") + group["translation"].attrs["transformation_type"] = np.bytes_("translation") if detector_offset_vector.length() > 0: group["translation"].attrs["vector"] = detector_offset_vector.normalize() else: group["translation"].attrs["vector"] = detector_offset_vector group["translation"].attrs["offset"] = 0 - group["translation"].attrs["units"] = np.string_("m") - group["translation"].attrs["depends_on"] = np.string_(".") + group["translation"].attrs["units"] = np.bytes_("m") + group["translation"].attrs["depends_on"] = np.bytes_(".") # Create goniometer transformations if not found if "/entry/sample/transformations" not in handle: # print "Creating group /entry/sample/transformation" group = handle.create_group("/entry/sample/transformations") - group.attrs["NX_class"] = np.string_("NXtransformations") + group.attrs["NX_class"] = np.bytes_("NXtransformations") else: group = handle["/entry/sample/transformations"] @@ -274,11 +274,11 @@ def create_scalar(handle, path, dtype, value): for name in sorted(handle["/entry/data"]): num_images += handle_orig_entry_properties[name]["length"] dataset = group.create_dataset("omega", (num_images,), dtype="float32") - dataset.attrs["units"] = np.string_("degree") - dataset.attrs["transformation_type"] = np.string_("rotation") + dataset.attrs["units"] = np.bytes_("degree") + dataset.attrs["transformation_type"] = np.bytes_("rotation") dataset.attrs["vector"] = default_axis dataset.attrs["offset"] = 0 - dataset.attrs["depends_on"] = np.string_(".") + dataset.attrs["depends_on"] = np.bytes_(".") omega_range_average = handle[ "/entry/sample/goniometer/omega_range_average" ][()] @@ -295,7 +295,7 @@ def create_scalar(handle, path, dtype, value): handle["/entry/sample"], "depends_on", "S%d" % len(dataset.name), - np.string_(dataset.name), + np.bytes_(dataset.name), ) # Change relative paths to absolute paths @@ -326,16 +326,16 @@ 
def _start(self): # Only support 1 set of models at the moment assert len(reader.entries) == 1, "Currently only supports 1 NXmx entry" assert len(reader.entries[0].data) == 1, "Currently only supports 1 NXdata" - assert ( - len(reader.entries[0].instruments) == 1 - ), "Currently only supports 1 NXinstrument" + assert len(reader.entries[0].instruments) == 1, ( + "Currently only supports 1 NXinstrument" + ) assert len(reader.entries[0].samples) == 1, "Currently only supports 1 NXsample" - assert ( - len(reader.entries[0].instruments[0].detectors) == 1 - ), "Currently only supports 1 NXdetector" - assert ( - len(reader.entries[0].instruments[0].detectors[0].modules) == 1 - ), "Currently only supports 1 NXdetector_module" + assert len(reader.entries[0].instruments[0].detectors) == 1, ( + "Currently only supports 1 NXdetector" + ) + assert len(reader.entries[0].instruments[0].detectors[0].modules) == 1, ( + "Currently only supports 1 NXdetector_module" + ) assert ( len(reader.entries[0].samples[0].beams) == 1 or len(reader.entries[0].instruments[0].beams) == 1 diff --git a/src/dxtbx/format/FormatISISSXD.py b/src/dxtbx/format/FormatISISSXD.py index 3fd5fed93..01a4c2a66 100644 --- a/src/dxtbx/format/FormatISISSXD.py +++ b/src/dxtbx/format/FormatISISSXD.py @@ -97,7 +97,8 @@ def get_detector(self, index: int = None) -> Detector: root = detector.hierarchy() for i in range(num_panels): - panel = root.add_panel() + group = root.add_group() + panel = group.add_panel() panel.set_type(panel_type) panel.set_name(panel_names[i]) panel.set_image_size(image_size) @@ -447,12 +448,12 @@ def get_flattened_data( panel_data, (panel_size[0], panel_size[1], num_tof_bins) ) if image_range is not None: - assert ( - len(image_range) == 2 - ), "expected image_range to be only two values" - assert ( - image_range[0] >= 0 and image_range[0] < image_range[1] - ), "image_range[0] out of range" + assert len(image_range) == 2, ( + "expected image_range to be only two values" + ) + assert 
image_range[0] >= 0 and image_range[0] < image_range[1], ( + "image_range[0] out of range" + ) assert image_range[1] <= num_tof_bins, "image_range[1] out of range" panel_data = np.flipud( np.sum(panel_data[:, :, image_range[0] : image_range[1]], axis=2) diff --git a/src/dxtbx/format/FormatMANDI.py b/src/dxtbx/format/FormatMANDI.py index 8fd283b5f..650f23dc5 100644 --- a/src/dxtbx/format/FormatMANDI.py +++ b/src/dxtbx/format/FormatMANDI.py @@ -1,8 +1,10 @@ from __future__ import annotations +import sys +import xml +import xml.etree.ElementTree as ET from multiprocessing import Pool, cpu_count from os.path import join -from sys import argv import h5py import numpy as np @@ -16,6 +18,7 @@ from dxtbx.model.beam import BeamFactory, PolychromaticBeam, Probe from dxtbx.model.goniometer import Goniometer, GoniometerFactory from dxtbx.model.scan import Scan, ScanFactory +from dxtbx.model.tof_helpers import InstrumentDefinitionReader class FormatMANDI(FormatHDF5): @@ -33,6 +36,8 @@ def __init__(self, image_file: str, **kwargs) -> None: self._base_entry = self.get_base_entry_name(self.nxs_file) self.detector = None self.raw_data = None + self.xml_reader = InstrumentDefinitionReader() + self.xml_file = self.get_xml_file() def open_file(self, image_file_path: str) -> h5py.File: return h5py.File(image_file_path, "r") @@ -40,6 +45,12 @@ def open_file(self, image_file_path: str) -> h5py.File: def get_base_entry_name(self, nxs_file: h5py.File) -> str: return list(nxs_file.keys())[0] + def get_xml_file(self) -> xml.etree.ElementTree.Element: + xml_string = self.nxs_file[self._base_entry]["instrument/instrument_xml/data"][ + 0 + ].decode() + return ET.fromstring(xml_string) + @staticmethod def understand(image_file: str) -> bool: try: @@ -63,6 +74,20 @@ def get_name(image_file: str) -> str: return get_name(image_file) == "MANDI" + def get_instrument_name(self) -> str: + return self.nxs_file[self._base_entry]["instrument/name"][0].decode() + + def get_experiment_title(self) -> str: + 
return self.nxs_file[self._base_entry]["title"][0].decode() + + def get_experiment_run_number(self) -> str: + return str(self.nxs_file[self._base_entry]["run_number"][0].decode()) + + def get_experiment_description(self) -> str: + title = self.get_experiment_title() + run_number = self.get_experiment_run_number() + return f"{title} ({run_number})" + def get_raw_data(self, index: int) -> tuple[flex.int]: raw_data = [] panel_size = self._get_image_size() @@ -73,27 +98,29 @@ def get_raw_data(self, index: int) -> tuple[flex.int]: ], panel_size, ).T + # spectra = self.nxs_file[self._base_entry][f"{panel_name}_events"]["spectra"] raw_data.append(flumpy.from_numpy(np.ascontiguousarray(spectra))) return tuple(raw_data) - def get_detector(self) -> Detector: + def get_detector(self, index: int = None) -> Detector: num_panels = self._get_num_panels() panel_names = self._get_panel_names() panel_type = self._get_panel_type() image_size = self._get_image_size() trusted_range = self._get_panel_trusted_range() pixel_size = self._get_pixel_size() - fast_axes = self._get_panel_fast_axes() - slow_axes = self._get_panel_slow_axes() - panel_origins = self._get_panel_origins() + panel_origins, fast_axes, slow_axes = ( + self.xml_reader.get_dials_detector_geometry(self.xml_file) + ) gain = self._get_panel_gain() panel_projections = self._get_panel_projections_2d() detector = Detector() root = detector.hierarchy() for i in range(num_panels): - panel = root.add_panel() + group = root.add_group() + panel = group.add_panel() panel.set_type(panel_type) panel.set_name(panel_names[i]) panel.set_image_size(image_size) @@ -315,8 +342,12 @@ def get_num_images(self) -> int: def get_beam(self, index: int = None) -> PolychromaticBeam: direction = self._get_sample_to_source_direction() distance = self._get_sample_to_source_distance() + wavelength_range = (2.0, 4.0) return BeamFactory.make_polychromatic_beam( - direction=direction, sample_to_source_distance=distance, probe=Probe.neutron + 
direction=direction, + wavelength_range=wavelength_range, + sample_to_source_distance=distance, + probe=Probe.neutron, ) def get_scan(self, index=None) -> Scan: @@ -354,7 +385,7 @@ def _get_panel_type(self) -> str: return "SENSOR_PAD" def _get_panel_projections_2d(self) -> dict: - p_w, p_h = self._get_pixel_size() + p_w, p_h = self._get_image_size() panel_pos = {} count = 1 for i in range(8): @@ -421,15 +452,21 @@ def delete_event_data(nxs_file, base_dir, panel_name): output_path = join(base_dir, panel_name) output_path = join(output_path, spectra_output_name) print(f"Writing spectra to {output_path}") + if nxs_file.get(output_path): + print(f"deleting {output_path}...") + del nxs_file[output_path] panel_spectra = FormatMANDI.generate_histogram_data_for_panel( nxs_file, tof_bins, panel_size, panel_name, nproc ) + nxs_file.create_dataset(output_path, data=panel_spectra, compression="gzip") if remove_event_data: delete_event_data(nxs_file, base_dir, panel_name) print(f"Removed event data for {panel_name}") if write_tof_bins and not written_tof_bins: tof_path = join(base_dir, "time_of_flight") + if nxs_file.get(tof_path): + del nxs_file[tof_path] print(f"Writing time of flight bins to {tof_path}") nxs_file.create_dataset(tof_path, data=tof_bins, compression="gzip") written_tof_bins = True @@ -583,7 +620,7 @@ def generate_tof_bins( min_tof = min_tof - padding max_tof = max_tof + padding print( - f"Time of flight range for {nxs_file}: {round(min_tof,3)} - {round(max_tof,3)} (usec)" + f"Time of flight range for {nxs_file}: {round(min_tof, 3)} - {round(max_tof, 3)} (usec)" ) num_bins = int((max_tof - min_tof) / delta_tof) return np.linspace(min_tof, max_tof, num_bins) @@ -606,5 +643,9 @@ def get_panel_number(panel_name: str) -> int: if __name__ == "__main__": - for arg in argv[1:]: - print(FormatMANDI.understand(arg)) + nxs_file = sys.argv[1] + remove_event_data = False + delta_tof = 50 # usec + FormatMANDI.add_histogram_data_to_nxs_file( + nxs_file_path=nxs_file, 
remove_event_data=remove_event_data, delta_tof=delta_tof + ) diff --git a/src/dxtbx/format/FormatMANDILaue.py b/src/dxtbx/format/FormatMANDILaue.py new file mode 100644 index 000000000..8f79a8db3 --- /dev/null +++ b/src/dxtbx/format/FormatMANDILaue.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +from os.path import join + +import h5py +import numpy as np +from dials.array_family import flex + +from dxtbx import flumpy +from dxtbx.format.FormatMANDI import FormatMANDI +from dxtbx.model.beam import BeamFactory, PolychromaticBeam, Probe +from dxtbx.model.scan import Scan, ScanFactory + + +class FormatMANDILaue(FormatMANDI): + """ + Class to read MANDI data flattened along the ToF dimension + to approximate data expected from IMAGINE-X + https://neutrons.ornl.gov/imagine + + To add Laue data to a MANDI NeXus file: + >>> from dxtbx.format.FormatMANDILaue import FormatMANDILaue + >>> nxs_file = "/path/to/file/example.nxs.h5" + >>> FormatMANDILaue.add_laue_data_to_nxs_file(nxs_file) + """ + + @staticmethod + def understand(image_file: str) -> bool: + try: + return FormatMANDILaue.is_mandi_laue_file(image_file) + except OSError: + return False + + @staticmethod + def is_mandi_laue_file(image_file: str) -> bool: + with h5py.File(image_file, "r") as handle: + # File is not empty + if len(handle) == 0: + return False + base_entry = list(handle.keys())[0] + # Has a name entry + if f"{base_entry}/instrument/name" not in handle: + return False + # Name can be decoded as is MANDI + try: + name = handle[f"{base_entry}/instrument/name"][0].decode() + if name == "MANDI" and "bank10_events" in handle[base_entry].keys(): + # Laue data present + return "laue_data" in handle[f"{base_entry}/bank10_events"] + + except (ValueError, IndexError): + return False + + def get_raw_data(self, index: int) -> tuple[flex.int]: + raw_data = [] + for panel_name in self._get_panel_names(): + panel_data = self.nxs_file[self._base_entry][f"{panel_name}_events"][ + "laue_data" + ][:] + 
panel_data = panel_data.astype(np.int32) + raw_data.append(flumpy.from_numpy(np.ascontiguousarray(panel_data))) + return tuple(raw_data) + + def get_scan(self, index: int | None = None) -> Scan: + image_range = (1, self.get_num_images()) + return ScanFactory.make_scan_from_properties( + image_range=image_range, properties={} + ) + + def get_num_images(self) -> int: + return 1 + + def get_beam(self, index: int | None = None) -> PolychromaticBeam: + direction = self._get_sample_to_source_direction() + distance = self._get_sample_to_source_distance() + wavelength_range = (2.0, 4.0) + return BeamFactory.make_polychromatic_beam( + direction=direction, + sample_to_source_distance=distance, + probe=Probe.neutron, + wavelength_range=wavelength_range, + ) + + @staticmethod + def add_laue_data_to_nxs_file( + nxs_file_path: str, + output_name: str = "laue_data", + panel_size: tuple[int, int] = (256, 256), # (px) + compress=True, + ) -> None: + """ + Extracts event data and writes out laue data for each panel in + nxs_file_path + """ + + print(f"Writing laue data in {nxs_file_path}") + + nxs_file = h5py.File(nxs_file_path, "r+") + base_dir = list(nxs_file.keys())[0] + + panel_names = FormatMANDI.get_panel_names(nxs_file) + for panel_name in panel_names: + output_path = join(base_dir, panel_name) + output_path = join(output_path, output_name) + panel_data = FormatMANDILaue.generate_laue_data_for_panel( + nxs_file, panel_size, panel_name + ) + if compress: + nxs_file.create_dataset( + output_path, data=panel_data, compression="gzip" + ) + else: + nxs_file.create_dataset(output_path, data=panel_data) + + nxs_file.close() + + @staticmethod + def generate_laue_data_for_panel( + nxs_file: h5py.File, + panel_size: tuple[int, int], + panel_name: str, + ) -> np.array: + """ + Generates laue data for a given panel + """ + + ## Get panel data + panel_number = FormatMANDI.get_panel_number(panel_name) + # Actual pixel ids, starting from bottom left and going up y axis first + event_id = 
nxs_file[f"entry/{panel_name}/event_id"][:] + + # event_ids are given with an offset + event_id_offset = panel_number * panel_size[0] * panel_size[1] + corrected_event_id = event_id - event_id_offset + + num_pixels = panel_size[0] * panel_size[1] + + counts = np.bincount(corrected_event_id) + + # Pad if no events for pixels outside of max event id + if len(counts) < num_pixels: + counts = np.pad(counts, (0, num_pixels - len(counts)), mode="constant") + else: + counts = counts[:num_pixels] + + laue_array = np.reshape(counts, panel_size) + + return laue_array diff --git a/src/dxtbx/format/FormatNexus.py b/src/dxtbx/format/FormatNexus.py index f4ed4a1a8..b772b5624 100644 --- a/src/dxtbx/format/FormatNexus.py +++ b/src/dxtbx/format/FormatNexus.py @@ -24,9 +24,9 @@ def _start(self): # Only support 1 set of models at the moment assert len(reader.entries) == 1, "Currently only supports 1 NXmx entry" assert len(reader.entries[0].data) == 1, "Currently only supports 1 NXdata" - assert ( - len(reader.entries[0].instruments) == 1 - ), "Currently only supports 1 NXinstrument" + assert len(reader.entries[0].instruments) == 1, ( + "Currently only supports 1 NXinstrument" + ) assert len(reader.entries[0].samples) == 1, "Currently only supports 1 NXsample" assert ( len(reader.entries[0].samples[0].beams) == 1 @@ -54,12 +54,12 @@ def _start(self): num_images = 0 if len(instrument.detector_groups) == 0: - assert ( - len(reader.entries[0].instruments[0].detectors) == 1 - ), "Currently only supports 1 NXdetector unless in a detector group" - assert ( - len(reader.entries[0].instruments[0].detectors[0].modules) == 1 - ), "Currently only supports 1 NXdetector_module unless in a detector group" + assert len(reader.entries[0].instruments[0].detectors) == 1, ( + "Currently only supports 1 NXdetector unless in a detector group" + ) + assert len(reader.entries[0].instruments[0].detectors[0].modules) == 1, ( + "Currently only supports 1 NXdetector_module unless in a detector group" + ) 
self._raw_data = nexus.DataFactory(data, max_size=num_images) self._detector_model = nexus.DetectorFactory( diff --git a/src/dxtbx/format/FormatNexusJungfrauHack.py b/src/dxtbx/format/FormatNexusJungfrauHack.py index e43892c6f..27469a8d8 100644 --- a/src/dxtbx/format/FormatNexusJungfrauHack.py +++ b/src/dxtbx/format/FormatNexusJungfrauHack.py @@ -44,9 +44,9 @@ def _start(self): # Only support 1 set of models at the moment assert len(reader.entries) == 1, "Currently only supports 1 NXmx entry" assert len(reader.entries[0].data) == 1, "Currently only supports 1 NXdata" - assert ( - len(reader.entries[0].instruments) == 1 - ), "Currently only supports 1 NXinstrument" + assert len(reader.entries[0].instruments) == 1, ( + "Currently only supports 1 NXinstrument" + ) assert len(reader.entries[0].samples) == 1, "Currently only supports 1 NXsample" assert ( len(reader.entries[0].samples[0].beams) == 1 @@ -106,11 +106,11 @@ def _setup_detector(self, detector, beam): detector_material = clean_string(str(material)) material = { "Si": "Si", - np.string_("Si"): "Si", - np.string_("Silicon"): "Si", - np.string_("Sillicon"): "Si", - np.string_("CdTe"): "CdTe", - np.string_("GaAs"): "GaAs", + np.bytes_("Si"): "Si", + np.bytes_("Silicon"): "Si", + np.bytes_("Sillicon"): "Si", + np.bytes_("CdTe"): "CdTe", + np.bytes_("GaAs"): "GaAs", }.get(detector_material) if not material: raise RuntimeError("Unknown material: %s" % detector_material) diff --git a/src/dxtbx/format/FormatSER.py b/src/dxtbx/format/FormatSER.py index 0276b889b..5e4f5172e 100644 --- a/src/dxtbx/format/FormatSER.py +++ b/src/dxtbx/format/FormatSER.py @@ -142,7 +142,7 @@ def read_emi(filename): def _parseEntry_emi(value): - """Auxiliary function to parse string entry to int, float or np.string_(). + """Auxiliary function to parse string entry to int, float or np.bytes_(). 
Parameters ---------- value : str @@ -162,7 +162,7 @@ def _parseEntry_emi(value): p = float(value) except ValueError: # if neither int nor float, stay with string - p = np.string_(str(value)) + p = np.bytes_(str(value)) return p diff --git a/src/dxtbx/format/FormatTRPX.py b/src/dxtbx/format/FormatTRPX.py new file mode 100644 index 000000000..0ded4e71e --- /dev/null +++ b/src/dxtbx/format/FormatTRPX.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import os +import re +import sys +import xml.etree.ElementTree as ET + +import numpy as np + +sys.path.append(os.path.join(os.getcwd(), "build")) + +try: + import pyterse +except ImportError: + pyterse = None + + +from dxtbx import flumpy +from dxtbx.format.Format import Format +from dxtbx.model import ScanFactory +from dxtbx.model.beam import Probe + + +class FormatTRPX(Format): + @staticmethod + def understand(image_file): + try: + with FormatTRPX.open_file(image_file, "rb") as fh: + header = fh.read(24) + + except OSError: + return False + + # Check if header starts with '' + raise RuntimeError("Failed to find the end of the header.") + header_data += chunk + if b"/>" in header_data: + end_index = header_data.find(b"/>") + 2 + header_data = header_data[:end_index] + break + + try: + header_str = header_data.decode("utf-8") + except UnicodeDecodeError as e: + raise RuntimeError(f"Failed to decode header data as UTF-8: {e}") + + try: + root = ET.fromstring(header_str) + except ET.ParseError as e: + raise RuntimeError(f"Failed to parse XML header: {e}") + + hd["prolix_bits"] = int(root.attrib.get("prolix_bits", "12")) + hd["signed"] = int(root.attrib.get("signed", "0")) + hd["block"] = int(root.attrib.get("block", "12")) + hd["memory_size"] = int(root.attrib.get("memory_size", "0")) + hd["number_of_values"] = int(root.attrib.get("number_of_values", "0")) + hd["number_of_frames"] = int(root.attrib.get("number_of_frames", "1")) + + hd["pixel_size"] = (0.055, 0.055) + hd["trusted_range"] = (0, 65535) + 
hd["distance"] = 478.0 + hd["start_tilt"] = 0.0 + hd["delta_tilt"] = 0.0 + hd["exposure"] = 1.0 + + if "dimensions" in root.attrib: + dimensions_str = root.attrib["dimensions"] + dimensions = list(map(int, dimensions_str.strip().split())) + hd["image_size"] = dimensions + else: + num_values = hd["number_of_values"] + image_dimension = int(np.sqrt(num_values)) + hd["image_size"] = (image_dimension, image_dimension) + + hd["distance"] = float(root.attrib.get("distance", "478.0")) + pixel_size_str = root.attrib.get("pixel_size", "0.055 0.055") + hd["pixel_size"] = tuple(map(float, pixel_size_str.strip().split())) + trusted_range_str = root.attrib.get("trusted_range", "0 65535") + hd["trusted_range"] = tuple(map(float, trusted_range_str.strip().split())) + + return hd + + def _start(self): + """Open the image file and read useful metadata into an internal dictionary""" + self._header_dictionary = self._read_metadata(self._image_file) + + def _goniometer(self): + """Dummy goniometer, 'vertical' as the images are viewed.""" + goniometer = self._goniometer_factory.known_axis((0, -1, 0)) + return goniometer + + def _detector(self): + beam_centre = [ + (p * i) / 2 + for p, i in zip( + self._header_dictionary["pixel_size"], + self._header_dictionary["image_size"], + ) + ] + d = self._detector_factory.simple( + "PAD", + self._header_dictionary["distance"], + beam_centre, + "+x", + "-y", + self._header_dictionary["pixel_size"], + self._header_dictionary["image_size"], + self._header_dictionary["trusted_range"], + ) + return d + + def _beam(self): + """Unpolarized beam, default energy 200 keV""" + beam = self._beam_factory.make_polarized_beam( + sample_to_source=(0.0, 0.0, 1.0), + wavelength=0.02508, + polarization=(0, 1, 0), + polarization_fraction=0.5, + probe=Probe.electron, + ) + return beam + + def _scan(self): + """Scan model for this image, filling out any unavailable items with dummy values""" + alpha = self._header_dictionary.get("start_tilt", 0.0) + dalpha = 
self._header_dictionary.get("delta_tilt", 1.0) + exposure = self._header_dictionary.get("exposure", 0.0) + oscillation = (alpha, dalpha) + fname = os.path.split(self._image_file)[-1] + s = fname.split("_")[-1].split(".")[0] + try: + index = int(re.match(".*?([0-9]+)$", s).group(1)) + except AttributeError: + index = 1 + scan = ScanFactory.make_scan((index, index), exposure, oscillation, {index: 0}) + return scan + + def get_raw_data(self): + if pyterse is None: + raise ImportError( + "The package pyterse is not installed. Please install it to read TRPX files." + ) + terse = pyterse.Terse.load(self._image_file) + decompressed_data = terse.prolix() + raw_data_flex = flumpy.from_numpy(decompressed_data) + return raw_data_flex diff --git a/src/dxtbx/format/FormatXTC.py b/src/dxtbx/format/FormatXTC.py index 013f4fced..bf3c83ad7 100644 --- a/src/dxtbx/format/FormatXTC.py +++ b/src/dxtbx/format/FormatXTC.py @@ -6,6 +6,7 @@ from itertools import groupby import numpy as np +from scipy.signal import convolve import serialtbx.detector.xtc import serialtbx.util @@ -73,6 +74,11 @@ for any events with a dropped spectrum. If the spectrum is \ present and calibration constants are provided, \ wavelength_offset is ignored. + wavelength_scale = None + .type = float + .help = Optional scalar to apply to each wavelength (see \ + wavelength_offset). If both scale and offset are present, \ + the final wavelength is (scale * initial wavelength) + offset. wavelength_fallback = None .type = float .help = If the wavelength cannot be found from the XTC stream, fall \ @@ -101,6 +107,27 @@ .type = bool .help = Raise an exception for any event where the spectrum is not \ available. 
+ spectrum_index_offset = None + .type = int + .help = Optional offset if the spectrometer and images are not in sync in \ + XTC stream + check_spectrum { + enable = False + .type = bool + smooth_window = 50 + .type = int + .help = Before determining spectral width, smooth it by convolution with \ + a box of this width in pixels. + max_width = .003 + .type = float + .help = Reject spectra with greater than this fractional width. + intensity_threshold = 0.2 + .type = float + .help = Determine the spectral width at this fraction of the maximum. + min_height = 500 + .type = float + .help = Reject spectra below this max intensity (after smoothing). + } filter { evr_address = evr1 .type = str @@ -172,6 +199,28 @@ def __init__(self, image_file, **kwargs): else: self._spectrum_pedestal = None + """ + # Prototype code for checking automatically determining the offst between the eBeam + # and the spectrometer + if self.params.spectrum_eV_per_pixel is not None and self.params.wavelength_offset is None: + i = 0; count = 0 + all_fee_wav, all_eBeam_wav = [],[] + while i < self.get_num_images() and count < 200: + evt = self._get_event(i) + spectrum = self.get_spectrum(i) + eBeam_wav = serialtbx.detector.xtc.evt_wavelength(evt, delta_k=self.params.wavelength_delta_k) + i += 1 + if spectrum is None: continue + if eBeam_wav is None: continue + all_fee_wav.append(spectrum.get_weighted_wavelength()) + all_eBeam_wav.append(eBeam_wav) + count += 1 + if count >= 200: + mean_eBeam_wav = sum(all_eBeam_wav)/len(all_eBeam_wav) + mean_fee_wav = sum(all_fee_wav)/len(all_fee_wav) + self.params.wavelength_offset = mean_fee_wav - mean_eBeam_wav + """ + def _load_hit_indices(self): self._hit_inds = None if self.params.hits_file is not None: @@ -217,7 +266,7 @@ def params_from_phil(master_phil, user_phil, strict=False): for unused_arg in unused_args: print(unused_arg) print( - "Incorrect of unused parameter in locator file. Please check and retry" + "Incorrect or unused parameter in locator file. 
Please check and retry" ) return None params = working_phil.extract() @@ -429,6 +478,21 @@ def get_num_images(self): def get_beam(self, index=None): return self._beam(index) + def check_spectrum(self, spectrum): + """Verify the spectrum is above a certain threshold""" + xvals = spectrum.get_energies_eV() + yvals = spectrum.get_weights() + window = self.params.check_spectrum.smooth_window + yvals = convolve(yvals, np.ones((window,)) / window, mode="same") + ymax = max(yvals) + if ymax < self.params.check_spectrum.min_height: + return False + threshold = ymax * self.params.check_spectrum.intensity_threshold + indices = np.where(yvals > threshold)[0] + width = xvals[indices[-1]] - xvals[indices[0]] + frac_width = width / spectrum.get_weighted_energy_eV() + return frac_width < self.params.check_spectrum.max_width + def _beam(self, index=None): """Returns a simple model for the beam""" if index is None: @@ -441,6 +505,9 @@ def _beam(self, index=None): return None spectrum = self.get_spectrum(index) if spectrum: + if self.params.check_spectrum.enable: + if not self.check_spectrum(spectrum): + return None wavelength = spectrum.get_weighted_wavelength() else: wavelength = serialtbx.detector.xtc.evt_wavelength( @@ -448,8 +515,11 @@ def _beam(self, index=None): ) if wavelength is None or wavelength <= 0: wavelength = self.params.wavelength_fallback - if self.params.wavelength_offset is not None: - wavelength += self.params.wavelength_offset + if wavelength is not None and wavelength > 0: + if self.params.wavelength_scale is not None: + wavelength *= self.params.wavelength_scale + if self.params.wavelength_offset is not None: + wavelength += self.params.wavelength_offset if wavelength is None: self._beam_cache = None else: @@ -475,6 +545,10 @@ def get_spectrum(self, index=None): def _spectrum(self, index=None): if index is None: index = 0 + if self.params.spectrum_index_offset: + index += self.params.spectrum_index_offset + if index < 0: + return None if 
self.params.spectrum_eV_per_pixel is None: return None diff --git a/src/dxtbx/format/FormatXTCCspad.py b/src/dxtbx/format/FormatXTCCspad.py index cb72009f4..594e1c0ec 100644 --- a/src/dxtbx/format/FormatXTCCspad.py +++ b/src/dxtbx/format/FormatXTCCspad.py @@ -44,9 +44,9 @@ class FormatXTCCspad(FormatXTC): def __init__(self, image_file, locator_scope=cspad_locator_scope, **kwargs): super().__init__(image_file, locator_scope=locator_scope, **kwargs) - assert ( - self.params.cspad.detz_offset is not None - ), "Supply a detz_offset for the cspad" + assert self.params.cspad.detz_offset is not None, ( + "Supply a detz_offset for the cspad" + ) self._cache_psana_pedestals() # NOTE: move to base FormatXTC class self._psana_gain_map_cache = {} diff --git a/src/dxtbx/format/FormatXTCEpix.py b/src/dxtbx/format/FormatXTCEpix.py index e5b05af1c..f1172b2c1 100644 --- a/src/dxtbx/format/FormatXTCEpix.py +++ b/src/dxtbx/format/FormatXTCEpix.py @@ -134,7 +134,7 @@ def _detector(self, index=None): p.set_local_frame(fast.elems, slow.elems, origin.elems) p.set_pixel_size((pixel_size, pixel_size)) p.set_image_size((dim_fast // 2, dim_slow // 2)) - p.set_trusted_range((0, 2e6)) + p.set_trusted_range((-100, 2e6)) p.set_gain(factor_kev_angstrom / wavelength) p.set_name(val) diff --git a/src/dxtbx/format/FormatXTCRayonix.py b/src/dxtbx/format/FormatXTCRayonix.py index 230aa21e7..a81717c87 100644 --- a/src/dxtbx/format/FormatXTCRayonix.py +++ b/src/dxtbx/format/FormatXTCRayonix.py @@ -86,9 +86,9 @@ def get_detector(self, index=None): self._distance_mm = ( self._detz_encoder(self.current_event) + self.params.rayonix.detz_offset ) - assert ( - self._distance_mm > 0 - ), "something is wrong with encoder or detz_offset" + assert self._distance_mm > 0, ( + "something is wrong with encoder or detz_offset" + ) return self._detector() def _detector(self): diff --git a/src/dxtbx/format/boost_python/cbf_read_buffer.cpp b/src/dxtbx/format/boost_python/cbf_read_buffer.cpp index 8d70f074f..b644ec4cf 
100644 --- a/src/dxtbx/format/boost_python/cbf_read_buffer.cpp +++ b/src/dxtbx/format/boost_python/cbf_read_buffer.cpp @@ -5,7 +5,7 @@ #include -//#include +// #include extern "C" { typedef struct cbf_handle_struct; int cbf_read_buffered_file(cbf_handle_struct *handle, diff --git a/src/dxtbx/format/cbf_writer.py b/src/dxtbx/format/cbf_writer.py index 62d72125f..c6e2fda4c 100644 --- a/src/dxtbx/format/cbf_writer.py +++ b/src/dxtbx/format/cbf_writer.py @@ -75,9 +75,7 @@ def add_frame_specific_cbf_tables( "electrospray" if is_xfel else ( - "unknown" "crystals injected by electrospray" - if is_xfel - else "unknown" + "unknowncrystals injected by electrospray" if is_xfel else "unknown" ) ), ] @@ -150,9 +148,9 @@ class FullCBFWriter: def __init__(self, filename=None, imageset=None): """Provide a file name or imageset as input""" - assert [filename, imageset].count( - None - ) == 1, "Supply either filename or imageset" + assert [filename, imageset].count(None) == 1, ( + "Supply either filename or imageset" + ) if filename is not None: format_class = dxtbx.format.Registry.get_format_class_for_file(filename) diff --git a/src/dxtbx/format/nexus.py b/src/dxtbx/format/nexus.py index 3586c77f4..6ea3eb7c1 100644 --- a/src/dxtbx/format/nexus.py +++ b/src/dxtbx/format/nexus.py @@ -49,15 +49,15 @@ NXNode = Union[h5py.File, h5py.Group] -def h5str(h5_value: str | numpy.string_ | bytes) -> str: +def h5str(h5_value: str | numpy.bytes_ | bytes) -> str: """ Convert a value returned an h5py attribute to str. - h5py can return either a bytes-like (numpy.string_) or str object + h5py can return either a bytes-like (numpy.bytes_) or str object for attribute values depending on whether the value was written as fixed or variable length. This function collapses the two to str. 
""" - if isinstance(h5_value, (numpy.string_, bytes)): + if isinstance(h5_value, (numpy.bytes_, bytes)): return h5_value.decode("utf-8") return h5_value @@ -69,7 +69,7 @@ def dataset_as_flex(dataset, selection): assert numpy.issubdtype(dataset.dtype, numpy.floating) double_types = [ numpy.double, - numpy.longfloat, + numpy.longdouble, numpy.float64, ] if hasattr(numpy, "float96"): @@ -79,7 +79,7 @@ def dataset_as_flex(dataset, selection): if dataset.dtype in [ numpy.half, numpy.single, - numpy.float_, + numpy.float64, numpy.float16, numpy.float32, ]: @@ -795,13 +795,12 @@ def __init__(self, instrument, beam, idx=None): expected_detectors = [] root_name = None for i, parent_id in enumerate(group_parent): - assert ( - parent_id - in [ - -1, - 1, - ] - ), "Hierarchy of detectors not supported. Hierarchy of module components within detector elements is supported" + assert parent_id in [ + -1, + 1, + ], ( + "Hierarchy of detectors not supported. Hierarchy of module components within detector elements is supported" + ) if parent_id == -1: assert root_name is None, "Multiple roots not supported" @@ -812,9 +811,9 @@ def __init__(self, instrument, beam, idx=None): assert root_name is not None, "Detector root not found" assert sorted( os.path.basename(d.handle.name) for d in instrument.detectors - ) == sorted( - expected_detectors - ), "Mismatch between detector group names and detectors available" + ) == sorted(expected_detectors), ( + "Mismatch between detector group names and detectors available" + ) root = None @@ -1096,7 +1095,7 @@ def __init__(self, obj, beam, shape=None): # mu_at_angstrom returns cm^-1, but need mu in mm^-1 table = attenuation_coefficient.get_table(material) wavelength = beam.get_wavelength() - mu = table.mu_at_angstrom(wavelength) / 10.0 + mu = float(table.mu_at_angstrom(wavelength)) / 10.0 # Construct the detector model pixel_size = (fast_pixel_direction_value, slow_pixel_direction_value) @@ -1113,9 +1112,9 @@ def __init__(self, obj, beam, 
shape=None): Panel( detector_type, detector_name, - tuple(fast_axis), - tuple(slow_axis), - tuple(origin), + tuple(float(x) for x in fast_axis), + tuple(float(x) for x in slow_axis), + tuple(float(x) for x in origin), pixel_size, image_size, trusted_range, @@ -1262,9 +1261,9 @@ def __init__(self, datalists): self._datalists = datalists lengths = [len(datalist) for datalist in datalists] self._num_images = lengths[0] - assert all( - length == self._num_images for length in lengths - ), "Not all datasets are the same length" + assert all(length == self._num_images for length in lengths), ( + "Not all datasets are the same length" + ) def __len__(self): return self._num_images diff --git a/src/dxtbx/format/nxmx_writer.py b/src/dxtbx/format/nxmx_writer.py index cfae85129..a214463ac 100644 --- a/src/dxtbx/format/nxmx_writer.py +++ b/src/dxtbx/format/nxmx_writer.py @@ -104,6 +104,18 @@ .type = float .help = flux incident on beam plane in photons per second } + detector { + sensor_material = None + .type = str + .help = At times, radiation is not directly sensed by the detector. Rather, \ + the detector might sense the output from some converter like a \ + scintillator. This is the name of this converter material. + sensor_thickness = None + .type = float + .help = At times, radiation is not directly sensed by the detector. Rather, \ + the detector might sense the output from some converter like a \ + scintillator. This is the thickness of this converter material. 
+ } """ ) @@ -119,14 +131,15 @@ class NXmxWriter: def __init__(self, params, experiments=None, imageset=None): self.params = params + self.detector = None if experiments or imageset: self.setup(experiments, imageset) self.handle = None def setup(self, experiments=None, imageset=None): - assert [experiments, imageset].count( - None - ) == 1, "Supply either experiments or imagset, not both" + assert [experiments, imageset].count(None) == 1, ( + "Supply either experiments or imagset, not both" + ) if experiments: self.imagesets = experiments.imagesets() assert len(experiments.detectors()) == 1, "Multiple detectors not supported" @@ -173,7 +186,7 @@ def construct_entry(self): entry["end_time_estimated"] = self.params.nexus_details.end_time_estimated # --> definition - self._create_scalar(entry, "definition", "S4", np.string_("NXmx")) + self._create_scalar(entry, "definition", "S4", np.bytes_("NXmx")) # --> sample sample = self.handle["entry"].create_group("sample") @@ -356,7 +369,7 @@ def recursive_setup_basis_dict(key, parent_name="", panel_id=0): det_group.attrs["NX_class"] = "NXdetector_group" det_group.create_dataset("group_index", data=list(range(1, 3)), dtype="i") - data = [np.string_("detector"), np.string_("detector")] + data = [np.bytes_("detector"), np.bytes_("detector")] det_group.create_dataset("group_names", (2,), data=data, dtype="S12") det_group.create_dataset("group_parent", (2,), data=[-1, 1], dtype="i") det_group.create_dataset("group_type", (2,), data=[1, 2], dtype="i") @@ -365,12 +378,17 @@ def recursive_setup_basis_dict(key, parent_name="", panel_id=0): det["description"] = "Detector converted from DIALS models" det["depends_on"] = "/entry/instrument/detector/transformations/AXIS_RAIL" det["gain_setting"] = "auto" - assert len({p.get_material() for p in detector}) == 1 - assert len({p.get_thickness() for p in detector}) == 1 - det["sensor_material"] = detector[0].get_material() - self._create_scalar( - det, "sensor_thickness", "f", 
detector[0].get_thickness() * 1000 - ) + if self.params.detector.sensor_material: + det["sensor_material"] = self.params.detector.sensor_material + else: + assert len({p.get_material() for p in detector}) == 1 + assert len({p.get_thickness() for p in detector}) == 1 + det["sensor_material"] = detector[0].get_material() + if self.params.detector.sensor_thickness: + thickness = self.params.detector.sensor_thickness + else: + thickness = detector[0].get_thickness() + self._create_scalar(det, "sensor_thickness", "f", thickness * 1000) det["sensor_thickness"].attrs["units"] = "microns" if self.params.nexus_details.count_time is not None: self._create_scalar( @@ -402,8 +420,8 @@ def recursive_setup_basis_dict(key, parent_name="", panel_id=0): else: trusted_min, trusted_max = self.params.trusted_range # DIALS definitions match up with NXmx - det.create_dataset("underload_value", (1,), data=[trusted_min], dtype="int32") - det.create_dataset("saturation_value", (1,), data=[trusted_max], dtype="int32") + det.create_dataset("underload_value", (1,), data=[trusted_min], dtype="int") + det.create_dataset("saturation_value", (1,), data=[trusted_max], dtype="int") def find_panel_id(panel): for i in range(len(detector)): diff --git a/src/dxtbx/imageset.py b/src/dxtbx/imageset.py index 67665c272..3ace21567 100644 --- a/src/dxtbx/imageset.py +++ b/src/dxtbx/imageset.py @@ -450,6 +450,10 @@ def from_template( if not check_format: assert not check_headers + # Import here as Format and Imageset have cyclic dependencies + from dxtbx.format.Format import Format + from dxtbx.format.FormatMultiImage import FormatMultiImage + # Check the template is valid if "#" in template: # Get the template image range @@ -459,23 +463,30 @@ def from_template( # Set the image range indices = range(image_range[0], image_range[1] + 1) filenames = _expand_template_to_sorted_filenames(template, indices) + if check_format: + format_class = dxtbx.format.Registry.get_format_class_for_file( + filenames[0] + ) + 
else: + format_class = Format else: + # Note, this assumes image stacks can only be written by dectris detectors, + # but doesn't account for other imagestack formats like MRC. if "master" not in template: raise ValueError("Invalid template") filenames = [template] - - # Import here as Format and Imageset have cyclic dependencies - from dxtbx.format.Format import Format - - # Get the format class - if check_format: - format_class = dxtbx.format.Registry.get_format_class_for_file(filenames[0]) - else: - format_class = Format + indices = range(image_range[0], image_range[1] + 1) + if check_format: + format_class = dxtbx.format.Registry.get_format_class_for_file( + filenames[0] + ) + else: + format_class = FormatMultiImage # Create the sequence object sequence = format_class.get_imageset( filenames, + single_file_indices=indices, template=template, as_sequence=True, beam=beam, @@ -571,11 +582,30 @@ def make_sequence( """Create a sequence""" indices = sorted(indices) + # Import here as Format and Imageset have cyclic dependencies + from dxtbx.format.Format import Format + from dxtbx.format.FormatMultiImage import FormatMultiImage + # Get the template format if "#" in template: filenames = _expand_template_to_sorted_filenames(template, indices) + # Get the format object and reader + if format_class is None: + if check_format: + format_class = dxtbx.format.Registry.get_format_class_for_file( + filenames[0] + ) + else: + format_class = Format else: filenames = [template] + if format_class is None: + if check_format: + format_class = dxtbx.format.Registry.get_format_class_for_file( + filenames[0] + ) + else: + format_class = FormatMultiImage # Set the image range array_range = (min(indices) - 1, max(indices)) @@ -583,18 +613,6 @@ def make_sequence( assert array_range == scan.get_array_range() scan.set_batch_offset(array_range[0]) - # Get the format object and reader - if format_class is None: - # Import here as Format and Imageset have cyclic dependencies - from 
dxtbx.format.Format import Format - - if check_format: - format_class = dxtbx.format.Registry.get_format_class_for_file( - filenames[0] - ) - else: - format_class = Format - return format_class.get_imageset( filenames, beam=beam, diff --git a/src/dxtbx/masking/goniometer_shadow_masking.h b/src/dxtbx/masking/goniometer_shadow_masking.h index 5c0742a01..5af221bae 100644 --- a/src/dxtbx/masking/goniometer_shadow_masking.h +++ b/src/dxtbx/masking/goniometer_shadow_masking.h @@ -217,7 +217,7 @@ namespace dxtbx { namespace masking { goniometer_.set_angles(angles); } - virtual ~GoniometerShadowMasker(){}; + virtual ~GoniometerShadowMasker() {}; protected: MultiAxisGoniometer goniometer_; diff --git a/src/dxtbx/masking/masking.h b/src/dxtbx/masking/masking.h index 2a849c631..3a7c0dd86 100644 --- a/src/dxtbx/masking/masking.h +++ b/src/dxtbx/masking/masking.h @@ -191,8 +191,8 @@ namespace dxtbx { namespace masking { * @param panel The panel model */ ResolutionMaskGenerator(const BeamBase &beam, const Panel &panel) - : resolution_( - scitbx::af::c_grid<2>(panel.get_image_size()[1], panel.get_image_size()[0])) { + : resolution_(scitbx::af::c_grid<2>(panel.get_image_size()[1], + panel.get_image_size()[0])) { vec3 s0 = beam.get_s0(); for (std::size_t j = 0; j < resolution_.accessor()[0]; ++j) { for (std::size_t i = 0; i < resolution_.accessor()[1]; ++i) { diff --git a/src/dxtbx/model/__init__.py b/src/dxtbx/model/__init__.py index 5788c2c07..095e69db7 100644 --- a/src/dxtbx/model/__init__.py +++ b/src/dxtbx/model/__init__.py @@ -1,16 +1,20 @@ from __future__ import annotations import copy +import importlib.metadata +import inspect import json import os import sys +import dateutil.parser from ordered_set import OrderedSet import boost_adaptbx.boost.python import cctbx.crystal import cctbx.sgtbx import cctbx.uctbx +import libtbx.load_env from scitbx import matrix from scitbx.array_family import flex @@ -36,6 +40,7 @@ ExperimentType, Goniometer, GoniometerBase, + History, 
KappaDirection, KappaGoniometer, KappaScanAxis, @@ -73,6 +78,7 @@ ExperimentType, Goniometer, GoniometerBase, + History, KappaDirection, KappaGoniometer, KappaScanAxis, @@ -191,17 +197,17 @@ def as_str(self, show_scan_varying=False): sg = str(self.get_space_group().info()) umat = ( matrix.sqr(self.get_U()) - .mathematica_form(format="% 5.4f", one_row_per_line=True) + .mathematica_form(format="% 7.6f", one_row_per_line=True) .splitlines() ) bmat = ( matrix.sqr(self.get_B()) - .mathematica_form(format="% 5.4f", one_row_per_line=True) + .mathematica_form(format="% 7.6f", one_row_per_line=True) .splitlines() ) amat = ( (matrix.sqr(self.get_U()) * matrix.sqr(self.get_B())) - .mathematica_form(format="% 5.4f", one_row_per_line=True) + .mathematica_form(format="% 7.6f", one_row_per_line=True) .splitlines() ) @@ -599,6 +605,10 @@ def imagesets(self): """Get a list of the unique imagesets.""" return list(OrderedSet([e.imageset for e in self if e.imageset is not None])) + def histories(self) -> list[History]: + """Get a list of the unique history objects.""" + return list(OrderedSet([e.history for e in self if e.history is not None])) + def all_stills(self): """Check if all the experiments are stills""" return all(exp.get_type() == ExperimentType.STILL for exp in self) @@ -629,6 +639,30 @@ def all_same_type(self): return False return True + def consolidate_histories(self) -> History: + """ + Consolidate a list of histories into a single history and set this in each + experiment. + + At the moment, this just combines the lines from the histories and sorts + them by timestamp. 
+ + :return History: The consolidated history + """ + histories = self.histories() + if len(histories) == 0: + lines = [] + else: + lines = [l for h in histories for l in h.get_history()] + lines.sort(key=lambda x: dateutil.parser.isoparse(x.split("|")[0])) + history = History(lines) + + # Set the consolidated history in each experiment + for experiment in self: + experiment.history = history + + return history + def to_dict(self): """Serialize the experiment list to dictionary.""" @@ -660,10 +694,14 @@ def abspath_or_none(filename): for name, models, _ in lookup_members } + # If multiple histories are present, consolidate them + history = self.consolidate_histories() + # Create the output dictionary result = { "__id__": "ExperimentList", "experiment": [], + "history": history.get_history(), } # Add the experiments to the dictionary @@ -746,8 +784,71 @@ def nullify_all_single_file_reader_format_instances(self): if experiment.imageset.reader().is_single_file_reader(): experiment.imageset.reader().nullify_format_instance() - def as_json(self, filename=None, compact=False, split=False): + def as_json( + self, + filename=None, + compact=False, + split=False, + history_as_integrated=False, + history_as_scaled=False, + ): """Dump experiment list as json""" + + # Find the module that called this function for the history + stack = inspect.stack() + this_module = inspect.getmodule(stack[0].frame) + caller_module_name = "Unknown" + for f in stack[1:]: + module = inspect.getmodule(f.frame) + if module != this_module and module is not None: + caller_module_name = module.__name__ + break + + # If that module was called directly, look up via file path + if caller_module_name == "__main__": + caller_module_name = os.path.splitext(os.path.basename(module.__file__))[0] + + # Look up the dispatcher name for the caller module + try: + lookup = {e.module: e.name for e in importlib.metadata.entry_points()} + except AttributeError: # Python < 3.10 + lookup = { + e.module: e.name + for e 
in importlib.metadata.entry_points()["console_scripts"] + } + dispatcher = lookup.get(caller_module_name, caller_module_name) + + # If dispatcher lookup by entry_points did not work, try via libtbx + if dispatcher == caller_module_name: + dispatcher = libtbx.env.dispatcher_name + + # Final fallback to the module name + if dispatcher in ["dials.python", "libtbx.python", "cctbx.python", None]: + dispatcher = caller_module_name + + # Get software version + try: + version = "v" + importlib.metadata.version(dispatcher.split(".")[0]) + except importlib.metadata.PackageNotFoundError: + version = "v?" + + # Set the flags string for the history + flags = [] + if history_as_integrated: + flags.append("integrated") + if history_as_scaled: + flags.append("scaled") + if flags: + flags = ",".join(flags) + else: + flags = "" + + # Consolidate existing history objects + history = self.consolidate_histories() + + # Append the new history line + history.append_history_item(dispatcher, version, flags) + # Get the dictionary and get the JSON string dictionary = self.to_dict() diff --git a/src/dxtbx/model/beam.py b/src/dxtbx/model/beam.py index 3c9899bae..3f46df22d 100644 --- a/src/dxtbx/model/beam.py +++ b/src/dxtbx/model/beam.py @@ -138,9 +138,9 @@ def from_dict(dict: dict, template: dict = None) -> Beam | PolychromaticBeam: if template is not None: if "__id__" in dict and "__id__" in template: - assert ( - dict["__id__"] == template["__id__"] - ), "Beam and template dictionaries are not the same type." + assert dict["__id__"] == template["__id__"], ( + "Beam and template dictionaries are not the same type." 
+ ) if dict is None and template is None: return None diff --git a/src/dxtbx/model/boost_python/detector.cc b/src/dxtbx/model/boost_python/detector.cc index 7f1776822..e79ffbc32 100644 --- a/src/dxtbx/model/boost_python/detector.cc +++ b/src/dxtbx/model/boost_python/detector.cc @@ -271,34 +271,54 @@ namespace dxtbx { namespace model { namespace boost_python { return detector_detail::detector_from_dict(result, obj); } + boost::python::tuple get_ray_intersection_py(const Detector &self, + scitbx::af::const_ref> s1) { + /** + * Wrapper to convert coord_type to Python Tuple + */ + + scitbx::af::shared result = self.get_ray_intersection(s1); + + scitbx::af::shared> xy = + scitbx::af::shared>(result.size()); + scitbx::af::shared panel = + scitbx::af::shared(result.size()); + + for (std::size_t i = 0; i < result.size(); ++i) { + panel[i] = result[i].first; + xy[i] = result[i].second; + } + return boost::python::make_tuple(panel, xy); + } + void export_detector() { using namespace boost::python; using namespace detector_detail; - class_ >("DetectorNode", no_init) + class_>("DetectorNode", no_init) .def("add_group", - (Detector::Node::pointer(Detector::Node::*)()) & Detector::Node::add_group, + (Detector::Node::pointer (Detector::Node::*)())&Detector::Node::add_group, return_internal_reference<>()) .def("add_group", - (Detector::Node::pointer(Detector::Node::*)(const Panel &)) - & Detector::Node::add_group, + (Detector::Node::pointer (Detector::Node::*)( + const Panel &))&Detector::Node::add_group, return_internal_reference<>()) .def("add_panel", - (Detector::Node::pointer(Detector::Node::*)()) & Detector::Node::add_panel, + (Detector::Node::pointer (Detector::Node::*)())&Detector::Node::add_panel, return_internal_reference<>()) .def("add_panel", - (Detector::Node::pointer(Detector::Node::*)(const Panel &)) - & Detector::Node::add_panel, + (Detector::Node::pointer (Detector::Node::*)( + const Panel &))&Detector::Node::add_panel, return_internal_reference<>()) .def("parent", 
- (Detector::Node::pointer(Detector::Node::*)()) & Detector::Node::parent, + (Detector::Node::pointer (Detector::Node::*)())&Detector::Node::parent, return_internal_reference<>()) .def("root", - (Detector::Node::pointer(Detector::Node::*)()) & Detector::Node::root, + (Detector::Node::pointer (Detector::Node::*)())&Detector::Node::root, return_internal_reference<>()) .def("__getitem__", - (Detector::Node::pointer(Detector::Node::*)(std::size_t)) - & Detector::Node::operator[], + (Detector::Node::pointer (Detector::Node::*)(std::size_t))&Detector::Node:: + operator[], return_internal_reference<>()) .def("empty", &Detector::Node::empty) .def("__len__", &Detector::Node::size) @@ -324,31 +344,31 @@ namespace dxtbx { namespace model { namespace boost_python { arg("origin_tolerance") = 1e-6, arg("static_only") = false, arg("ignore_trusted_range") = false)) - .def("__iter__", iterator >()) - .def("children", iterator >()); + .def("__iter__", iterator>()) + .def("children", iterator>()); // Export a Detector base class - class_ >("Detector") + class_>("Detector") .def(init()) .def("hierarchy", - (Detector::node_pointer(Detector::*)()) & Detector::root, + (Detector::node_pointer (Detector::*)())&Detector::root, return_internal_reference<>()) .def("add_group", - (Detector::node_pointer(Detector::*)()) & Detector::add_group, + (Detector::node_pointer (Detector::*)())&Detector::add_group, return_internal_reference<>()) .def("add_group", - (Detector::node_pointer(Detector::*)(const Panel &)) & Detector::add_group, + (Detector::node_pointer (Detector::*)(const Panel &))&Detector::add_group, return_internal_reference<>()) .def("add_panel", - (Detector::node_pointer(Detector::*)()) & Detector::add_panel, + (Detector::node_pointer (Detector::*)())&Detector::add_panel, return_internal_reference<>()) .def("add_panel", - (Detector::node_pointer(Detector::*)(const Panel &)) & Detector::add_panel, + (Detector::node_pointer (Detector::*)(const Panel &))&Detector::add_panel, 
return_internal_reference<>()) .def("__len__", &Detector::size) .def("__setitem__", &detector_set_item) .def("__getitem__", &detector_get_item, return_internal_reference<>()) - .def("__iter__", iterator >()) + .def("__iter__", iterator>()) .def("__eq__", &Detector::operator==) .def("__ne__", &Detector::operator!=) .def("is_similar_to", @@ -363,7 +383,17 @@ namespace dxtbx { namespace model { namespace boost_python { .def("get_max_inscribed_resolution", &Detector::get_max_inscribed_resolution, (arg("s0"))) - .def("get_ray_intersection", &Detector::get_ray_intersection, (arg("s1"))) + .def("get_ray_intersection", + (Detector::coord_type (Detector::*)(vec3) const) + & Detector::get_ray_intersection, + (arg("s1"))) + .def("get_ray_intersection", &get_ray_intersection_py, (arg("s1"))) + .def("get_ray_intersection", + (scitbx::af::shared> (Detector::*)( + scitbx::af::const_ref>, scitbx::af::const_ref) + const) + & Detector::get_ray_intersection, + (arg("s1"), arg("panel"))) .def("get_panel_intersection", &Detector::get_panel_intersection, (arg("s1"))) //.def("do_panels_intersect", // &Detector::do_panels_intersect) @@ -383,7 +413,7 @@ namespace dxtbx { namespace model { namespace boost_python { .staticmethod("from_dict") .def_pickle(DetectorPickleSuite()); - boost_adaptbx::std_pair_conversions::to_and_from_tuple >(); + boost_adaptbx::std_pair_conversions::to_and_from_tuple>(); } }}} // namespace dxtbx::model::boost_python diff --git a/src/dxtbx/model/boost_python/experiment.cc b/src/dxtbx/model/boost_python/experiment.cc index 7a84f30a4..78eba4a02 100644 --- a/src/dxtbx/model/boost_python/experiment.cc +++ b/src/dxtbx/model/boost_python/experiment.cc @@ -31,7 +31,8 @@ namespace dxtbx { namespace model { namespace boost_python { obj.get_profile(), obj.get_imageset(), obj.get_scaling_model(), - obj.get_identifier()); + obj.get_identifier(), + obj.get_history()); } }; @@ -89,15 +90,18 @@ namespace dxtbx { namespace model { namespace boost_python { boost::python::object, 
boost::python::object, boost::python::object, - std::string>((arg("beam") = std::shared_ptr(), - arg("detector") = std::shared_ptr(), - arg("goniometer") = std::shared_ptr(), - arg("scan") = std::shared_ptr(), - arg("crystal") = std::shared_ptr(), - arg("profile") = boost::python::object(), - arg("imageset") = boost::python::object(), - arg("scaling_model") = boost::python::object(), - arg("identifier") = ""))) + std::string, + std::shared_ptr >( + (arg("beam") = std::shared_ptr(), + arg("detector") = std::shared_ptr(), + arg("goniometer") = std::shared_ptr(), + arg("scan") = std::shared_ptr(), + arg("crystal") = std::shared_ptr(), + arg("profile") = boost::python::object(), + arg("imageset") = boost::python::object(), + arg("scaling_model") = boost::python::object(), + arg("identifier") = "", + arg("history") = std::shared_ptr()))) .add_property("beam", &Experiment::get_beam, &Experiment::set_beam) .add_property("detector", &Experiment::get_detector, &Experiment::set_detector) .add_property( @@ -110,6 +114,7 @@ namespace dxtbx { namespace model { namespace boost_python { "scaling_model", &Experiment::get_scaling_model, &Experiment::set_scaling_model) .add_property( "identifier", &Experiment::get_identifier, &Experiment::set_identifier) + .add_property("history", &Experiment::get_history, &Experiment::set_history) .def("__contains__", experiment_contains_pointers::beam()) .def("__contains__", experiment_contains_pointers::detector()) .def("__contains__", experiment_contains_pointers::goniometer()) diff --git a/src/dxtbx/model/boost_python/history.cc b/src/dxtbx/model/boost_python/history.cc new file mode 100644 index 000000000..376de45ac --- /dev/null +++ b/src/dxtbx/model/boost_python/history.cc @@ -0,0 +1,43 @@ +#include +#include +#include +#include +#include + +namespace dxtbx { namespace model { namespace boost_python { + + struct HistoryPickleSuite : boost::python::pickle_suite { + static boost::python::tuple getstate(boost::python::object obj) { + const 
History &history = boost::python::extract(obj)(); + return boost::python::make_tuple(obj.attr("__dict__"), + history.get_history_as_list()); + } + + static void setstate(boost::python::object obj, boost::python::tuple state) { + History &history = boost::python::extract(obj)(); + DXTBX_ASSERT(boost::python::len(state) == 2); + + // restore the object's __dict__ + boost::python::dict d = + boost::python::extract(obj.attr("__dict__"))(); + d.update(state[0]); + + // restore the internal state of the C++ object + history.set_history_from_list( + boost::python::extract(state[1])()); + } + }; + + void export_history() { + using boost::python::arg; + + boost::python::class_("History") + .def(boost::python::init()) + .def("set_history", &History::set_history_from_list) + .def("get_history", &History::get_history_as_list) + .def("append_history_item", + &History::append_history_item, + (arg("dispatcher"), arg("version"), arg("flag"))) + .def_pickle(HistoryPickleSuite()); + } +}}} // namespace dxtbx::model::boost_python \ No newline at end of file diff --git a/src/dxtbx/model/boost_python/model_ext.cc b/src/dxtbx/model/boost_python/model_ext.cc index a484fedd9..2012d3b44 100644 --- a/src/dxtbx/model/boost_python/model_ext.cc +++ b/src/dxtbx/model/boost_python/model_ext.cc @@ -29,6 +29,7 @@ namespace dxtbx { namespace model { namespace boost_python { void export_experiment(); void export_experiment_list(); void export_spectrum(); + void export_history(); BOOST_PYTHON_MODULE(dxtbx_model_ext) { export_beam(); @@ -45,6 +46,7 @@ namespace dxtbx { namespace model { namespace boost_python { export_experiment(); export_experiment_list(); export_spectrum(); + export_history(); } }}} // namespace dxtbx::model::boost_python diff --git a/src/dxtbx/model/crystal.py b/src/dxtbx/model/crystal.py index 825941857..96e983267 100644 --- a/src/dxtbx/model/crystal.py +++ b/src/dxtbx/model/crystal.py @@ -1,5 +1,6 @@ from __future__ import annotations +import iotbx.phil from cctbx.sgtbx import 
space_group as SG from scitbx import matrix @@ -16,6 +17,25 @@ MosaicCrystalSauter2014, ) +crystal_phil_scope = iotbx.phil.parse( + """ + crystal + .expert_level = 2 + .short_caption = "Crystal overrides" + { + unit_cell = None + .type = unit_cell + + A_matrix = None + .type = floats(size=9) + .help = "The crystal setting A=UB matrix. If set, this will override the unit cell." + + space_group = None + .type = space_group + } +""" +) + class CrystalFactory: @staticmethod @@ -126,3 +146,35 @@ def from_mosflm_matrix( _c = rotate_mosflm_to_imgCIF * c return Crystal(_a, _b, _c, space_group=space_group) + + @staticmethod + def from_phil( + params: iotbx.phil.scope_extract, + reference: Crystal | None = None, + ) -> Crystal: + """ + Convert the phil parameters into a crystal model + """ + + all_params = [ + params.crystal.unit_cell, + params.crystal.A_matrix, + params.crystal.space_group, + ] + if all_params.count(None) == len(all_params): + return reference + + if reference is None: + crystal = Crystal((1, 0, 0), (0, 1, 0), (0, 0, 1), "P1") + else: + crystal = reference + crystal.reset_scan_points() + + if params.crystal.unit_cell is not None: + crystal.set_unit_cell(params.crystal.unit_cell) + if params.crystal.A_matrix is not None: + crystal.set_A(params.crystal.A_matrix) + if params.crystal.space_group is not None: + crystal.set_space_group(params.crystal.space_group.group()) + + return crystal diff --git a/src/dxtbx/model/detector.h b/src/dxtbx/model/detector.h index 0639443d1..99b2e51b3 100644 --- a/src/dxtbx/model/detector.h +++ b/src/dxtbx/model/detector.h @@ -49,11 +49,11 @@ namespace dxtbx { namespace model { * @param x The set of input points * @return The points in the convex hull */ - inline scitbx::af::shared > convex_hull( - const scitbx::af::const_ref > &x) { + inline scitbx::af::shared> convex_hull( + const scitbx::af::const_ref> &x) { DXTBX_ASSERT(x.size() > 2); - scitbx::af::shared > result; + scitbx::af::shared> result; // Find the leftmost point 
std::size_t current = 0; @@ -446,7 +446,7 @@ namespace dxtbx { namespace model { bool is_panel_; }; - typedef std::pair > coord_type; + typedef std::pair> coord_type; typedef Node::pointer node_pointer; typedef Node::const_pointer const_node_pointer; typedef Panel panel_type; @@ -668,7 +668,7 @@ namespace dxtbx { namespace model { vec3 xa = za.cross(ya).normalize(); // Compute the stereographic projection of panel corners - scitbx::af::shared > points; + scitbx::af::shared> points; for (std::size_t i = 0; i < size(); ++i) { std::size_t width = (*this)[i].get_image_size()[0]; std::size_t height = (*this)[i].get_image_size()[1]; @@ -688,7 +688,7 @@ namespace dxtbx { namespace model { } // Compute the convex hull of points - scitbx::af::shared > hull = detail::convex_hull(points.const_ref()); + scitbx::af::shared> hull = detail::convex_hull(points.const_ref()); DXTBX_ASSERT(hull.size() >= 4); // Compute the minimum distance to the line segments @@ -738,6 +738,30 @@ namespace dxtbx { namespace model { return pxy; } + /** Get ray intersection with detector */ + scitbx::af::shared get_ray_intersection( + scitbx::af::const_ref> s1) const { + scitbx::af::shared result = scitbx::af::shared(s1.size()); + + for (std::size_t i = 0; i < s1.size(); ++i) { + result[i] = get_ray_intersection(s1[i]); + } + return result; + } + + /** Get ray intersection with detector */ + scitbx::af::shared> get_ray_intersection( + scitbx::af::const_ref> s1, + scitbx::af::const_ref panel) const { + DXTBX_ASSERT(s1.size() == panel.size()); + scitbx::af::shared> xy = scitbx::af::shared>(s1.size()); + + for (std::size_t i = 0; i < s1.size(); ++i) { + xy[i] = (*this)[panel[i]].get_ray_intersection(s1[i]); + } + return xy; + } + /** finds the panel id with which s1 intersects. Returns -1 if none do. 
**/ int get_panel_intersection(vec3 s1) { int found_panel = -1; diff --git a/src/dxtbx/model/experiment.h b/src/dxtbx/model/experiment.h index dc31457e6..ef6036a7b 100644 --- a/src/dxtbx/model/experiment.h +++ b/src/dxtbx/model/experiment.h @@ -24,6 +24,7 @@ #include #include #include +#include #include namespace dxtbx { namespace model { @@ -42,6 +43,8 @@ namespace dxtbx { namespace model { * - crystal The crystal model * - profile The profile model * - scaling_model The scaling model + * - identifier The experiment identifier + * - history The serialization history of the experiment * * Some of these may be set to "None" * @@ -61,7 +64,8 @@ namespace dxtbx { namespace model { boost::python::object profile, boost::python::object imageset, boost::python::object scaling_model, - std::string identifier) + std::string identifier, + std::shared_ptr history) : beam_(beam), detector_(detector), goniometer_(goniometer), @@ -70,7 +74,8 @@ namespace dxtbx { namespace model { profile_(profile), imageset_(imageset), scaling_model_(scaling_model), - identifier_(identifier) {} + identifier_(identifier), + history_(history) {} /** * Check if the beam model is the same. 
@@ -248,6 +253,14 @@ namespace dxtbx { namespace model { return identifier_; } + void set_history(std::shared_ptr history) { + history_ = history; + } + + std::shared_ptr get_history() const { + return history_; + } + protected: std::shared_ptr beam_; std::shared_ptr detector_; @@ -258,6 +271,7 @@ namespace dxtbx { namespace model { boost::python::object imageset_; boost::python::object scaling_model_; std::string identifier_; + std::shared_ptr history_; }; }} // namespace dxtbx::model diff --git a/src/dxtbx/model/experiment_list.py b/src/dxtbx/model/experiment_list.py index 0619766bb..de193aa67 100644 --- a/src/dxtbx/model/experiment_list.py +++ b/src/dxtbx/model/experiment_list.py @@ -30,6 +30,7 @@ Experiment, ExperimentList, GoniometerFactory, + History, ProfileModelFactory, ScanFactory, ) @@ -510,6 +511,12 @@ def decode(self): ) ) + # Add the history to the experiments if it exists + if "history" in self._obj: + history = History(self._obj["history"]) + for expt in el: + expt.history = history + return el def _make_mem_imageset(self, imageset): diff --git a/src/dxtbx/model/history.h b/src/dxtbx/model/history.h new file mode 100644 index 000000000..3bfce609d --- /dev/null +++ b/src/dxtbx/model/history.h @@ -0,0 +1,73 @@ +#ifndef DXTBX_MODEL_HISTORY_H +#define DXTBX_MODEL_HISTORY_H + +#include +#include +#include +#include +#include + +namespace dxtbx { namespace model { + + /** + * This class keeps track of the serialization history of an experiment. 
+ */ + class History { + public: + History() {} + + History(const boost::python::list &history) { + set_history_from_list(history); + } + + void set_history(const std::vector &history) { + history_ = history; + } + + void append_history_item(const std::string &dispatcher, + const std::string &version, + const std::string &flag) { + // Timestamp as current UTC time + boost::posix_time::ptime now_utc = + boost::posix_time::second_clock::universal_time(); + std::string utc_string = boost::posix_time::to_iso_extended_string(now_utc) + "Z"; + + // Format the message + std::string message = utc_string + "|" + dispatcher + "|" + version; + if (!flag.empty()) { + message += "|" + flag; + } + history_.push_back(message); + } + + void set_history_from_list(const boost::python::list &history) { + history_.clear(); + + long length = boost::python::len(history); + history_.reserve(length); + + for (long i = 0; i < length; ++i) { + boost::python::extract extractor(history[i]); + history_.push_back(extractor()); + } + } + + std::vector get_history() const { + return history_; + } + + boost::python::list get_history_as_list() const { + boost::python::list result; + for (const auto &item : history_) { + result.append(item); + } + return result; + } + + private: + std::vector history_; + }; + +}} // namespace dxtbx::model + +#endif // DXTBX_MODEL_HISTORY_H \ No newline at end of file diff --git a/src/dxtbx/model/tof_helpers.py b/src/dxtbx/model/tof_helpers.py index 243ce0099..920d78524 100644 --- a/src/dxtbx/model/tof_helpers.py +++ b/src/dxtbx/model/tof_helpers.py @@ -1,10 +1,27 @@ from __future__ import annotations +import xml +from typing import Dict, Generic, List, Tuple, TypeVar + +import numpy as np from scipy.constants import Planck, m_n from scipy.interpolate import interp1d import cctbx.array_family.flex as flex +Shape = TypeVar("Shape") +DType = TypeVar("DType") + + +class Array(np.ndarray, Generic[Shape, DType]): + pass + + +vec3float = Array["3", float] +vec2float = 
Array["2", float] +vec3int = Array["3", int] +vec2int = Array["2", int] + def wavelength_from_tof( distance: float | flex.double, tof: float | flex.double @@ -55,3 +72,450 @@ def tof_to_frame_interpolator(tof: list[float], frames: list[float]) -> interp1d assert all(i < j for i, j in zip(frames, frames[1:])) assert all(i < j for i, j in zip(tof, tof[1:])) return interp1d(tof, frames, kind="cubic") + + +class InstrumentDefinitionReader: + """ + Class to obtain instrument information in dxtbx format from an + Instrument Definition File as described here: + https://docs.mantidproject.org/nightly/concepts/InstrumentDefinitionFile.html#instrumentdefinitionfile + + Example usage: + >>> import h5py + >>> h5_file = h5py.File("path/to/nexus_file") + >>> xml_string = h5_file["path/to/xml_file"] + >>> import xml.etree.ElementTree as ET + >>> xml_file = ET.fromstring(xml_string) + >>> from dxtx.model.tof_helpers import InstrumentDefinitionReader + >>> reader = InstrumentDefinitionReader() + >>> origins, fast_axes, slow_axes = reader.get_dials_detector_geometry(xml_file) + """ + + class Panel: + """ + Class to store panel properties in spherical coordinates + """ + + def __init__( + self, + idx: int, + centre_origin_in_m: float, + gam_in_deg: float, + nu_in_deg: float, + num_pixels: vec2int, + pixel_size_in_m: vec2float, + x_orientation: vec2int = np.array((1, 0)), + y_orientation: vec2int = np.array((0, 1)), + ) -> None: + self.idx = idx + self.centre_origin_in_m = centre_origin_in_m + self.gam_in_deg = gam_in_deg + self.nu_in_deg = nu_in_deg + self.num_pixels = num_pixels + self.pixel_size_in_m = pixel_size_in_m + self.x_orientation = x_orientation + self.y_orientation = y_orientation + + def panel_size_in_m(self) -> vec2float: + panel_size = [] + for i in range(len(self.num_pixels)): + panel_size.append(self.num_pixels[i] * self.pixel_size_in_m[i]) + return np.array(panel_size) + + def orientations_flipped(self) -> bool: + abs_x = tuple([abs(i) for i in self.x_orientation]) 
+ abs_y = tuple([abs(i) for i in self.y_orientation]) + return abs_x == (0, 1) and abs_y == (1, 0) + + def orientation_direction_flipped(self) -> bool: + return sum(self.x_orientation) == sum(self.y_orientation) == -1 + + def __repr__(self) -> None: + return f"idx: {self.idx} \n \ + centre origin (m): {self.centre_origin_in_m} \n \ + gam (deg): {self.gam_in_deg} \n \ + nu (deg): {self.nu_in_deg} \n \ + num pixels: {self.num_pixels} \n \ + pixel size (m): {self.pixel_size_in_m} \n \ + x_orientation: {self.x_orientation} \n \ + y_orientation: {self.y_orientation}" + + def is_panel(self, tree_component: xml.etree.ElementTree.Element) -> bool: + if "type" not in tree_component.attrib: + return False + return ( + "panel" in tree_component.attrib["type"] + and "location" in tree_component[0].tag + ) + + def get_rotation_vals( + self, rot: xml.etree.ElementTree.Element + ) -> Tuple[float, vec3float]: + if "rot" in rot.attrib: + val = float(rot.attrib["rot"]) + elif "val" in rot.attrib: + val = float(rot.attrib["val"]) + else: + return None + try: + x = int(rot.attrib["axis-x"]) + y = int(rot.attrib["axis-y"]) + z = int(rot.attrib["axis-z"]) + except KeyError: # if no axes given axis-z is assumed + x = 0 + y = 0 + z = 1 + return (val, (x, y, z)) + + def get_rotations( + self, + line: xml.etree.ElementTree.Element, + rotations: List[Tuple[float, vec3float]], + ) -> List[Tuple[float, vec3float]]: + rotation_vals = self.get_rotation_vals(line) + if rotation_vals is not None: + rotations.append(self.get_rotation_vals(line)) + try: + return self.get_rotations(line[0], rotations=rotations) + except IndexError: + return rotations + + def x_project_v(self, x: vec3float, v: vec3float) -> Dict[str, vec3float]: + """Project x onto v, returning parallel and perpendicular components + >> d = xProject(x, v) + >> np.allclose(d['par'] + d['perp'], x) + True + """ + par = self.x_par_v(x, v) + perp = x - par + return {"par": par, "perp": perp} + + def rotate_about(self, a: vec3float, b: 
vec3float, theta: float) -> vec3float: + if np.isclose(abs(np.dot(a, b)), 1): + return a + """Rotate vector a about vector b by theta radians.""" + + proj = self.x_project_v(a, b) + w = np.cross(b, proj["perp"]) + unit_w = w / np.linalg.norm(w) + return ( + proj["par"] + + proj["perp"] * np.cos(theta) + + np.linalg.norm(proj["perp"]) * unit_w * np.sin(theta) + ) + + def x_par_v(self, x: vec3float, v: vec3float) -> vec3float: + """Project x onto v. Result will be parallel to v.""" + return np.dot(x, v) / np.dot(v, v) * v + + def x_perp_v(self, x: vec3float, v: vec3float) -> vec3float: + """Component of x orthogonal to v. Result is perpendicular to v.""" + return x - self.x_par_v(x, v) + + def get_panel_axes( + self, + xml_file: xml.etree.ElementTree.Element, + init_fast_axis: vec3float, + init_slow_axis: vec3float, + ) -> Tuple[vec3float, vec3float, vec3float]: + def get_rot_axis( + raw_rot_axis: vec3float, + fast_axis: vec3float, + slow_axis: vec3float, + z_axis: vec3float, + init_fast_axis: vec3float, + init_slow_axis: vec3float, + init_z_axis: vec3float, + ) -> vec3float: + if abs(np.dot(raw_rot_axis, init_fast_axis)) == 1: + return fast_axis + if abs(np.dot(raw_rot_axis, init_slow_axis)) == 1: + return slow_axis + if abs(np.dot(raw_rot_axis, init_z_axis)) == 1: + return z_axis + raise NotImplementedError + + def get_axes( + rotations: List[Tuple[float, vec3float]], + init_fast_axis: vec3float, + init_slow_axis: vec3float, + init_z_axis: vec3float, + ) -> Tuple[Tuple[vec3float], Tuple[vec3float]]: + fast_axis = init_fast_axis + slow_axis = init_slow_axis + z_axis = init_z_axis + for rot in rotations: + raw_rot_axis = np.array(rot[1]) + rot_val = np.deg2rad(rot[0]) + rot_axis = get_rot_axis( + raw_rot_axis, + fast_axis, + slow_axis, + z_axis, + init_fast_axis, + init_slow_axis, + init_z_axis, + ) + + fast_axis = self.rotate_about(fast_axis, rot_axis, rot_val) + slow_axis = self.rotate_about(slow_axis, rot_axis, rot_val) + z_axis = self.rotate_about(z_axis, 
rot_axis, rot_val) + return fast_axis, slow_axis, z_axis + + slow_axes = [] + fast_axes = [] + init_z_axis = np.cross(init_fast_axis, init_slow_axis) + for child in xml_file: + if self.is_panel(child): + panel = child[0] + rotations = [] + rotations = self.get_rotations( + line=panel, + rotations=rotations, + ) + axes = get_axes(rotations, init_fast_axis, init_slow_axis, init_z_axis) + fast_axes.append(tuple(axes[0])) + slow_axes.append(tuple(axes[1])) + return fast_axes, slow_axes + + def shift_origin_centre_to_top_left( + self, + centre_origin_in_m: vec3float, + fast_axis: vec3float, + slow_axis: vec3float, + panel_size_in_m: vec2float, + ) -> vec3float: + # Assumes fast and slow axes are unit vectors + + slow_axis = slow_axis * panel_size_in_m[1] * 0.5 + fast_axis = fast_axis * panel_size_in_m[0] * 0.5 + return centre_origin_in_m - slow_axis - fast_axis + + def rotations_to_spherical_coordinates( + self, + zeroth_pixel_origin: vec2float, + rotations: Tuple[Tuple[float, vec3int], ...], + ) -> vec2float: + """ + Corrects rotations for zeroth_pixel_origin and returns + the gam and nu angles in spherical coordinates. 
+ """ + + gam = rotations[0][0] + + try: + nu = rotations[1][0] + except IndexError: + nu = 0 + + xstart, ystart = zeroth_pixel_origin + if np.sign(xstart) == np.sign(ystart): + if abs(nu) > 0: + nu *= -1 + else: + gam *= -1 + + elif abs(nu) > 0: + gam -= 180 + + return gam, nu + + def get_panel_types( + self, xml_file: xml.etree.ElementTree.Element + ) -> Dict[str, float | int]: + def is_panel_settings(tree_element: xml.etree.ElementTree.Element) -> bool: + required_fields = [ + "xstart", + "ystart", + "xpixels", + "ypixels", + "xstep", + "ystep", + ] + for i in required_fields: + if i not in tree_element.attrib: + return False + + return True + + panel_types = {} + + for child in xml_file: + if is_panel_settings(child): + key = child.attrib["name"] + xstart = float(child.attrib["xstart"]) + ystart = float(child.attrib["ystart"]) + xpixels = int(child.attrib["xpixels"]) + ypixels = int(child.attrib["ypixels"]) + xpixel_size = abs(float(child.attrib["xstep"])) + ypixel_size = abs(float(child.attrib["ystep"])) + panel_types[key] = { + "xstart": xstart, + "ystart": ystart, + "xpixels": xpixels, + "ypixels": ypixels, + "xpixel_size": xpixel_size, + "ypixel_size": ypixel_size, + } + + return panel_types + + def get_panels(self, xml_file: xml.etree.ElementTree.Element) -> Tuple[Panel]: + def panel_name_to_idx(name: str) -> int: + return int("".join(filter(str.isdigit, name))) + + def get_cartesian_origin(panel_attrib): + try: + x = float(panel_attrib["x"]) + y = float(panel_attrib["y"]) + z = float(panel_attrib["z"]) + except KeyError: # assume spherical coordinates + r = float(panel_attrib["r"]) + t = np.deg2rad(float(panel_attrib["t"])) + p = np.deg2rad(float(panel_attrib["p"])) + x = r * np.sin(t) * np.cos(p) + y = r * np.sin(t) * np.sin(p) + z = r * np.cos(t) + return x, y, z + + def get_rotations_from_origin( + panel_attrib: Dict[str, str], + ) -> Tuple[float, float]: + return float(panel_attrib["t"]), float(panel_attrib["p"]) + + def get_rotation_vals( + rot: 
xml.etree.ElementTree.Element, + ) -> Tuple[float, vec3float]: + val = float(rot.attrib["val"]) + try: + x = int(rot.attrib["axis-x"]) + y = int(rot.attrib["axis-y"]) + z = int(rot.attrib["axis-z"]) + except KeyError: # if no axes given axis-z is assumed + x = 0 + y = 0 + z = 1 + return (val, (x, y, z)) + + def get_rotations( + line: xml.etree.ElementTree.Element, + rotations: List[Tuple[float, vec3float]], + ) -> List[Tuple[float, vec3float]]: + rotations.append(get_rotation_vals(line)) + try: + return get_rotations(line[0], rotations=rotations) + except IndexError: + return rotations + + def get_panel_orentation(name: str) -> Tuple[vec2int, vec2int]: + """ + This is a hack for SXD, where the Mantid SXD_Definition.xml + does not seem to identify SXD panel 1 as upsidedown. + """ + + if name == "bank1": + return np.array((0, -1)), np.array((-1, 0)) + else: + return np.array((1, 0)), np.array((0, 1)) + + panels = [] + panel_types = self.get_panel_types(xml_file) + + for child in xml_file: + if self.is_panel(child): + panel = child[0] + name = panel.attrib["name"] + idx = panel_name_to_idx(name) + x, y, z = get_cartesian_origin(panel.attrib) + rotation_start = panel[0] + rotations = [] + rotations = get_rotations( + line=rotation_start, + rotations=rotations, + ) + + panel_type = child.attrib["type"] + panel_info = panel_types[panel_type] + + zeroth_pixel_origin = (panel_info["xstart"], panel_info["ystart"]) + gam_in_deg, nu_in_deg = self.rotations_to_spherical_coordinates( + zeroth_pixel_origin=zeroth_pixel_origin, rotations=rotations + ) + try: + gam_in_deg, nu_in_deg = get_rotations_from_origin(panel.attrib) + except KeyError: + gam_in_deg = 0 + nu_in_deg = 0 + + num_pixels = ( + panel_info["xpixels"], + panel_info["ypixels"], + ) + + pixel_size_in_m = (panel_info["xpixel_size"], panel_info["ypixel_size"]) + + x_or, y_or = get_panel_orentation(name=name) + + panels.append( + InstrumentDefinitionReader.Panel( + idx=idx, + centre_origin_in_m=(x, y, z), + 
gam_in_deg=gam_in_deg, + nu_in_deg=nu_in_deg, + num_pixels=num_pixels, + pixel_size_in_m=pixel_size_in_m, + x_orientation=x_or, + y_orientation=y_or, + ) + ) + + return tuple(panels) + + def panel_idx_to_name(self, idx: int) -> str: + return "bank" + str(idx) + + def get_panel_names(self, xml_file: xml.etree.ElementTree.Element) -> Tuple[str]: + panels = self.get_panels(xml_file) + panel_names = [self.panel_idx_to_name(i.idx) for i in panels] + return panel_names + + def get_num_panels(self, xml_file: xml.etree.ElementTree.Element) -> int: + return len(self.get_panels(xml_file)) + + def get_dials_detector_geometry( + self, xml_file: xml.etree.ElementTree.Element + ) -> Tuple[Tuple[vec3float], Tuple[vec3float], Tuple[vec3float]]: + # Get panel data in spherical coordinates + panels = self.get_panels(xml_file) + panel_dict = {self.panel_idx_to_name(i.idx): i for i in panels} + + # Get panel axes + init_slow_axis = np.array((0, 1, 0)) + init_fast_axis = np.array((1, 0, 0)) + + fast_axes, slow_axes = self.get_panel_axes( + xml_file, init_fast_axis, init_slow_axis + ) + + # Get panel origins + origins = [] + count = 0 + for name in panel_dict: + new_panel = panel_dict[name] + fast_axis = np.array(fast_axes[count]) + slow_axis = np.array(slow_axes[count]) + top_left_origin_in_m = self.shift_origin_centre_to_top_left( + centre_origin_in_m=new_panel.centre_origin_in_m, + fast_axis=fast_axis, + slow_axis=slow_axis, + panel_size_in_m=new_panel.panel_size_in_m(), + ) + top_left_origin_in_mm = np.array([i * 1000 for i in top_left_origin_in_m]) + + origins.append(tuple(top_left_origin_in_mm)) + count += 1 + + origins = tuple(tuple(float(v) for v in vec) for vec in origins) + fast_axes = tuple(tuple(float(v) for v in vec) for vec in fast_axes) + slow_axes = tuple(tuple(float(v) for v in vec) for vec in slow_axes) + return origins, fast_axes, slow_axes diff --git a/src/dxtbx/nexus/__init__.py b/src/dxtbx/nexus/__init__.py index ec37e6a0a..8dc71d88c 100644 --- 
a/src/dxtbx/nexus/__init__.py +++ b/src/dxtbx/nexus/__init__.py @@ -225,7 +225,7 @@ def get_dxtbx_scan( oscillation = tuple(float(f) for f in (start.magnitude, step.magnitude)) if nxdetector.frame_time is not None: - frame_time = nxdetector.frame_time.to("seconds").magnitude + frame_time = float(nxdetector.frame_time.to("seconds").magnitude) exposure_times = flex.double(num_images, frame_time) epochs = flex.double_range(0, num_images) * frame_time else: @@ -441,7 +441,7 @@ def equipment_component_key(dependency): material = KNOWN_SENSOR_MATERIALS.get(nxdetector.sensor_material) if not material: raise ValueError(f"Unknown material: {nxdetector.sensor_material}") - thickness = nxdetector.sensor_thickness.to("mm").magnitude + thickness = float(nxdetector.sensor_thickness.to("mm").magnitude) table = eltbx.attenuation_coefficient.get_table(material) mu = table.mu_at_angstrom(wavelength) / 10.0 px_mm = dxtbx.model.ParallaxCorrectedPxMmStrategy(mu, thickness) diff --git a/src/dxtbx/serialize/xds.py b/src/dxtbx/serialize/xds.py index e79ef1c9c..1e4694739 100644 --- a/src/dxtbx/serialize/xds.py +++ b/src/dxtbx/serialize/xds.py @@ -146,6 +146,8 @@ def xds_detector_name(dxtbx_name): return "MAR345" if "mar" in dxtbx_name: return "MAR" + if "singla" in dxtbx_name: + return "SINGLA" if "unknown" in dxtbx_name: return "ADSC" diff --git a/src/dxtbx/util/rotate_and_average.py b/src/dxtbx/util/rotate_and_average.py index 61012ea7d..02fb53e29 100644 --- a/src/dxtbx/util/rotate_and_average.py +++ b/src/dxtbx/util/rotate_and_average.py @@ -16,7 +16,7 @@ def rotate_and_average(data, angle, deg=False, mask=None): ny, nx = np.shape(data) xx, yy = np.meshgrid(np.arange(nx), np.arange(ny)) - xx_yy = np.row_stack((xx.ravel(), yy.ravel())) + xx_yy = np.vstack((xx.ravel(), yy.ravel())) R = np.array(((np.cos(angle), -np.sin(angle)), (np.sin(angle), np.cos(angle)))) xx_yy_rotated = np.matmul(R, xx_yy) xx_rotated = xx_yy_rotated[0, :].reshape((ny, nx)) diff --git a/tests/conftest.py 
b/tests/conftest.py index cc1761ff3..effe2e610 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,9 +27,7 @@ def nxmx_example(): instrument = entry.create_group("instrument") instrument.attrs["NX_class"] = "NXinstrument" - name = instrument.create_dataset( - "name", data=np.string_("DIAMOND BEAMLINE I03") - ) + name = instrument.create_dataset("name", data=np.bytes_("DIAMOND BEAMLINE I03")) name.attrs["short_name"] = "I03" beam = instrument.create_group("beam") diff --git a/tests/format/test_FormatISISSXD.py b/tests/format/test_FormatISISSXD.py index 361e6cbf0..cf01e6e81 100644 --- a/tests/format/test_FormatISISSXD.py +++ b/tests/format/test_FormatISISSXD.py @@ -294,17 +294,226 @@ def test_import(nacl): "pedestal": 0.0, "px_mm_strategy": {"type": "SimplePxMmStrategy"}, "children": [ - {"panel": 0}, - {"panel": 1}, - {"panel": 2}, - {"panel": 3}, - {"panel": 4}, - {"panel": 5}, - {"panel": 6}, - {"panel": 7}, - {"panel": 8}, - {"panel": 9}, - {"panel": 10}, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 0}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 1}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + 
"raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 2}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 3}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 4}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 5}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + 
"pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 6}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 7}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 8}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 9}], + }, + { + "name": "", + "type": "", + "fast_axis": (1.0, 0.0, 0.0), + "slow_axis": (0.0, 1.0, 0.0), + "origin": (0.0, 0.0, 0.0), + "raw_image_offset": (0, 0), + "image_size": (0, 0), + "pixel_size": (0.0, 0.0), + "trusted_range": (0.0, 0.0), + "thickness": 0.0, + "material": "", + "mu": 0.0, + "identifier": "", + "mask": [], + "gain": 1.0, + "pedestal": 0.0, + "px_mm_strategy": {"type": "SimplePxMmStrategy"}, + "children": [{"panel": 10}], + }, ], }, } diff --git a/tests/format/test_FormatNXmxDLS16M.py b/tests/format/test_FormatNXmxDLS16M.py index 
56ecc1434..a9ba1d2a1 100644 --- a/tests/format/test_FormatNXmxDLS16M.py +++ b/tests/format/test_FormatNXmxDLS16M.py @@ -200,11 +200,11 @@ def test_understand(beamline, tmp_path): with h5py.File(nxs, mode="w") as fh: entry = fh.create_group("entry") instrument = entry.create_group("instrument") - instrument.attrs["short_name"] = np.string_(f"DLS {beamline}") + instrument.attrs["short_name"] = np.bytes_(f"DLS {beamline}") name = instrument.create_dataset( - "name", data=np.string_(f"DIAMOND BEAMLINE {beamline}") + "name", data=np.bytes_(f"DIAMOND BEAMLINE {beamline}") ) - name.attrs["short_name"] = np.string_(f"DLS {beamline}") + name.attrs["short_name"] = np.bytes_(f"DLS {beamline}") assert FormatNXmxDLS16M.understand(nxs) assert FormatNXmxDLS.understand(nxs) @@ -216,9 +216,9 @@ def test_understand_legacy(beamline, tmp_path): with h5py.File(nxs, mode="w") as fh: entry = fh.create_group("entry") instrument = entry.create_group("instrument") - instrument.attrs["short_name"] = np.string_(f"{beamline}") - name = instrument.create_dataset("name", data=np.string_(f"{beamline}")) - name.attrs["short_name"] = np.string_(f"{beamline}") + instrument.attrs["short_name"] = np.bytes_(f"{beamline}") + name = instrument.create_dataset("name", data=np.bytes_(f"{beamline}")) + name.attrs["short_name"] = np.bytes_(f"{beamline}") assert FormatNXmxDLS16M.understand(nxs) assert FormatNXmxDLS.understand(nxs) @@ -236,9 +236,7 @@ def test_do_not_understand_i24(tmp_path): with h5py.File(nxs, mode="w") as fh: entry = fh.create_group("entry") instrument = entry.create_group("instrument") - instrument.attrs["short_name"] = np.string_("DLS I24") - name = instrument.create_dataset( - "name", data=np.string_("DIAMOND BEAMLINE I24") - ) - name.attrs["short_name"] = np.string_("DLS I24") + instrument.attrs["short_name"] = np.bytes_("DLS I24") + name = instrument.create_dataset("name", data=np.bytes_("DIAMOND BEAMLINE I24")) + name.attrs["short_name"] = np.bytes_("DLS I24") assert not 
FormatNXmxDLS16M.understand(nxs) diff --git a/tests/format/test_format.py b/tests/format/test_format.py index 3b951ba8d..806af7d86 100644 --- a/tests/format/test_format.py +++ b/tests/format/test_format.py @@ -6,7 +6,6 @@ def test_reading_refl_failure(dials_data): test_data = dials_data("centroid_test_data", pathlib=True) - # Without dials_regression, none of the dials-data tests check for this "invalid binary data" case assert Registry.get_format_class_for_file(test_data / "indexed.refl") is None # Check .expt while here assert Registry.get_format_class_for_file(test_data / "indexed.expt") is None diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index d71b8fefa..1ee31d245 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -1,7 +1,6 @@ from __future__ import annotations import math -import os import pickle import pytest @@ -120,9 +119,10 @@ def _construct_shadow_masker(goniometer): @pytest.fixture -def dls_i23_experiment(dials_regression): - experiments_file = os.path.join( - dials_regression, "shadow_test_data/DLS_I23_Kappa/data_1_0400.cbf.gz" +def dls_i23_experiment(dials_data): + experiments_file = str( + dials_data("shadow_test_data", pathlib=True) + / "DLS_I23_Kappa-data_1_0400.cbf.gz" ) experiments = ExperimentListFactory.from_filenames([experiments_file]) return experiments[0] diff --git a/tests/model/test_crystal_model.py b/tests/model/test_crystal_model.py index c694f181d..8e817c08b 100644 --- a/tests/model/test_crystal_model.py +++ b/tests/model/test_crystal_model.py @@ -7,6 +7,7 @@ from cctbx import crystal, sgtbx, uctbx from cctbx.sgtbx import change_of_basis_op +from iotbx.phil import parse from libtbx.test_utils import approx_equal from scitbx import matrix from scitbx.array_family import flex @@ -18,6 +19,7 @@ MosaicCrystalKabsch2010, MosaicCrystalSauter2014, ) +from dxtbx.model.crystal import crystal_phil_scope from .crystal_model_old import crystal_model_old @@ -149,20 +151,20 @@ def 
test_crystal_model(): assert approx_equal(b_, R * real_space_b) assert approx_equal(c_, R * real_space_c) assert ( - str(model).replace("-0.0000", " 0.0000") + str(model).replace("-0.000000", " 0.000000") == """\ Crystal: Unit cell: 10.000, 11.000, 12.000, 90.000, 90.000, 90.000 Space group: P 1 - U matrix: {{ 0.4330, -0.7500, 0.5000}, - { 0.7891, 0.0474, -0.6124}, - { 0.4356, 0.6597, 0.6124}} - B matrix: {{ 0.1000, 0.0000, 0.0000}, - { 0.0000, 0.0909, 0.0000}, - { 0.0000, 0.0000, 0.0833}} - A = UB: {{ 0.0433, -0.0682, 0.0417}, - { 0.0789, 0.0043, -0.0510}, - { 0.0436, 0.0600, 0.0510}}""" + U matrix: {{ 0.433013, -0.750000, 0.500000}, + { 0.789149, 0.047367, -0.612372}, + { 0.435596, 0.659740, 0.612372}} + B matrix: {{ 0.100000, 0.000000, 0.000000}, + { 0.000000, 0.090909, 0.000000}, + { 0.000000, 0.000000, 0.083333}} + A = UB: {{ 0.043301, -0.068182, 0.041667}, + { 0.078915, 0.004306, -0.051031}, + { 0.043560, 0.059976, 0.051031}}""" ) model.set_B((1 / 12, 0, 0, 0, 1 / 12, 0, 0, 0, 1 / 12)) assert approx_equal(model.get_unit_cell().parameters(), (12, 12, 12, 90, 90, 90)) @@ -329,20 +331,20 @@ def test_MosaicCrystalKabsch2010(): space_group_symbol="P 1", ) assert ( - str(mosaic_model).replace("-0.0000", " 0.0000") + str(mosaic_model).replace("-0.000000", " 0.000000") == """\ Crystal: Unit cell: 10.000, 11.000, 12.000, 90.000, 90.000, 90.000 Space group: P 1 - U matrix: {{ 1.0000, 0.0000, 0.0000}, - { 0.0000, 1.0000, 0.0000}, - { 0.0000, 0.0000, 1.0000}} - B matrix: {{ 0.1000, 0.0000, 0.0000}, - { 0.0000, 0.0909, 0.0000}, - { 0.0000, 0.0000, 0.0833}} - A = UB: {{ 0.1000, 0.0000, 0.0000}, - { 0.0000, 0.0909, 0.0000}, - { 0.0000, 0.0000, 0.0833}} + U matrix: {{ 1.000000, 0.000000, 0.000000}, + { 0.000000, 1.000000, 0.000000}, + { 0.000000, 0.000000, 1.000000}} + B matrix: {{ 0.100000, 0.000000, 0.000000}, + { 0.000000, 0.090909, 0.000000}, + { 0.000000, 0.000000, 0.083333}} + A = UB: {{ 0.100000, 0.000000, 0.000000}, + { 0.000000, 0.090909, 0.000000}, + { 0.000000, 
0.000000, 0.083333}} Mosaicity: 0.000000""" ) assert approx_equal(mosaic_model.get_mosaicity(), 0) @@ -375,20 +377,20 @@ def test_MosaicCrystalSauter2014(): space_group_symbol="P 1", ) assert ( - str(mosaic_model).replace("-0.0000", " 0.0000") + str(mosaic_model).replace("-0.000000", " 0.000000") == """\ Crystal: Unit cell: 10.000, 11.000, 12.000, 90.000, 90.000, 90.000 Space group: P 1 - U matrix: {{ 1.0000, 0.0000, 0.0000}, - { 0.0000, 1.0000, 0.0000}, - { 0.0000, 0.0000, 1.0000}} - B matrix: {{ 0.1000, 0.0000, 0.0000}, - { 0.0000, 0.0909, 0.0000}, - { 0.0000, 0.0000, 0.0833}} - A = UB: {{ 0.1000, 0.0000, 0.0000}, - { 0.0000, 0.0909, 0.0000}, - { 0.0000, 0.0000, 0.0833}} + U matrix: {{ 1.000000, 0.000000, 0.000000}, + { 0.000000, 1.000000, 0.000000}, + { 0.000000, 0.000000, 1.000000}} + B matrix: {{ 0.100000, 0.000000, 0.000000}, + { 0.000000, 0.090909, 0.000000}, + { 0.000000, 0.000000, 0.083333}} + A = UB: {{ 0.100000, 0.000000, 0.000000}, + { 0.000000, 0.090909, 0.000000}, + { 0.000000, 0.000000, 0.083333}} Half mosaic angle (degrees): 0.000000 Domain size (Angstroms): 0.000000""" ) @@ -639,3 +641,42 @@ def test_recalculated_cell(crystal_class): xl.set_recalculated_unit_cell(uctbx.unit_cell((11, 12, 13, 90, 90, 90))) assert xl.get_recalculated_cell_parameter_sd() == () assert xl.get_recalculated_cell_volume_sd() == 0 + + +def test_from_phil(): + params = crystal_phil_scope.extract() + assert CrystalFactory.from_phil(params) is None + + params = crystal_phil_scope.fetch( + parse( + """ + crystal { + unit_cell = 10, 20, 30, 90, 90, 90 + space_group = P222 + } + """ + ) + ).extract() + + # Create the crystal + crystal = CrystalFactory.from_phil(params) + assert crystal.get_unit_cell().parameters() == (10, 20, 30, 90, 90, 90) + assert crystal.get_space_group().type().lookup_symbol() == "P 2 2 2" + + params = crystal_phil_scope.fetch( + parse( + """ + crystal { + A_matrix = 0.0541, 0.0832, -0.0060, 0.0049, -0.0175, -0.0492, -0.0840, 0.0526, -0.0068 + space_group = 
P4 + } + """ + ) + ).extract() + + # Create the crystal + crystal = CrystalFactory.from_phil(params) + assert crystal.get_A() == pytest.approx( + (0.0541, 0.0832, -0.0060, 0.0049, -0.0175, -0.0492, -0.0840, 0.0526, -0.0068) + ) + assert crystal.get_space_group().type().lookup_symbol() == "P 4" diff --git a/tests/model/test_experiment_list.py b/tests/model/test_experiment_list.py index e9548a198..6909ccf6f 100644 --- a/tests/model/test_experiment_list.py +++ b/tests/model/test_experiment_list.py @@ -1,11 +1,14 @@ from __future__ import annotations +import bz2 import collections import errno import os import pickle +import shutil from unittest import mock +import dateutil.parser import pytest from cctbx import sgtbx @@ -24,6 +27,7 @@ ExperimentList, ExperimentType, Goniometer, + History, Scan, ScanFactory, ) @@ -84,7 +88,7 @@ def all_image_examples(dials_data): ("SSRL_bl111-mar325_1_001.mccd.bz2"), ("xia2-merge2cbf_averaged_0001.cbf.bz2"), ) - return [str(dials_data("image_examples") / f) for f in filenames] + return [str(dials_data("image_examples", pathlib=True) / f) for f in filenames] @pytest.fixture @@ -400,21 +404,16 @@ def experiment_list(): return experiments -def test_experimentlist_factory_from_json(monkeypatch, dials_regression): +def test_experimentlist_factory_from_json(monkeypatch, dials_data): + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." 
# Get all the filenames - filename1 = os.path.join( - dials_regression, "experiment_test_data", "experiment_1.json" - ) - filename3 = os.path.join( - dials_regression, "experiment_test_data", "experiment_3.json" - ) - filename4 = os.path.join( - dials_regression, "experiment_test_data", "experiment_4.json" - ) + filename1 = str(data_dir / "experiment_1.json") + filename3 = str(data_dir / "experiment_3.json") + filename4 = str(data_dir / "experiment_4.json") - # Read all the experiment lists in with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) el1 = ExperimentListFactory.from_json_file(filename1) el3 = ExperimentListFactory.from_json_file(filename3) el4 = ExperimentListFactory.from_json_file(filename4) @@ -442,15 +441,15 @@ def test_experimentlist_factory_from_json(monkeypatch, dials_regression): assert e1.crystal == ee.crystal -def test_experimentlist_factory_from_pickle(monkeypatch, dials_regression): +def test_experimentlist_factory_from_pickle(monkeypatch, dials_data): + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." 
# Get all the filenames - filename1 = os.path.join( - dials_regression, "experiment_test_data", "experiment_1.json" - ) + filename1 = str(data_dir / "experiment_1.json") # Read all the experiment lists in with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) el1 = ExperimentListFactory.from_json_file(filename1) # Pickle then load again @@ -470,20 +469,22 @@ def test_experimentlist_factory_from_pickle(monkeypatch, dials_regression): assert e1.crystal and e1.crystal == e2.crystal -def test_experimentlist_factory_from_args(monkeypatch, dials_regression): +def test_experimentlist_factory_from_args(monkeypatch, dials_data): pytest.importorskip("dials") + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." + # Get all the filenames filenames = [ - os.path.join(dials_regression, "experiment_test_data", "experiment_1.json"), - # os.path.join(dials_regression, 'experiment_test_data', 'experiment_2.json'), - os.path.join(dials_regression, "experiment_test_data", "experiment_3.json"), - os.path.join(dials_regression, "experiment_test_data", "experiment_4.json"), + str(data_dir / "experiment_1.json"), + str(data_dir / "experiment_3.json"), + str(data_dir / "experiment_4.json"), ] # Get the experiments from a list of filenames with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) experiments = ExperimentListFactory.from_args(filenames) assert len(experiments) == 3 @@ -536,15 +537,16 @@ def test_experimentlist_factory_from_sequence(): assert experiments[0].crystal -def test_experimentlist_dumper_dump_formats(monkeypatch, dials_regression, tmp_path): +def test_experimentlist_dumper_dump_formats(monkeypatch, dials_data, tmp_path): + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." 
+ # Get all the filenames - filename1 = os.path.join( - dials_regression, "experiment_test_data", "experiment_1.json" - ) + filename1 = str(data_dir / "experiment_1.json") # Read all the experiment lists in with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) elist1 = ExperimentListFactory.from_json_file(filename1) # Dump as JSON file and reload @@ -560,17 +562,15 @@ def test_experimentlist_dumper_dump_formats(monkeypatch, dials_regression, tmp_p check(elist1, elist2) -def test_experimentlist_dumper_dump_scan_varying( - monkeypatch, dials_regression, tmp_path -): +def test_experimentlist_dumper_dump_scan_varying(monkeypatch, dials_data, tmp_path): + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." # Get all the filenames - filename1 = os.path.join( - dials_regression, "experiment_test_data", "experiment_1.json" - ) + filename1 = str(data_dir / "experiment_1.json") # Read the experiment list in with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) elist1 = ExperimentListFactory.from_json_file(filename1) # Make trivial scan-varying models @@ -621,10 +621,21 @@ def test_experimentlist_dumper_dump_empty_sequence(tmp_path): check(experiments, experiments2) -def test_experimentlist_dumper_dump_with_lookup(dials_regression, tmp_path): - filename = os.path.join( - dials_regression, "centroid_test_data", "experiments_with_lookup.json" - ) +def test_experimentlist_dumper_dump_with_lookup(dials_data, tmp_path): + data_dir = dials_data("centroid_test_data", pathlib=True) + + # Copy to the tmp directory, because we need to unpack some files + filename = shutil.copy(data_dir / "experiments_with_lookup.json", tmp_path) + gain_bz2 = shutil.copy(data_dir / "lookup_gain.pickle.bz2", tmp_path) + pedestal_bz2 = shutil.copy(data_dir / "lookup_pedestal.pickle.bz2", 
tmp_path) + shutil.copy(data_dir / "lookup_mask.pickle", tmp_path) + for image in data_dir.glob("centroid_000*.cbf"): + shutil.copy(image, tmp_path) + + for f in [gain_bz2, pedestal_bz2]: + with bz2.BZ2File(f) as compr: + with open(f[:-4], "wb") as decompr: + shutil.copyfileobj(compr, decompr) experiments = ExperimentListFactory.from_json_file(filename, check_format=True) @@ -835,24 +846,23 @@ def compare_experiment(exp1, exp2): ) -def test_experimentlist_from_file(monkeypatch, dials_regression, tmpdir): +def test_experimentlist_from_file(dials_data, monkeypatch, tmpdir): # With the default check_format=True this file should fail to load with an # appropriate error as we can't find the images on disk + data_dir = dials_data("experiment_test_data", pathlib=True) + dials_data_root = data_dir / ".." + with monkeypatch.context() as m: - m.delenv("DIALS_REGRESSION", raising=False) + m.delenv("DIALS_DATA", raising=False) with pytest.raises(IOError) as e: - exp_list = ExperimentList.from_file( - os.path.join( - dials_regression, "experiment_test_data", "experiment_1.json" - ) - ) + exp_list = ExperimentList.from_file(str(data_dir / "experiment_1.json")) assert e.value.errno == errno.ENOENT assert "No such file or directory" in str(e.value) assert "centroid_0001.cbf" in str(e.value) # Setting check_format=False should allow the file to load exp_list = ExperimentList.from_file( - os.path.join(dials_regression, "experiment_test_data", "experiment_1.json"), + str(data_dir / "experiment_1.json"), check_format=False, ) assert len(exp_list) == 1 @@ -862,10 +872,8 @@ def test_experimentlist_from_file(monkeypatch, dials_regression, tmpdir): # file to load with check_format=True with monkeypatch.context() as m: - m.setenv("DIALS_REGRESSION", dials_regression) - exp_list = ExperimentList.from_file( - os.path.join(dials_regression, "experiment_test_data", "experiment_1.json") - ) + m.setenv("DIALS_DATA", str(dials_data_root.resolve())) + exp_list = 
ExperimentList.from_file(str(data_dir / "experiment_1.json")) assert len(exp_list) == 1 assert exp_list[0].beam @@ -1285,3 +1293,53 @@ def test_experiment_list_all(): ) assert experiments.all_stills() assert experiments.all_same_type() + + +def test_history(tmp_path): + # Check basic construction and pickling + history = History(["foo"]) + assert history.get_history() == ["foo"] + history2 = pickle.loads(pickle.dumps(history)) + assert history2.get_history() == history.get_history() + + # Check append history, which adds a timestamp + history = History() + history.append_history_item("dxtbx.program", "v42.0.0", "flag") + h = history.get_history() + assert len(h) == 1 + timestamp, program, version, flag = h[-1].split("|") + assert dateutil.parser.isoparse(timestamp) + assert program == "dxtbx.program" + assert version == "v42.0.0" + assert flag == "flag" + + # Now check saving and loading, which adds new history items + el = ExperimentList() + el.append(Experiment()) + el[0].history = history + el.as_file(tmp_path / "temp.expt") + el2 = ExperimentList.from_file(tmp_path / "temp.expt") + assert el2[0].history.get_history()[0:2] == el[0].history.get_history() + + _, program, _ = el2[0].history.get_history()[-1].split("|") + # This module called as_json, so its name is in the history + assert program == __name__ + + # Check flags + el.as_file(tmp_path / "temp2.expt", history_as_integrated=True) + check = ExperimentList.from_file(tmp_path / "temp2.expt")[0].history.get_history()[ + -1 + ] + assert "integrated" in check + el.as_file(tmp_path / "temp3.expt", history_as_scaled=True) + check = ExperimentList.from_file(tmp_path / "temp3.expt")[0].history.get_history()[ + -1 + ] + assert "scaled" in check + el.as_file( + tmp_path / "temp4.expt", history_as_integrated=True, history_as_scaled=True + ) + check = ExperimentList.from_file(tmp_path / "temp4.expt")[0].history.get_history()[ + -1 + ] + assert "integrated,scaled" in check diff --git 
a/tests/model/test_parallax_correction.py b/tests/model/test_parallax_correction.py index fab4e52eb..1c233cd11 100644 --- a/tests/model/test_parallax_correction.py +++ b/tests/model/test_parallax_correction.py @@ -1,6 +1,5 @@ from __future__ import annotations -import os import random from scitbx import matrix @@ -21,8 +20,8 @@ def correct_gold(detector, attlen, xy): return mmcal -def test_run(dials_regression): - filename = os.path.join(dials_regression, "image_examples", "XDS", "XPARM.XDS") +def test_run(dials_data): + filename = str(dials_data("misc_regression", pathlib=True) / "sim_mx-GXPARM.XDS") models = dxtbx.load(filename) detector = models.get_detector() diff --git a/tests/model/test_pixel_to_millimeter.py b/tests/model/test_pixel_to_millimeter.py index 990594a54..2e131f131 100644 --- a/tests/model/test_pixel_to_millimeter.py +++ b/tests/model/test_pixel_to_millimeter.py @@ -1,7 +1,6 @@ from __future__ import annotations import math -import os import pickle import random @@ -25,8 +24,8 @@ @pytest.fixture -def model(dials_regression): - filename = os.path.join(dials_regression, "image_examples", "XDS", "XPARM.XDS") +def model(dials_data): + filename = str(dials_data("misc_regression", pathlib=True) / "sim_mx-GXPARM.XDS") models = dxtbx.load(filename) detector = models.get_detector() diff --git a/tests/nexus/test_build_dxtbx_models.py b/tests/nexus/test_build_dxtbx_models.py index 4f4591ca7..26fc12b8a 100644 --- a/tests/nexus/test_build_dxtbx_models.py +++ b/tests/nexus/test_build_dxtbx_models.py @@ -805,7 +805,7 @@ def test_dataset_as_flex_float(): def test_dataset_as_flex_double(): slices = () np_double_types = ( - np.float_, + np.float64, np.double, np.float64, ) diff --git a/tests/serialize/test_xds.py b/tests/serialize/test_xds.py index 8bfa8e38f..f3f182012 100644 --- a/tests/serialize/test_xds.py +++ b/tests/serialize/test_xds.py @@ -127,3 +127,42 @@ def test_vmxi_thaumatin(dials_data): s = to_xds.XDS_INP() assert "DETECTOR=EIGER" in s assert 
"SENSOR_THICKNESS= 0.450" in s + + +def test_xds_to_imageset(tmp_path, dials_data): + input_file = """ +DATA_RANGE=1 100 +DETECTOR=EIGER +DETECTOR_DISTANCE=259.000000 +DIRECTION_OF_DETECTOR_X-AXIS=1.00000 0.00000 0.00000 +DIRECTION_OF_DETECTOR_Y-AXIS=0.00000 1.00000 0.00000 +FRACTION_OF_POLARIZATION=0.999 +FRIEDEL'S_LAW=TRUE +INCIDENT_BEAM_DIRECTION=-0.000 -0.000 1.049 +MINIMUM_VALID_PIXEL_VALUE=0 +NAME_TEMPLATE_OF_DATA_FRAMES=thaumatin_??????.h5 +NX=4148 +NY=4362 +ORGX=2050.50 +ORGY=2150.50 +OSCILLATION_RANGE=0.100 +OVERLOAD=126952 +POLARIZATION_PLANE_NORMAL=0.000 1.000 0.000 +QX=0.0750 +QY=0.0750 +ROTATION_AXIS=0.00000 -1.00000 -0.00000 +STARTING_ANGLE=200.000 +TRUSTED_REGION=0.0 1.41 +SENSOR_THICKNESS=0.450 +X-RAY_WAVELENGTH=0.95372 +SPACE_GROUP_NUMBER=89 +UNIT_CELL_CONSTANTS=57.500000 57.500000 149.000000 90.000000 90.000000 90.000000 +""" + xds_inp = tmp_path / "XDS.INP" + with open(xds_inp, "w") as f: + f.write(input_file) + ## Note that the XDS.INP is for thaumatin, and centroid test data is not, but + ## this is sufficient for testing the creation of an imageset of the correct size. 
+ xds_other = dials_data("centroid_test_data", pathlib=True) / "INTEGRATE.HKL" + iset = xds.to_imageset(xds_inp, xds_other) + assert len(iset) == 100 diff --git a/tests/test_FormatCBFFull.py b/tests/test_FormatCBFFull.py index cb7eeb792..a345cef5b 100644 --- a/tests/test_FormatCBFFull.py +++ b/tests/test_FormatCBFFull.py @@ -1,21 +1,19 @@ from __future__ import annotations -import os - from libtbx.test_utils import approx_equal from dxtbx.imageset import ImageSetFactory -def test_multi_axis_goniometer(dials_regression): - data_dir = os.path.join(dials_regression, "image_examples", "dials-190") +def test_multi_axis_goniometer(dials_data): + data_dir = dials_data("misc_regression", pathlib=True) - imgset = ImageSetFactory.new(os.path.join(data_dir, "whatev1_01_00001.cbf"))[0] + imgset = ImageSetFactory.new(str(data_dir / "dials-190_01_00001.cbf"))[0] gonio = imgset.get_goniometer(0) assert approx_equal(gonio.get_fixed_rotation(), (1, 0, 0, 0, 1, 0, 0, 0, 1)) assert approx_equal(gonio.get_setting_rotation(), (1, 0, 0, 0, 1, 0, 0, 0, 1)) - imgset = ImageSetFactory.new(os.path.join(data_dir, "whatev1_02_00001.cbf"))[0] + imgset = ImageSetFactory.new(str(data_dir / "dials-190_02_00001.cbf"))[0] gonio = imgset.get_goniometer(0) assert approx_equal(gonio.get_fixed_rotation(), (1, 0, 0, 0, 1, 0, 0, 0, 1)) assert approx_equal( @@ -24,7 +22,7 @@ def test_multi_axis_goniometer(dials_regression): eps=1e-4, ) - imgset = ImageSetFactory.new(os.path.join(data_dir, "whatev1_03_00001.cbf"))[0] + imgset = ImageSetFactory.new(str(data_dir / "dials-190_03_00001.cbf"))[0] gonio = imgset.get_goniometer(0) assert approx_equal(gonio.get_fixed_rotation(), (1, 0, 0, 0, 1, 0, 0, 0, 1)) assert approx_equal( diff --git a/tests/test_dataset_as_flex.py b/tests/test_dataset_as_flex.py index 9df763aab..ee863ce6b 100644 --- a/tests/test_dataset_as_flex.py +++ b/tests/test_dataset_as_flex.py @@ -5,7 +5,7 @@ import numpy import pytest -from scitbx.array_family import flex # noqa: F401; boost python 
bindings +from scitbx.array_family import flex # noqa: F401 # boost python bindings from dxtbx_format_nexus_ext import ( dataset_as_flex_double, diff --git a/tests/test_flumpy.py b/tests/test_flumpy.py index a8d996519..5fc2f2ea3 100644 --- a/tests/test_flumpy.py +++ b/tests/test_flumpy.py @@ -217,7 +217,7 @@ def test_reverse_vec3_dtype(dtype): @pytest.mark.parametrize("dtype", [np.int32, np.intc, int]) def test_reverse_miller_index(dtype): hkl = np.array([(1, 0, 0), (0, 1, 0), (0, 0, 1)], dtype=dtype) - if dtype is int and np.dtype("l").itemsize != np.dtype("i").itemsize: + if np.dtype(dtype).itemsize != np.dtype(np.intc).itemsize: with pytest.raises(ValueError): flumpy.miller_index_from_numpy(hkl) else: diff --git a/tests/test_image_readers.py b/tests/test_image_readers.py index 9b03bc66f..508506e68 100644 --- a/tests/test_image_readers.py +++ b/tests/test_image_readers.py @@ -1,7 +1,5 @@ from __future__ import annotations -import os - import pycbf from dxtbx.model.detector import DetectorFactory @@ -16,10 +14,9 @@ def cbf_read_buffer(handle, contents, flags): return handle.read_buffer(contents, flags) -def test_cbf_buffer(dials_regression): - filename = os.path.join( - dials_regression, "image_examples", "dials-190", "whatev1_01_00001.cbf" - ) +def test_cbf_buffer(dials_data): + data_dir = dials_data("misc_regression", pathlib=True) + filename = str(data_dir / "dials-190_01_00001.cbf") with open(filename, "rb") as f: contents = f.read() diff --git a/tests/test_imageset.py b/tests/test_imageset.py index 1e60c81bf..adda72624 100644 --- a/tests/test_imageset.py +++ b/tests/test_imageset.py @@ -571,6 +571,11 @@ def test_pickle_imageset(centroid_files): # Read the 5th image assert sequence[4] + # Set a rejected image + assert not sequence.is_marked_for_rejection(8) + sequence.mark_for_rejection(8, True) + assert sequence.is_marked_for_rejection(8) + # Pickle, then unpickle pickled_sequence = pickle.dumps(sequence) sequence2 = pickle.loads(pickled_sequence) @@ -589,6 
+594,9 @@ def test_pickle_imageset(centroid_files): sequence4.get_detectorbase(0) sequence4[0] + # Check reject list is preserved for https://github.com/dials/dials/issues/2998 + assert sequence2.is_marked_for_rejection(8) + def test_get_corrected_data(centroid_files): sequence = ImageSetFactory.new(centroid_files)[0]