From 9642a9448f1ed15703feadc3bc112499aea5b575 Mon Sep 17 00:00:00 2001 From: Thomas Bischof Date: Thu, 8 Jan 2026 11:12:53 +0100 Subject: [PATCH] Add ptu support and pyproject.toml, deprecate cython A user requested the ability to load ptu (t2) files from a PicoQuant PicoHarp. The test suite covers HydraHarp (t2, t3) and PicoHarp (t2) but the code should be as general as the underlying ptufile package. In the process of setting this up I debugged a bit and made a few changes: * rewrite fib4.pyx as fib4.py, which deprecates cython * bump minimum Python version to 3.11 to support ptufile * migrated from setup.py to pyproject.toml (tested on Linux) * set up tox.ini for running the test suite * set up ruff, ty, and codespell placeholders for gradual linting (remove the exclusions as desired) * fix some stray numpy bugs (np.float_ deprecation) * remove distutils due to deprecation in Python 3.12 * do version generation through setuptools_scm * ensure the test suite fully passes for all versions supported * ensure the CI check stage works up through testing Deprecating cython There was only one function that required cython. I have replaced it with a pure numpy implementation that seems to perform equivalently and gets the same results for both random and test data. In tests/test_file_formats.py:test_pt3_basic the behavior is the same as before (expected 2037 is actually 2037.5) but I have no insight as to why that happened in the first place. The correlation data (last two lines of that test) pass on my machine though. So all I can say is that no observable behavior has changed for loading pt3 or running dividAndConquer, not that the trace value is actually correct. 
--- .github/workflows/check.yml | 97 ++++++--- .github/workflows/deploy_github.yml | 4 +- .github/workflows/deploy_pypi.yml | 2 +- .gitignore | 4 + CHANGELOG | 6 + CONTRIBUTING.md | 12 ++ MANIFEST.in | 2 +- build-recipes/win_build_requirements.txt | 1 + pycorrfit/__init__.py | 18 +- pycorrfit/_version.py | 197 ------------------ pycorrfit/fit.py | 2 +- pycorrfit/gui/frontend.py | 2 +- pycorrfit/gui/main.py | 2 +- pycorrfit/gui/page.py | 4 +- pycorrfit/gui/tools/statistics.py | 2 +- pycorrfit/gui/update.py | 2 +- pycorrfit/openfile.py | 7 +- pycorrfit/readfiles/__init__.py | 2 + pycorrfit/readfiles/read_ASC_ALV.py | 4 +- pycorrfit/readfiles/read_pt3_scripts/README | 4 +- .../read_pt3_scripts/correlation_methods.py | 6 +- .../read_pt3_scripts/correlation_objects.py | 15 +- pycorrfit/readfiles/read_pt3_scripts/fib4.py | 89 ++++++++ pycorrfit/readfiles/read_pt3_scripts/fib4.pyx | 60 ------ .../read_pt3_scripts/import_methods.py | 27 +++ pycorrfit/readfiles/read_ptu_PicoQuant.py | 8 + pyproject.toml | 68 ++++++ setup.py | 116 ++--------- tests/data_file_dl.py | 8 + tests/test_file_formats.py | 100 ++++++--- tox.ini | 34 +++ 31 files changed, 459 insertions(+), 446 deletions(-) create mode 100644 CONTRIBUTING.md create mode 100644 build-recipes/win_build_requirements.txt delete mode 100644 pycorrfit/_version.py create mode 100644 pycorrfit/readfiles/read_pt3_scripts/fib4.py delete mode 100644 pycorrfit/readfiles/read_pt3_scripts/fib4.pyx create mode 100644 pycorrfit/readfiles/read_ptu_PicoQuant.py create mode 100644 pyproject.toml create mode 100644 tox.ini diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 689259f3..ad708f25 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -14,7 +14,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ['3.10'] + python-version: ['3.11'] os: [macos-latest, ubuntu-latest, windows-latest] env: # Display must be available globally for linux to know where xvfb is @@ 
-27,11 +27,32 @@ jobs: uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} + # building on macos results in a long run due to having to build the font cache + - name: Cache Matplotlib font cache + uses: actions/cache@v3 + with: + path: ~/.cache/matplotlib + key: ${{ runner.os }}-matplotlib-${{ hashFiles('pyproject.toml') }} + restore-keys: | + ${{ runner.os }}-matplotlib- - name: Setup xvfb (Linux) if: runner.os == 'Linux' run: | sudo apt-get update - sudo apt-get install -y xvfb libxkbcommon-x11-0 libxcb-icccm4 libxcb-image0 libxcb-keysyms1 libxcb-randr0 libxcb-render-util0 libxcb-xinerama0 libxcb-xinput0 libxcb-xfixes0 libegl1-mesa + sudo apt-get install -y \ + xvfb \ + libxkbcommon-x11-0 \ + libxcb-icccm4 \ + libxcb-image0 \ + libxcb-keysyms1 \ + libxcb-randr0 \ + libxcb-render-util0 \ + libxcb-xinerama0 \ + libxcb-xinput0 \ + libxcb-xfixes0 \ + libegl1 \ + libegl-mesa0 \ + libgl1-mesa-dri # start xvfb in the background sudo /usr/bin/Xvfb $DISPLAY -screen 0 1280x1024x24 & - name: Install dependencies @@ -44,37 +65,51 @@ jobs: pip install -e . 
# show installed packages pip freeze +# - name: Install large dependencies (Linux) +# if: runner.os == 'Linux' +# run: | +# pip install -U -f https://extras.wxpython.org/wxPython4/extras/linux/gtk3/ubuntu-24.04 wxPython +# pip install .[GUI] +# - name: Install large dependencies (macOS) +# if: runner.os == 'macOS' +# run: | +# pip install -U -f https://extras.wxpython.org/wxPython4/extras/linux/gtk3/ubuntu-24.04 wxPython +# pip install --prefer-binary matplotlib +# pip install .[GUI] +# - name: Install large dependencies (Windows) +# if: runner.os == 'windows' +# run: | +# pip install .[GUI] - name: Test run: | coverage run --source=pycorrfit -m pytest tests - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 - - name: Create app and check if it runs (macOS) - if: runner.os == 'macOS' - working-directory: ./build-recipes - run: | - bash ./macos_build_app.sh PyCorrFit $(python -m pycorrfit --version) - - name: Upload build artifacts - if: (runner.os == 'macOS') - uses: actions/upload-artifact@v3 - with: - name: PyCorrFit_macosx - path: | - ./build-recipes/dist/*.dmg - - - name: Create app and check if it runs (Win) - if: runner.os == 'windows' - working-directory: ./build-recipes - run: | - pip install -r win_build_requirements.txt - pyinstaller -y --log-level=WARN win_PyCorrFit.spec - .\dist\PyCorrFit\PyCorrFit.exe --version - python win_make_iss.py - iscc /Q win_bmicro.iss - - name: Upload build artifacts - if: (runner.os == 'windows') - uses: actions/upload-artifact@v3 - with: - name: PyCorrFit - path: | - ./build-recipes/Output/*.exe +# - name: Create app and check if it runs (macOS) +# if: runner.os == 'macOS' +# working-directory: ./build-recipes +# run: | +# bash ./macos_build_app.sh PyCorrFit $(python -m pycorrfit --version) +# - name: Upload build artifacts +# if: (runner.os == 'macOS') +# uses: actions/upload-artifact@v4 +# with: +# name: PyCorrFit_macosx +# path: | +# ./build-recipes/dist/*.dmg +# - name: Create app and check if it runs 
(Win) +# if: runner.os == 'windows' +# working-directory: ./build-recipes +# run: | +# pip install -r win_build_requirements.txt +# pyinstaller -y --log-level=WARN win_PyCorrFit.spec +# .\dist\PyCorrFit\PyCorrFit.exe --version +# python win_make_iss.py +# iscc /Q win_bmicro.iss +# - name: Upload build artifacts +# if: (runner.os == 'windows') +# uses: actions/upload-artifact@v4 +# with: +# name: PyCorrFit +# path: | +# ./build-recipes/Output/*.exe diff --git a/.github/workflows/deploy_github.yml b/.github/workflows/deploy_github.yml index b2935a63..46fff8e5 100644 --- a/.github/workflows/deploy_github.yml +++ b/.github/workflows/deploy_github.yml @@ -11,7 +11,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ['3.10'] + python-version: ['3.11'] os: [macos-latest, windows-latest] steps: - uses: actions/checkout@v3 @@ -20,7 +20,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v3 with: - python-version: "3.10" + python-version: "3.11" - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/deploy_pypi.yml b/.github/workflows/deploy_pypi.yml index 87c6d7d9..551f016f 100644 --- a/.github/workflows/deploy_pypi.yml +++ b/.github/workflows/deploy_pypi.yml @@ -11,7 +11,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python-version: ['3.10'] + python-version: ['3.11'] os: [macos-latest, windows-latest] steps: - uses: actions/checkout@v3 diff --git a/.gitignore b/.gitignore index 69e4170e..e6fa5735 100644 --- a/.gitignore +++ b/.gitignore @@ -77,6 +77,7 @@ nosetests.xml tests/data .cache +__pycache__ .eggs .env @@ -86,3 +87,6 @@ docs/_build # pycharm .idea + +_version.py +*.swp diff --git a/CHANGELOG b/CHANGELOG index 76723a5f..866acf5b 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,9 @@ +[unreleased] + - enh: add support for PicoQuant ptu file format + - setup: drop support for Python <3.11 (due to ptufile) + - tests: set up tox for all supported python version + - tests: add 
placeholder ruff/ty/codespell configuration for gradual improvements + - enh: deprecate cython 1.2.1 - fix: compatibility with newer versions of scipy (#211) 1.2.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..04d1028f --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,12 @@ +# Quickstart + +``` +uv build +``` + +Running tests: + +``` +uv tool install tox --with tox-uv +uvx tox +``` diff --git a/MANIFEST.in b/MANIFEST.in index 9903e192..847ea660 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,6 +5,6 @@ recursive-include examples *.txt *.pcfs recursive-include doc *.tex *.bib *.pdf *.md *.png *.svg recursive-include docs *.py *.md *.txt *.rst *.bib *.gif *.jpg *.png recursive-include tests *.py *.md -recursive-include pycorrfit LICENCE README +recursive-include pycorrfit LICENSE README prune docs/_build exclude docs/_version_save.py diff --git a/build-recipes/win_build_requirements.txt b/build-recipes/win_build_requirements.txt new file mode 100644 index 00000000..ef376ca8 --- /dev/null +++ b/build-recipes/win_build_requirements.txt @@ -0,0 +1 @@ +pyinstaller diff --git a/pycorrfit/__init__.py b/pycorrfit/__init__.py index db30ea29..5c31508b 100644 --- a/pycorrfit/__init__.py +++ b/pycorrfit/__init__.py @@ -2,15 +2,21 @@ PyCorrFit is a tool to fit fluorescence correlation spectroscopy data on a logarithmic scale. """ -from . import meta -from . import models -from . import openfile -from . import readfiles +from importlib.metadata import PackageNotFoundError, version + +from . 
import meta, models, openfile, readfiles from .correlation import Correlation from .fit import Fit from .trace import Trace -from ._version import version as __version__ -__author__ = u"Paul Müller" +try: + __version__ = version("pycorrfit") +except PackageNotFoundError: + # package is not installed + __version__ = "unknown" + + +__author__ = "Paul Müller" __license__ = "GPL v2" +__all__ = ["meta", "models", "openfile", "readfiles", "Fit", "Trace", "Correlation"] diff --git a/pycorrfit/_version.py b/pycorrfit/_version.py deleted file mode 100644 index dcae2dae..00000000 --- a/pycorrfit/_version.py +++ /dev/null @@ -1,197 +0,0 @@ -#!/usr/bin/env python -"""Determine package version from git repository tag - -Each time this file is imported it checks whether the package version -can be determined using `git describe`. If this fails (because either -this file is not located at the 1st level down the repository root or -it is not under version control), the version is read from the script -"_version_save.py" which is not versioned by git, but always included -in the final distribution archive (e.g. via PyPI). If the git version -does not match the saved version, then "_version_save.py" is updated. - - -Usage ------ -1. Put this file in your main module directory: - - REPO_ROOT/package_name/_version.py - -2. Add this line to REPO_ROOT/package_name/__init__.py - - from ._version import version as __version__ # noqa: F401 - -3. (Optional) Add this line to REPO_ROOT/.gitignore - - _version_save.py - -Features --------- -- supports Python 2 and 3 -- supports frozen applications (e.g. PyInstaller) -- supports installing into a virtual environment that is located in - a git repository -- saved version is located in a python file and therefore no other - files (e.g. 
MANIFEST.in) need be edited -- fallback version is the creation date -- excluded from code coverage via "pragma: no cover" - -Changelog ---------- -2019-11-06.2 - - use os.path.split instead of counting os.path.sep (Windows) -2019-11-06 - - remove deprecated imp dependency (replace with parser) - - check whether this file is versioned and its location is correct - - code cleanup and docs update -""" -from __future__ import print_function - -# Put the entire script into a `True` statement and add the hint -# `pragma: no cover` to ignore code coverage here. -if True: # pragma: no cover - import os - from os.path import abspath, basename, dirname, join, split - import subprocess - import sys - import time - import traceback - import warnings - - def git_describe(): - """ - Return a string describing the version returned by the - command `git describe --tags HEAD`. - If it is not possible to determine the correct version, - then an empty string is returned. - """ - # Make sure we are in a directory that belongs to the correct - # repository. - ourdir = dirname(abspath(__file__)) - - def _minimal_ext_cmd(cmd): - # Construct minimal environment - env = {} - for k in ['SYSTEMROOT', 'PATH']: - v = os.environ.get(k) - if v is not None: - env[k] = v - # LANGUAGE is used on win32 - env['LANGUAGE'] = 'C' - env['LANG'] = 'C' - env['LC_ALL'] = 'C' - pop = subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env) - out = pop.communicate()[0] - return out.strip().decode('ascii', errors="ignore") - - # change directory - olddir = abspath(os.curdir) - os.chdir(ourdir) - - # Make sure that we are getting "git describe" from our own - # repository (and not from a repository where we just happen - # to be in the directory tree). - git_revision = "" - try: - # If this file is not under version control, "loc" will - # be empty. 
- loc = _minimal_ext_cmd(['git', 'ls-files', '--full-name', - __file__]) - # If it is under version control, it should be located - # one hierarchy down from the repository root (either - # __file__ is "docs/conf.py" or "package_name/_version.py". - if len(split(loc)) == 2: - try: - git_revision = _minimal_ext_cmd(['git', 'describe', - '--tags', 'HEAD']) - except OSError: - pass - except OSError: - pass - # Go back to original directory - os.chdir(olddir) - - return git_revision - - def load_version(versionfile): - """load version from version_save.py""" - longversion = "" - try: - with open(versionfile, "r") as fd: - data = fd.readlines() - for line in data: - if line.startswith("longversion"): - longversion = line.split("=")[1].strip().strip("'") - except BaseException: - try: - from ._version_save import longversion - except BaseException: - try: - from _version_save import longversion - except BaseException: - pass - - return longversion - - def write_version(version, versionfile): - """save version to version_save.py""" - data = "#!/usr/bin/env python\n" \ - + "# This file was created automatically\n" \ - + "longversion = '{VERSION}'\n" - try: - with open(versionfile, "w") as fd: - fd.write(data.format(VERSION=version)) - except BaseException: - if not os.path.exists(versionfile): - # Only issue a warning if the file does not exist. - msg = "Could not write package version to {}.".format( - versionfile) - warnings.warn(msg) - - hdir = dirname(abspath(__file__)) - if basename(__file__) == "conf.py" and "name" in locals(): - # This script is executed in conf.py from the docs directory - versionfile = join(join(join(hdir, ".."), - name), # noqa: F821 - "_version_save.py") - else: - # This script is imported as a module - versionfile = join(hdir, "_version_save.py") - - # Determine the accurate version - longversion = "" - - # 1. git describe - try: - # Get the version using `git describe` - longversion = git_describe() - except BaseException: - pass - - # 2. 
previously created version file - if longversion == "": - # Either this is not a git repository or we are in the - # wrong git repository. - # Get the version from the previously generated `_version_save.py` - longversion = load_version(versionfile) - - # 3. last resort: date - if longversion == "": - print("Could not determine version. Reason:") - print(traceback.format_exc()) - ctime = os.stat(__file__)[8] - longversion = time.strftime("%Y.%m.%d-%H-%M-%S", time.gmtime(ctime)) - print("Using creation time as version: {}".format(longversion)) - - if not hasattr(sys, 'frozen'): - # Save the version to `_version_save.py` to allow distribution using - # `python setup.py sdist`. - # This is only done if the program is not frozen (with e.g. - # pyinstaller), - if longversion != load_version(versionfile): - write_version(longversion, versionfile) - - # PEP 440-conform development version: - version = ".post".join(longversion.split("-")[:2]) diff --git a/pycorrfit/fit.py b/pycorrfit/fit.py index 647ba7eb..3d098f8f 100644 --- a/pycorrfit/fit.py +++ b/pycorrfit/fit.py @@ -348,7 +348,7 @@ def parameters_global_to_local(parameters, iicorr, varin=varin, if pn in varin: # edit that parameter fit_parm[kk] = parameters[np.where( - np.array(varin) == pn)[0]] + np.array(varin) == pn)[0]][0] return fit_parm def parameters_local_to_global(parameters, iicorr, fit_parm, diff --git a/pycorrfit/gui/frontend.py b/pycorrfit/gui/frontend.py index 3934424e..64771f90 100644 --- a/pycorrfit/gui/frontend.py +++ b/pycorrfit/gui/frontend.py @@ -3,7 +3,7 @@ The frontend displays the GUI (Graphic User Interface). All necessary functions and modules are called from here. 
""" -from distutils.version import LooseVersion # For version checking +from looseversion import LooseVersion # For version checking import os import pathlib import platform diff --git a/pycorrfit/gui/main.py b/pycorrfit/gui/main.py index 99e37fb1..3694389b 100755 --- a/pycorrfit/gui/main.py +++ b/pycorrfit/gui/main.py @@ -1,5 +1,5 @@ """Main execution script""" -from distutils.version import LooseVersion +from looseversion import LooseVersion import sys import warnings diff --git a/pycorrfit/gui/page.py b/pycorrfit/gui/page.py index f40f302f..9509a8dd 100644 --- a/pycorrfit/gui/page.py +++ b/pycorrfit/gui/page.py @@ -273,7 +273,7 @@ def apply_parameters_reverse(self, event=None): # Write parameters to the form on the Page for i in np.arange(len(self.active_parms[1])): self.spincontrol[i].SetValue(parameters[i]) - self.checkboxes[i].SetValue(parameters_variable[i]) + self.checkboxes[i].SetValue(bool(parameters_variable[i])) # Fitting parameters self.Fitbox[5].SetValue(self.weighted_nuvar) idf = self.weighted_fittype_id @@ -735,7 +735,7 @@ def settings(self): parameterstofit = self.active_parms[2] # Set initial values given by user/programmer for Diffusion Model for i in np.arange(len(labels)): - self.checkboxes[i].SetValue(parameterstofit[i]) + self.checkboxes[i].SetValue(bool(parameterstofit[i])) self.spincontrol[i].SetValue(parameters[i]) # Put everything together self.panelsettings.sizer = wx.BoxSizer(wx.VERTICAL) diff --git a/pycorrfit/gui/tools/statistics.py b/pycorrfit/gui/tools/statistics.py index fdfcd3a0..19f4493c 100644 --- a/pycorrfit/gui/tools/statistics.py +++ b/pycorrfit/gui/tools/statistics.py @@ -192,7 +192,7 @@ def GetListOfAllParameters(self, e=None, return_std_checked=False, Info = Stat.SortParameters(parms) # List of default checked parameters: - checked = np.zeros(len(Info), dtype=np.bool) + checked = np.zeros(len(Info), dtype=bool) # Fit parameters pbool = page.corr.fit_parameters_variable model = mdls.modeldict[page.corr.fit_model.id] diff 
--git a/pycorrfit/gui/update.py b/pycorrfit/gui/update.py index 4b521e48..63ead502 100644 --- a/pycorrfit/gui/update.py +++ b/pycorrfit/gui/update.py @@ -1,5 +1,5 @@ """PyCorrFit - update checking""" -from distutils.version import LooseVersion # For version checking +from looseversion import LooseVersion # For version checking import os import tempfile import traceback diff --git a/pycorrfit/openfile.py b/pycorrfit/openfile.py index 6471d24e..25adbc48 100644 --- a/pycorrfit/openfile.py +++ b/pycorrfit/openfile.py @@ -5,6 +5,7 @@ """ import codecs import csv +from importlib.metadata import version, PackageNotFoundError import io import os import shutil @@ -18,7 +19,11 @@ # These imports are required for loading data from .trace import Trace -from ._version import version as __version__ +try: + __version__ = version("pycorrfit") +except PackageNotFoundError: + # package is not installed + __version__ = "unknown" def LoadSessionData(sessionfile, parameters_only=False): diff --git a/pycorrfit/readfiles/__init__.py b/pycorrfit/readfiles/__init__.py index fd7eb7c2..a2e67848 100644 --- a/pycorrfit/readfiles/__init__.py +++ b/pycorrfit/readfiles/__init__.py @@ -20,6 +20,7 @@ from .read_FCS_Confocor3 import openFCS from .read_mat_ries import openMAT from .read_pt3_PicoQuant import openPT3 +from .read_ptu_PicoQuant import openPTU def add_all_supported_filetype_entry(adict): @@ -247,6 +248,7 @@ def openZIP(path, filename=None): "PyCorrFit (*.csv)|*.csv": openCSV, "Matlab 'Ries (*.mat)|*.mat": openMAT, "PicoQuant (*.pt3)|*.pt3": openPT3, + "PicoQuant PTU T2/T3 (*.ptu)|*.ptu": openPTU, "Zeiss ConfoCor3 (*.fcs)|*.fcs": openFCS, "Zip file (*.zip)|*.zip": openZIP, "PyCorrFit session (*.pcfs)|*.pcfs": openZIP diff --git a/pycorrfit/readfiles/read_ASC_ALV.py b/pycorrfit/readfiles/read_ASC_ALV.py index 03f09226..0434f8c3 100644 --- a/pycorrfit/readfiles/read_ASC_ALV.py +++ b/pycorrfit/readfiles/read_ASC_ALV.py @@ -416,7 +416,7 @@ def openASC_ALV_7004(path): lent = alltrac.shape[0] # 
Traces - trace1 = np.zeros((lent, 2), dtype=np.float_) + trace1 = np.zeros((lent, 2), dtype=np.float64) trace1[:, 0] = time trace1[:, 1] = alltrac[:, 1] trace2 = trace1.copy() @@ -427,7 +427,7 @@ def openASC_ALV_7004(path): trace4[:, 1] = alltrac[:, 4] # Correlations - corr1 = np.zeros((lenc, 2), dtype=np.float_) + corr1 = np.zeros((lenc, 2), dtype=np.float64) corr1[:, 0] = tau corr1[:, 1] = allcorr[:, 1] corr2 = corr1.copy() diff --git a/pycorrfit/readfiles/read_pt3_scripts/README b/pycorrfit/readfiles/read_pt3_scripts/README index ca941324..a2aa7a3b 100644 --- a/pycorrfit/readfiles/read_pt3_scripts/README +++ b/pycorrfit/readfiles/read_pt3_scripts/README @@ -8,11 +8,11 @@ https://github.com/dwaithe/FCS_point_correlator The following changes were performed: - `fib4.pyx` - A doc string was inserted. + Removed and replaced with fib4.py - `correlation_objects.py` Line 7 was commented out: #from lmfit import minimize, Parameters,report_fit,report_errors, fit_report - `correlation_methods.py` Support for NumPy 0.13 was added: - https://github.com/FCS-analysis/PyCorrFit/commit/132991c7a2950c0c380c6df6edf433f61911a6db - - https://github.com/dwaithe/FCS_point_correlator/issues/2 \ No newline at end of file + - https://github.com/dwaithe/FCS_point_correlator/issues/2 diff --git a/pycorrfit/readfiles/read_pt3_scripts/correlation_methods.py b/pycorrfit/readfiles/read_pt3_scripts/correlation_methods.py index 77ba5ad6..841f4771 100644 --- a/pycorrfit/readfiles/read_pt3_scripts/correlation_methods.py +++ b/pycorrfit/readfiles/read_pt3_scripts/correlation_methods.py @@ -80,11 +80,11 @@ def tttr2xfcs(y, num, NcascStart, NcascEnd, Nsub): # i2= np.in1d(y+lag,y,assume_unique=True) # New method, cython - i1, i2 = dividAndConquer(y, y+lag, y.shape[0]) + i1, i2 = dividAndConquer(y, y+lag) # If the weights (num) are one as in the first Ncasc round, then the correlation is equal to np.sum(i1) - i1 = np.where(i1.astype(np.bool))[0] - i2 = np.where(i2.astype(np.bool))[0] + i1 = 
np.where(i1.astype(bool))[0] + i2 = np.where(i2.astype(bool))[0] # Now we want to weight each photon corectly. # Faster dot product method, faster than converting to matrix. diff --git a/pycorrfit/readfiles/read_pt3_scripts/correlation_objects.py b/pycorrfit/readfiles/read_pt3_scripts/correlation_objects.py index 3de789d2..4612252b 100644 --- a/pycorrfit/readfiles/read_pt3_scripts/correlation_objects.py +++ b/pycorrfit/readfiles/read_pt3_scripts/correlation_objects.py @@ -73,6 +73,9 @@ def processData(self): if self.ext == 'pt3': self.subChanArr, self.trueTimeArr, self.dTimeArr, self.resolution = pt3import( self.filepath) + if self.ext == 'ptu': + self.subChanArr, self.trueTimeArr, self.dTimeArr, self.resolution = ptuimport( + self.filepath) if self.ext == 'csv': self.subChanArr, self.trueTimeArr, self.dTimeArr, self.resolution = csvimport( self.filepath) @@ -256,13 +259,13 @@ def crossAndAuto(self, trueTimeArr, subChanArr): (auto[:, 0, 1]*maxY)/(self.count0*self.count1))-1 # Normalisaation of the decay functions. - self.photonDecayCh1Min = self.photonDecayCh1 - \ - np.min(self.photonDecayCh1) + self.photonDecayCh1Min = np.array(self.photonDecayCh1) - \ + (np.min(self.photonDecayCh1) if len(self.photonDecayCh1) > 0 else 0) self.photonDecayCh1Norm = self.photonDecayCh1Min / \ - np.max(self.photonDecayCh1Min) + (np.max(self.photonDecayCh1Min) if len(self.photonDecayCh1) > 0 else 1) if self.numOfCH == 2: - self.photonDecayCh2Min = self.photonDecayCh2 - \ + self.photonDecayCh2Min = np.array(self.photonDecayCh2) - \ np.min(self.photonDecayCh2) self.photonDecayCh2Norm = self.photonDecayCh2Min / \ np.max(self.photonDecayCh2Min) @@ -421,9 +424,9 @@ def subArrayGeneration(self, xmin, xmax): # self.subChanArr = np.array(self.chanArr) # Finds those photons which arrive above certain time or below certain time. 
photonInd = np.logical_and( - self.dTimeArr >= xmin, self.dTimeArr <= xmax).astype(np.bool) + self.dTimeArr >= xmin, self.dTimeArr <= xmax).astype(bool) - self.subChanArr[np.invert(photonInd).astype(np.bool)] = 16 + self.subChanArr[np.invert(photonInd).astype(bool)] = 16 self.crossAndAuto() diff --git a/pycorrfit/readfiles/read_pt3_scripts/fib4.py b/pycorrfit/readfiles/read_pt3_scripts/fib4.py new file mode 100644 index 00000000..af1cb4a7 --- /dev/null +++ b/pycorrfit/readfiles/read_pt3_scripts/fib4.py @@ -0,0 +1,89 @@ +import numpy as np +import numpy.typing as npt + + +def dividAndConquer( + arr1: npt.NDArray[np.integer | np.floating], + arr2: npt.NDArray[np.integer | np.floating], +) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]: + """ + Pure numpy implementation of the original + 'divide and conquer fast intersection algorithm. Waithe D 2014' + + # Note + 2026-01-16 Thomas Bischof + + Replaces the following block from fib4.pyx. I tested this against both + random data and real data, and obtained identical results. + -------------------------------------------- + PicoQuant functionalities from FCS_viewer + + This file contains a fast implementation of an algorithm that is + very important (yes I have no clue about the structure of pt3 files) + for importing *.pt3 files: `dividAndConquer`. + + The code was written by + Dr. Dominic Waithe + Wolfson Imaging Centre. + Weatherall Institute of Molecular Medicine. + University of Oxford + + https://github.com/dwaithe/FCS_viewer + + See Also: + The wrapper: `read_pt3_PicoQuant.py` + The wrapped file: `read_pt3_PicoQuant_original_FCSViewer.py`. + + import cython + cimport cython + + import numpy as np + cimport numpy as np + + DTYPE = np.float64 + ctypedef np.float64_t DTYPE_t + + @cython.boundscheck(False) + @cython.wraparound(False) + @cython.nonecheck(False) + def dividAndConquer(arr1b,arr2b,arrLength): + \"\"\"divide and conquer fast intersection algorithm. 
Waithe D 2014\"\"\" + + cdef np.ndarray[DTYPE_t, ndim=1] arr1bool = np.zeros((arrLength-1)) + cdef np.ndarray[DTYPE_t, ndim=1] arr2bool = np.zeros((arrLength-1)) + cdef np.ndarray[DTYPE_t, ndim=1] arr1 = arr1b + cdef np.ndarray[DTYPE_t, ndim=1] arr2 = arr2b + + cdef int arrLen + arrLen = arrLength; + cdef int i + i = 0; + cdef int j + j = 0; + + while(i = 0] # remove all markers + + # either convert our t3 data to t2 or load the t2 data directly + # time is in units of ns + if ptu.is_t3: + channel = records["channel"] + dtime = records["dtime"] + sync_time = (1e9 / ptu.syncrate) + resolution = ptu.tcspc_resolution + time = (records["time"] * sync_time) + (dtime * resolution) + else: + channel = records["channel"] + dtime = np.zeros_like(channel, dtype=np.uint32) + resolution = ptu.global_resolution + time = records["time"] * 1e9 * resolution + + return channel, time, dtime, resolution + diff --git a/pycorrfit/readfiles/read_ptu_PicoQuant.py b/pycorrfit/readfiles/read_ptu_PicoQuant.py new file mode 100644 index 00000000..36b81840 --- /dev/null +++ b/pycorrfit/readfiles/read_ptu_PicoQuant.py @@ -0,0 +1,8 @@ +"""Read PicoQuant PTU files (t2 and t3 mode)""" + + +from .read_pt3_PicoQuant import openPT3 + + +def openPTU(path, filename=None): + return openPT3(path, filename) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..8cecfa22 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,68 @@ +[project] +authors = [{ name = "Paul Müller", email = "dev@craban.de" }] +description = "Scientific tool for fitting correlation curves on a logarithmic plot." 
+name = "pycorrfit" +readme = "README.rst" +license = "GPL-2.0-only" +requires-python = ">=3.11, <4" +dynamic = ["version"] + +dependencies = [ + "lmfit >= 0.9.2", + "numpy >= 1.14.2", + "pyyaml >= 3.12", + "scipy >= 1.0.1", + "ptufile >= 2025.12.12", + "looseversion >= 1.3.0", +] + +classifiers = [ + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Topic :: Scientific/Engineering :: Visualization", + "Intended Audience :: Science/Research", +] +keywords = ["fluorescence correlation spectroscopy (FCS)"] + +[project.urls] +repository = "https://github.com/FCS-analysis/PyCorrFit" + +[project.optional-dependencies] +test = ["pytest", "urllib3", "ruff", "ty", "codespell"] +# Graphical User Interface (pip install pycorrfit[GUI]) +GUI = [ + "matplotlib >= 2.2.2", + "sympy >= 1.1.1", + "simplejson", # for updates + "wxPython >= 4.0.1", +] + +[project.gui-scripts] +pycorrfit = "pycorrfit.gui.main:Main" + +[build-system] +requires = ["setuptools>=80", "setuptools-scm>=9"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +root = "." 
+version_file = "pycorrfit/_version.py" + +[tool.setuptools_scm.scm.git] +# ensures that we find the x.y.z (no v prefix) tags +describe_command = "git describe --dirty --tags --long --match '[0-9]*'" + +[tool.setuptools.package-data] +pycorrfit_doc = ["CHANGELOG", "doc/PyCorrFit_doc.pdf"] + +[tool.ruff] +exclude = ["docs", "pycorrfit", "tests"] + +[tool.ruff.lint] +select = ["E", "F", "W", "I", "B"] + +[tool.ty.src] +exclude = ["docs", "pycorrfit", "tests", "setup.py"] + +[tool.codespell] +skip = "./CHANGELOG,./build*,./*egg-info,./doc*,./examples,./pycorrfit,./setup.py,./tests" diff --git a/setup.py b/setup.py index 5c609ce2..3432ac50 100644 --- a/setup.py +++ b/setup.py @@ -1,100 +1,16 @@ -from os.path import join, dirname, realpath, exists -from setuptools import setup, Extension, find_packages -import sys - -# The next three lines are necessary for setup.py install to include -# ChangeLog and Documentation of PyCorrFit -from distutils.command.install import INSTALL_SCHEMES -for scheme in INSTALL_SCHEMES.values(): - scheme['data'] = scheme['purelib'] - - -# We don't need to cythonize if a .whl package is available. -try: - import numpy as np -except ImportError: - print("NumPy not available. 
Building extensions "+ - "with this setup script will not work:", sys.exc_info()) - extensions = [] -else: - extensions = [Extension("pycorrfit.readfiles.read_pt3_scripts.fib4", - sources=["pycorrfit/readfiles/read_pt3_scripts/fib4.pyx"], - include_dirs=[np.get_include()] - )] - - -try: - import urllib.request -except ImportError: - pass -else: - # Download documentation if it was not compiled with latex - pdfdoc = join(dirname(realpath(__file__)), "doc/PyCorrFit_doc.pdf") - webdoc = "https://github.com/FCS-analysis/PyCorrFit/wiki/PyCorrFit_doc.pdf" - if not exists(pdfdoc): - print("Downloading {} from {}".format(pdfdoc, webdoc)) - try: - urllib.request.urlretrieve(webdoc, pdfdoc) - except: - print("Failed to download documentation.") - -# Parameters -author = u"Paul Müller" -authors = [author] -description = 'Scientific tool for fitting correlation curves on a logarithmic plot.' -name = 'pycorrfit' -year = "2014" - -sys.path.insert(0, realpath(dirname(__file__))+"/"+name) -try: - from _version import version -except: - version = "unknown" - -setup( - author=author, - author_email='dev@craban.de', - data_files=[('pycorrfit_doc', ['CHANGELOG', 'doc/PyCorrFit_doc.pdf'])], - description=description, - long_description=open('README.rst').read() if exists('README.rst') else '', - include_package_data=True, - license="GPL v2", - name=name, - platforms=['ALL'], - url='https://github.com/FCS-analysis/PyCorrFit', - version=version, - # data files - packages=find_packages(include=(name+"*",)), - package_dir={name: name}, - # cython - ext_modules = extensions, - # requirements - install_requires=[ - "lmfit >= 0.9.2", - "numpy >= 1.14.2", - "pyyaml >= 3.12", - "scipy >= 1.0.1", - ], - extras_require = { - # Graphical User Interface (pip install pycorrfit[GUI]) - 'GUI': ["matplotlib >= 2.2.2", - "sympy >= 1.1.1", - "simplejson", # for updates - "wxPython >= 4.0.1", - ], - }, - setup_requires=["cython", 'numpy'], - python_requires='>=3.10, <4', - # scripts - entry_points={ - 
"gui_scripts": ["pycorrfit=pycorrfit.gui.main:Main"] - }, - keywords=["fluorescence correlation spectroscopy", - ], - classifiers= [ - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3', - 'Topic :: Scientific/Engineering :: Visualization', - 'Intended Audience :: Science/Research' - ], - ) +from os.path import dirname, exists, join, realpath +from urllib.request import urlretrieve + +from setuptools import setup + +# Download documentation if it was not compiled with latex +pdfdoc = join(dirname(realpath(__file__)), "doc/PyCorrFit_doc.pdf") +webdoc = "https://github.com/FCS-analysis/PyCorrFit/wiki/PyCorrFit_doc.pdf" +if not exists(pdfdoc): + print("Downloading {} from {}".format(pdfdoc, webdoc)) + try: + urlretrieve(webdoc, pdfdoc) + except Exception as _e: + print("Failed to download documentation.") + +setup() diff --git a/tests/data_file_dl.py b/tests/data_file_dl.py index ab15caf6..0b2d6c93 100644 --- a/tests/data_file_dl.py +++ b/tests/data_file_dl.py @@ -6,6 +6,7 @@ import json import os from os.path import abspath, dirname, join, exists +import pathlib import urllib3 import warnings @@ -112,8 +113,15 @@ def get_data_files_ext(extension, dldir=dldir, pool_manager=pool_manager, # Get file list and download files = get_data_tree_remote( pool_manager=pool_manager, api_origin=api_origin) + # also grab the local files that are in development + files += list(map( + lambda p: str(p.relative_to(pathlib.Path(dldir))), + filter( + lambda p: p.is_file(), + pathlib.Path(dldir).rglob(f"*{ext}")))) extfiles = [f for f in files if f.lower().startswith( ext[1:]+"/") and f.lower().endswith(ext)] + extfiles = list(set(extfiles)) extfiles.sort() dl_files = [] diff --git a/tests/test_file_formats.py b/tests/test_file_formats.py index d21297b3..8a42355c 100644 --- a/tests/test_file_formats.py +++ b/tests/test_file_formats.py @@ -1,4 +1,5 @@ """Test support for FCS file formats""" + import os from os.path import split import warnings @@ -18,8 +19,9 @@ 
@pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") def test_asc_all_open(): # get list of supported file extensions - ext = "asc" + ext = "ASC" files = data_file_dl.get_data_files_ext(ext) + assert files for f in files: if [ex for ex in exclude if f.endswith(ex)]: continue @@ -34,14 +36,10 @@ def test_asc_alv7004usb(): f1 = data_file_dl.get_data_file("ALV-7004USB_ac01_cc01_10.ASC") data = pycorrfit.readfiles.open_any(f1) assert data["Type"] == ["AC1", "AC2", "CC12", "CC21"] - assert np.allclose(data["Correlation"][0][10], - np.array([0.000275, 0.11208])) - assert np.allclose(data["Correlation"][1][12], - np.array([0.000325, 0.0900233])) - assert np.allclose(data["Correlation"][2][18], - np.array([0.00055, 0.0582773])) - assert np.allclose(data["Correlation"][3][120], - np.array([3.6864, 0.0224212])) + assert np.allclose(data["Correlation"][0][10], np.array([0.000275, 0.11208])) + assert np.allclose(data["Correlation"][1][12], np.array([0.000325, 0.0900233])) + assert np.allclose(data["Correlation"][2][18], np.array([0.00055, 0.0582773])) + assert np.allclose(data["Correlation"][3][120], np.array([3.6864, 0.0224212])) assert len(data["Trace"][0]) == 253 assert len(data["Trace"][1]) == 253 assert len(data["Trace"][2]) == 2 @@ -58,8 +56,7 @@ def test_asc_alv7004usb(): # There are empty AC2 and CC12/CC21 curves in this file that should # be removed by pycorrfit. 
assert data2["Type"] == ["AC1"] - assert np.allclose(data2["Correlation"][0][56], - np.array([0.0144, 0.0513857])) + assert np.allclose(data2["Correlation"][0][56], np.array([0.0144, 0.0513857])) assert len(data2["Trace"][0]) == 254 assert np.allclose(data2["Trace"][0][210], np.array([49453.13, 165.41434])) @@ -68,10 +65,8 @@ def test_asc_alv7004usb(): assert len(data3["Type"]) == 1 assert len(data3["Trace"][0]) == 66 assert data3["Type"][0] == "AC" - assert np.allclose(data3["Correlation"][0][56], - np.array([0.0144, 0.38757])) - assert np.allclose(data3["Trace"][0][60], - np.array([1.21523440e5, 5.11968700e1])) + assert np.allclose(data3["Correlation"][0][56], np.array([0.0144, 0.38757])) + assert np.allclose(data3["Trace"][0][60], np.array([1.21523440e5, 5.11968700e1])) f4 = data_file_dl.get_data_file("ALV-7004USB_ac3.ASC") data4 = pycorrfit.readfiles.open_any(f4) @@ -79,24 +74,26 @@ def test_asc_alv7004usb(): assert data4["Type"][0] == "AC" assert len(data4["Trace"][0]) == 254 + @pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") def test_cor_all_open(): - f1 = data_file_dl.get_data_file( - "1_nM_Great_Correlation_Curve_121622.cor") + f1 = data_file_dl.get_data_file("1_nM_Great_Correlation_Curve_121622.cor") data = pycorrfit.readfiles.open_any(f1) - assert(len(data['Filename']) == 3) - assert(data['Filename'][0] == '1_nM_Great_Correlation_Curve_121622.cor') - assert(len(data['Trace']) == 3) - assert(data['Trace'][0] == []) - assert(data['Type'] == ['AC', 'CC', 'AC']) - assert(len(data['Correlation']) == 3) - assert(all(map(lambda x: x.shape == (145, 2), data['Correlation']))) - + assert len(data["Filename"]) == 3 + assert data["Filename"][0] == "1_nM_Great_Correlation_Curve_121622.cor" + assert len(data["Trace"]) == 3 + assert data["Trace"][0] == [] + assert data["Type"] == ["AC", "CC", "AC"] + assert len(data["Correlation"]) == 3 + assert all(map(lambda x: x.shape == (145, 2), data["Correlation"])) + + @pytest.mark.xfail(NOAPITOKEN, 
reason="Restrictions to GitHub API") def test_csv_all_open(): # get list of supported file extensions ext = "csv" files = data_file_dl.get_data_files_ext(ext) + assert files for f in files: if [ex for ex in exclude if f.endswith(ex)]: continue @@ -110,6 +107,7 @@ def test_fcs_all_open(): # get list of supported file extensions ext = "fcs" files = data_file_dl.get_data_files_ext(ext) + assert files for f in files: if [ex for ex in exclude if f.endswith(ex)]: continue @@ -148,6 +146,7 @@ def test_pt3_all_open(): # get list of supported file extensions ext = "pt3" files = data_file_dl.get_data_files_ext(ext) + assert files for f in files: if [ex for ex in exclude if f.endswith(ex)]: continue @@ -158,8 +157,7 @@ def test_pt3_all_open(): @pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") def test_pt3_basic(): - f1 = data_file_dl.get_data_file( - "PicoQuant_SymphoTime32_A42F-4jul2014/Point_1.pt3") + f1 = data_file_dl.get_data_file("PicoQuant_SymphoTime32_A42F-4jul2014/Point_1.pt3") data = pycorrfit.readfiles.open_any(f1) trace = data["Trace"][0][0] @@ -175,6 +173,54 @@ def test_pt3_basic(): assert np.allclose(corr[100], np.array([0.72089, 0.019201608388821567])) +@pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") +def test_ptu_all_open(): + ext = "ptu" + files = data_file_dl.get_data_files_ext(ext) + assert files + for f in files: + if [ex for ex in exclude if f.endswith(ex)]: + continue + data = pycorrfit.readfiles.open_any(f) + assert data + + +@pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") +def test_ptu_hydraharp_t2_basic(): + f1 = data_file_dl.get_data_file("hydraharp/v20_t2.ptu") + data = pycorrfit.readfiles.open_any(f1) + + trace = data["Trace"][0][0] + assert trace.shape == (199, 2) + + corr = data["Correlation"][0] + assert corr.shape == (150, 2) + + +@pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") +def test_ptu_hydraharp_t3_basic(): + f1 = 
data_file_dl.get_data_file("hydraharp/v20_t3.ptu") + data = pycorrfit.readfiles.open_any(f1) + + trace = data["Trace"][0][0] + assert trace.shape == (399, 2) + + corr = data["Correlation"][0] + assert corr.shape == (150, 2) + + +@pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") +def test_ptu_picoharp_v30_t2_basic(): + f1 = data_file_dl.get_data_file("picoharp/v30_t2.ptu") + data = pycorrfit.readfiles.open_any(f1) + + trace = data["Trace"][0][0] + assert trace.shape == (306, 2) + + corr = data["Correlation"][0] + assert corr.shape == (150, 2) + + @pytest.mark.xfail(NOAPITOKEN, reason="Restrictions to GitHub API") def test_sin_all_open(): # get list of supported file extensions diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..0707ceee --- /dev/null +++ b/tox.ini @@ -0,0 +1,34 @@ +[tox] +envlist = + py311 + py312 + py313 + py314 + ruff + ty + codespell +isolated_build = True + +[testenv] +extras = test +commands = + pytest {posargs:tests} + +[testenv:ruff] +description = "ruff lint and format check" +extras = test +commands = + ruff check + ruff format --check + +[testenv:ty] +description = "ty type check" +extras = test +commands = + ty check + +[testenv:codespell] +description = "codespell spell check" +extras = test +commands = + codespell --toml pyproject.toml