diff --git a/cli/tests/test_populate.py b/cli/tests/test_populate.py index 86b48e36..20a5e4d9 100644 --- a/cli/tests/test_populate.py +++ b/cli/tests/test_populate.py @@ -13,8 +13,6 @@ # limitations under the License. """Patient queue tests""" -# ruff: noqa: SLF001 allow accessing of private members for mocking - from __future__ import annotations from typing import TYPE_CHECKING diff --git a/pixl_core/src/core/exports.py b/pixl_core/src/core/exports.py index c884b6d9..9f73442b 100644 --- a/pixl_core/src/core/exports.py +++ b/pixl_core/src/core/exports.py @@ -134,7 +134,14 @@ def upload(self) -> None: else: uploader = get_uploader(self.project_slug) - - msg = f"Uploading parquet files for project {self.project_slug} via '{destination}'" - logger.info(msg) + logger.info( + "Starting upload of parquet files for project {} via '{}'", + self.project_slug, + destination, + ) uploader.upload_parquet_files(self) + logger.success( + "Finished uploading parquet files for project {} via '{}'", + self.project_slug, + destination, + ) diff --git a/pixl_core/src/core/project_config/pixl_config_model.py b/pixl_core/src/core/project_config/pixl_config_model.py index 7a0d8b63..62aa60ed 100644 --- a/pixl_core/src/core/project_config/pixl_config_model.py +++ b/pixl_core/src/core/project_config/pixl_config_model.py @@ -125,6 +125,7 @@ class _DestinationEnum(str, Enum): ftps = "ftps" dicomweb = "dicomweb" xnat = "xnat" + tre = "tre" class _Destination(BaseModel): diff --git a/pixl_core/src/core/uploader/__init__.py b/pixl_core/src/core/uploader/__init__.py index b87bc9d0..b244ebdc 100644 --- a/pixl_core/src/core/uploader/__init__.py +++ b/pixl_core/src/core/uploader/__init__.py @@ -29,19 +29,21 @@ from ._dicomweb import DicomWebUploader from ._ftps import FTPSUploader +from ._treapi import TreApiUploader from ._xnat import XNATUploader if TYPE_CHECKING: from core.uploader.base import Uploader -# Intenitonally defined in __init__.py to avoid circular imports +# Intentionally defined in __init__.py to avoid circular imports def get_uploader(project_slug: str) -> Uploader: """Uploader Factory, returns uploader instance based on destination.""" choices: dict[str, type[Uploader]] = { "ftps": FTPSUploader, "dicomweb": DicomWebUploader, "xnat": XNATUploader, + "tre": TreApiUploader, } project_config = load_project_config(project_slug) destination = project_config.destination.dicom diff --git a/pixl_core/src/core/uploader/_treapi.py b/pixl_core/src/core/uploader/_treapi.py new file mode 100644 index 00000000..317b9be5 --- /dev/null +++ b/pixl_core/src/core/uploader/_treapi.py @@ -0,0 +1,226 @@ +# Copyright (c) University College London Hospitals NHS Foundation Trust +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Uploader subclass for the ARC TRE API.""" + +from __future__ import annotations + +import zipfile +from io import BytesIO +from pathlib import Path +from typing import TYPE_CHECKING + +import requests +from decouple import config + +from core.uploader._orthanc import StudyTags, get_study_zip_archive +from core.uploader.base import Uploader + +if TYPE_CHECKING: + from core.exports import ParquetExport + +# API Configuration +TRE_API_URL = "https://api.tre.arc.ucl.ac.uk/v0" +REQUEST_TIMEOUT = 10 + +# HTTP Status Codes +HTTP_OK = 200 + + +class TreApiUploader(Uploader): + """ + Uploader for the ARC TRE API. + + This uploader handles uploading DICOM images and parquet files to the ARC TRE + via their REST API. Files are uploaded to an airlock and then flushed to the + main project storage. + """ + + def __init__(self, project_slug: str, keyvault_alias: str | None = None) -> None: + """ + Initialize the TRE API uploader. + + Args: + project_slug: The project identifier + keyvault_alias: Optional Azure Key Vault alias for authentication + + """ + super().__init__(project_slug, keyvault_alias) + + def _set_config(self) -> None: + """Set up authentication configuration from Azure Key Vault.""" + # Use the Azure KV alias as prefix if it exists, otherwise use the project name + prefix = self.keyvault_alias or self.project_slug + self.token = self.keyvault.fetch_secret(f"{prefix}--api--token") + self.headers = {"Authorization": f"Bearer {self.token}"} + self.host = TRE_API_URL + self.upload_timeout = int(config("HTTP_TIMEOUT", default=30)) + + def _upload_dicom_image(self, study_id: str, study_tags: StudyTags) -> None: + """ + Upload a DICOM image to the TRE API. + + Args: + study_id: The study identifier + study_tags: Study metadata containing the pseudo anonymized ID + + """ + zip_content = get_study_zip_archive(study_id) + self.send_via_api(zip_content, f"{study_tags.pseudo_anon_image_id}.zip") + + def upload_parquet_files(self, parquet_export: ParquetExport) -> None: + """ + Upload parquet files as a zip archive to the TRE API. + + Args: + parquet_export: The parquet export containing files to upload + + Raises: + FileNotFoundError: If no parquet files are found in the export + + """ + source_root_dir = parquet_export.current_extract_base + source_files = list(source_root_dir.rglob("*.parquet")) + + if not source_files: + msg = f"No parquet files found in {source_root_dir}" + raise FileNotFoundError(msg) + + # Create zip file + zip_filename = f"{source_root_dir.name}.zip" + zip_file = _create_zip_archive(source_files, source_root_dir, zip_filename) + + # Upload the zip file + self.send_via_api(BytesIO(zip_file.read_bytes()), zip_file.name) + self.flush() # Not ideal, as this may cause multiple flushes in short period + + def send_via_api(self, data: BytesIO, filename: str) -> None: + """ + Upload data to the TRE API. + + Args: + data: The data to upload as a BytesIO stream + filename: The filename for the uploaded data + + Raises: + RuntimeError: If the token is invalid or upload fails + + """ + if not self._is_token_valid(): + msg = f"Token invalid: {self.token}" + raise RuntimeError(msg) + + self._upload_file(data, filename) + + def _is_token_valid(self) -> bool: + """ + Check if the current token is valid. 
+ + Returns: + True if the token is valid, False otherwise + + """ + try: + response = requests.get( + url=f"{self.host}/tokens/info", + headers=self.headers, + timeout=REQUEST_TIMEOUT, + ) + response.raise_for_status() + except requests.RequestException: + return False + else: + return response.status_code == HTTP_OK + + def _upload_file(self, content: BytesIO, filename: str) -> None: + """ + Upload a file to the TRE airlock. + + Args: + content: The file content as a BytesIO stream + filename: The filename for the uploaded file + + Raises: + RuntimeError: If the upload fails + + """ + try: + response = requests.post( + url=f"{self.host}/airlock/upload/{filename}", + headers=self.headers, + data=content, + timeout=self.upload_timeout, + ) + response.raise_for_status() + + except requests.RequestException as e: + msg = f"Failed to upload file {filename}: {e}" + raise RuntimeError(msg) from e + + def flush(self) -> None: + """ + Flush the TRE airlock to move files to main project storage. + + This operation scans and moves files from quarantine storage to the + associated project storage. The operation is asynchronous and expensive, + so it should be called sparingly (e.g., once per day). + + Note: Files are automatically deleted if not moved within 7 days. + + Raises: + RuntimeError: If the flush operation fails + + """ + try: + response = requests.put( + url=f"{self.host}/airlock/flush", + headers=self.headers, + timeout=REQUEST_TIMEOUT, + ) + response.raise_for_status() + + except requests.RequestException as e: + msg = f"Failed to flush airlock: {e}" + raise RuntimeError(msg) from e + + +def _create_zip_archive(files: list[Path], root_dir: Path, zip_filename: str) -> Path: + """ + Create a zip archive from a list of files. + + Args: + files: List of file paths to include in the archive + root_dir: Root directory for relative paths, used to preserve the + directory structure of the input files + zip_filename: Filename for the output zip file + + Returns: + Path to the created zip file + + Raises: + OSError: If zip file creation fails + + """ + zip_path = Path(zip_filename) + + try: + with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf: + for file_path in files: + source_rel_path = file_path.relative_to(root_dir) + zipf.write(file_path, arcname=source_rel_path) + except OSError as e: + msg = f"Failed to create zip file {zip_filename}: {e}" + raise OSError(msg) from e + + return zip_path diff --git a/pixl_core/tests/patient_queue/test_subscriber.py b/pixl_core/tests/patient_queue/test_subscriber.py index f4872b46..f0b8f0e5 100644 --- a/pixl_core/tests/patient_queue/test_subscriber.py +++ b/pixl_core/tests/patient_queue/test_subscriber.py @@ -53,8 +53,8 @@ async def test_create(mock_message) -> None: # Cancel before assertion so the task doesn't hang task.cancel() # need to close the connection and channel - await consumer._channel.close() # noqa: SLF001 - await consumer._connection.close() # noqa: SLF001 + await consumer._channel.close() + await consumer._connection.close() consume.assert_called_once() # Fail on purpose to check async test awaited raise ExpectedTestError diff --git a/pixl_core/tests/uploader/test_dicomweb.py b/pixl_core/tests/uploader/test_dicomweb.py index 751eb4a8..4f9ce996 100644 --- a/pixl_core/tests/uploader/test_dicomweb.py +++ b/pixl_core/tests/uploader/test_dicomweb.py @@ -60,7 +60,7 @@ def dicomweb_uploader() -> MockDicomWebUploader: def test_dicomweb_server_config(run_containers, dicomweb_uploader) -> None: """Tests that the DICOMWeb server is 
configured correctly in Orthanc""" - dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method + dicomweb_uploader._setup_dicomweb_credentials() servers_response = requests.get( ORTHANC_ANON_URL + "/dicom-web/servers", auth=(ORTHANC_USERNAME, ORTHANC_PASSWORD), @@ -99,7 +99,7 @@ def test_upload_dicom_image( pseudo_anon_image_id=not_yet_exported_dicom_image.pseudo_study_uid, patient_id="patient", ) - dicomweb_uploader._upload_dicom_image( # noqa: SLF001 + dicomweb_uploader._upload_dicom_image( study_id, study_tags, ) @@ -119,7 +119,7 @@ def test_dicomweb_upload_fails_with_wrong_credentials( dicomweb_uploader.endpoint_password = "wrong" with pytest.raises(requests.exceptions.ConnectionError): - dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method + dicomweb_uploader._setup_dicomweb_credentials() def test_dicomweb_upload_fails_with_wrong_url(study_id, run_containers, dicomweb_uploader) -> None: @@ -127,4 +127,4 @@ dicomweb_uploader.endpoint_url = "http://wrong" with pytest.raises(requests.exceptions.ConnectionError): - dicomweb_uploader._setup_dicomweb_credentials() # noqa: SLF001, private method + dicomweb_uploader._setup_dicomweb_credentials() diff --git a/pixl_core/tests/uploader/test_ftps.py b/pixl_core/tests/uploader/test_ftps.py index e6c37b83..0a932d21 100644 --- a/pixl_core/tests/uploader/test_ftps.py +++ b/pixl_core/tests/uploader/test_ftps.py @@ -94,15 +94,14 @@ def test_update_exported_and_save(rows_in_session) -> None: """Tests that the exported_at field is updated when a file is uploaded""" # ARRANGE expected_export_time = datetime.now(tz=UTC) + uid = generate_uid(entropy_srcs=["not_yet_exported"]) - # ACT - update_exported_at(generate_uid(entropy_srcs=["not_yet_exported"]), expected_export_time) - new_row = ( - rows_in_session.query(Image) - .filter(Image.pseudo_study_uid == generate_uid(entropy_srcs=["not_yet_exported"])) - .one() - ) - actual_export_time = new_row.exported_at.replace(tzinfo=UTC) + # ACT + update_exported_at(uid, expected_export_time) + + # Retrieve updated record + updated_record = rows_in_session.query(Image).filter(Image.pseudo_study_uid == uid).one() + actual_export_time = updated_record.exported_at.replace(tzinfo=UTC) # ASSERT assert actual_export_time == expected_export_time @@ -129,7 +128,6 @@ def parquet_export(export_dir) -> ParquetExport: def test_upload_parquet(parquet_export, ftps_home_dir, ftps_uploader) -> None: """Tests that parquet files are uploaded to the correct location (but ignore their contents)""" # ARRANGE - parquet_export.copy_to_exports(Path(__file__).parents[3] / "test" / "resources" / "omop") parquet_export.export_radiology_linker(pd.DataFrame(list("dummy"), columns=["D"])) diff --git a/pixl_core/tests/uploader/test_treapi.py b/pixl_core/tests/uploader/test_treapi.py new file mode 100644 index 00000000..c892a837 --- /dev/null +++ b/pixl_core/tests/uploader/test_treapi.py @@ -0,0 +1,329 @@ +# Copyright (c) University College London Hospitals NHS Foundation Trust +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for the TRE API uploader module.""" + +from __future__ import annotations + +import filecmp +import zipfile +from datetime import UTC, datetime +from io import BytesIO +from pathlib import Path +from typing import TYPE_CHECKING +from unittest.mock import Mock + +import pandas as pd +import pytest +import requests +from pydicom.uid import generate_uid + +from core.db.models import Image +from core.db.queries import update_exported_at +from core.uploader._treapi import TreApiUploader, _create_zip_archive + +if TYPE_CHECKING: + from collections.abc import Generator + +# Constants +TEST_DIR = Path(__file__).parents[1] +MOCK_API_TOKEN = "test_token_123" +MOCK_HOST = "https://api.example.com" + +# HTTP Status Codes +HTTP_OK = 200 +HTTP_CREATED = 201 +HTTP_UNAUTHORIZED = 401 +HTTP_BAD_REQUEST = 400 + + +class MockTreApiUploader(TreApiUploader): + """Mock TRE API uploader for testing without real API calls.""" + + def __init__(self) -> None: + """Initialize mock uploader with test configuration.""" + self.host = MOCK_HOST + self.token = MOCK_API_TOKEN + self.headers = {"Authorization": f"Bearer {self.token}"} + + +@pytest.fixture +def mock_uploader(mocker) -> MockTreApiUploader: + """Create a mock TRE API uploader with HTTP requests mocked.""" + # Mock all HTTP requests + mock_get = mocker.patch("core.uploader._treapi.requests.get") + mock_post = mocker.patch("core.uploader._treapi.requests.post") + mock_put = mocker.patch("core.uploader._treapi.requests.put") + + # Configure default successful responses + _configure_success_responses(mock_get, mock_post, mock_put, mocker) + + return MockTreApiUploader() + + +def _configure_success_responses(mock_get, mock_post, mock_put, mocker) -> None: + """Configure mock responses for successful API calls.""" + # Token validation response + token_response = mocker.Mock() + token_response.raise_for_status.return_value = None + token_response.status_code = HTTP_OK + mock_get.return_value = token_response + + # Upload response + upload_response = mocker.Mock() + upload_response.raise_for_status.return_value = None + upload_response.status_code = HTTP_CREATED + mock_post.return_value = upload_response + + # Flush response + flush_response = mocker.Mock() + flush_response.raise_for_status.return_value = None + flush_response.status_code = HTTP_OK + mock_put.return_value = flush_response + + +@pytest.fixture +def test_zip_content() -> Generator[BytesIO, None, None]: + """Provide test zip file content.""" + test_zip_path = TEST_DIR / "data" / "public.zip" + with test_zip_path.open("rb") as file_content: + yield BytesIO(file_content.read()) + + +@pytest.fixture +def parquet_export(export_dir): + """ + Create a ParquetExport instance for testing. + + Note: Import is done locally to avoid circular dependencies. 
+ """ + from core.exports import ParquetExport # noqa: PLC0415 + + return ParquetExport( + project_name_raw="test-project", + extract_datetime=datetime.now(tz=UTC), + export_dir=export_dir, + ) + + +class TestTreApiUploader: + """Test suite for TRE API uploader functionality.""" + + def test_send_via_api_success( + self, test_zip_content, not_yet_exported_dicom_image, mock_uploader, mocker + ) -> None: + """Test successful data upload via API.""" + # Arrange + pseudo_id = not_yet_exported_dicom_image.pseudo_study_uid + filename = f"{pseudo_id}.zip" + + mock_is_token_valid = mocker.patch.object( + mock_uploader, "_is_token_valid", return_value=True + ) + mock_upload_file = mocker.patch.object(mock_uploader, "_upload_file") + + # Act + mock_uploader.send_via_api(test_zip_content, filename) + + # Assert + mock_is_token_valid.assert_called_once() + mock_upload_file.assert_called_once_with(test_zip_content, filename) + + def test_send_via_api_invalid_token(self, test_zip_content, mock_uploader, mocker) -> None: + """Test API upload with invalid token raises error.""" + # Arrange + mock_is_token_valid = mocker.patch.object( + mock_uploader, "_is_token_valid", return_value=False + ) + + # Act & Assert + with pytest.raises(RuntimeError, match="Token invalid"): + mock_uploader.send_via_api(test_zip_content, "test.zip") + + mock_is_token_valid.assert_called_once() + + def test_upload_dicom_image(self, not_yet_exported_dicom_image, mock_uploader, mocker) -> None: + """Test DICOM image upload workflow.""" + # Arrange + study_id = "test_study_123" + study_tags = Mock() + study_tags.pseudo_anon_image_id = not_yet_exported_dicom_image.pseudo_study_uid + + mock_zip_content = BytesIO(b"mock_zip_data") + mock_get_study_zip = mocker.patch( + "core.uploader._treapi.get_study_zip_archive", return_value=mock_zip_content + ) + mock_send_via_api = mocker.patch.object(mock_uploader, "send_via_api") + + # Act + mock_uploader._upload_dicom_image(study_id, study_tags) + + # Assert + mock_get_study_zip.assert_called_once_with(study_id) + mock_send_via_api.assert_called_once_with( + mock_zip_content, f"{study_tags.pseudo_anon_image_id}.zip" + ) + + def test_token_validation_success(self, mock_uploader, mocker) -> None: + """Test successful token validation.""" + # Arrange + mock_response = Mock() + mock_response.status_code = HTTP_OK + mock_response.raise_for_status.return_value = None + mock_get = mocker.patch("core.uploader._treapi.requests.get", return_value=mock_response) + + # Act + result = mock_uploader._is_token_valid() + + # Assert + assert result is True + mock_get.assert_called_once_with( + url=f"{mock_uploader.host}/tokens/info", + headers=mock_uploader.headers, + timeout=10, + ) + + def test_token_validation_failure(self, mock_uploader, mocker) -> None: + """Test token validation failure.""" + # Arrange + mock_response = Mock() + mock_response.status_code = HTTP_UNAUTHORIZED + mock_response.raise_for_status.return_value = None + mocker.patch("core.uploader._treapi.requests.get", return_value=mock_response) + + # Act + result = mock_uploader._is_token_valid() + + # Assert + assert result is False + + def test_token_validation_request_exception(self, mock_uploader, mocker) -> None: + """Test token validation with request exception.""" + # Arrange + mocker.patch( + "core.uploader._treapi.requests.get", + side_effect=requests.RequestException("Network error"), + ) + + # Act + result = mock_uploader._is_token_valid() + + # Assert + assert result is False + + def test_upload_parquet_files(self, parquet_export, 
mock_uploader, mocker) -> None: + """Test uploading parquet files as a zip archive.""" + # Arrange + parquet_export.copy_to_exports(Path(__file__).parents[3] / "test" / "resources" / "omop") + parquet_export.export_radiology_linker(pd.DataFrame(["test_data"], columns=["data"])) + + mock_send_via_api = mocker.patch.object(mock_uploader, "send_via_api") + mock_flush = mocker.patch.object(mock_uploader, "flush") + + # Act + mock_uploader.upload_parquet_files(parquet_export) + + # Assert + mock_send_via_api.assert_called_once() + mock_flush.assert_called_once() + + # Verify call arguments + call_args = mock_send_via_api.call_args + assert isinstance(call_args[0][0], BytesIO) + assert call_args[0][1] == f"{parquet_export.current_extract_base.name}.zip" + + def test_upload_parquet_files_no_files(self, parquet_export, mock_uploader) -> None: + """Test error when no parquet files are found.""" + # Arrange + parquet_export.public_output.mkdir(parents=True, exist_ok=True) + + # Act & Assert + with pytest.raises(FileNotFoundError, match="No parquet files found"): + mock_uploader.upload_parquet_files(parquet_export) + + +class TestDatabaseOperations: + """Test suite for database-related operations.""" + + def test_update_exported_timestamp(self, rows_in_session) -> None: + """Test updating the exported timestamp in the database.""" + # Arrange + expected_export_time = datetime.now(tz=UTC) + uid = generate_uid(entropy_srcs=["not_yet_exported"]) + + # Act + update_exported_at(uid, expected_export_time) + + # Retrieve updated record + updated_record = rows_in_session.query(Image).filter(Image.pseudo_study_uid == uid).one() + actual_export_time = updated_record.exported_at.replace(tzinfo=UTC) + + # Assert + assert actual_export_time == expected_export_time + + +class TestUtilityFunctions: + """Test suite for utility functions.""" + + def test_create_zip_archive_preserves_dir_structure(self, tmp_path) -> None: + """Test zip archive creation with directory structure.""" + # Arrange + test_files = self._create_test_files(tmp_path) + zip_filename = str(tmp_path / "dir_structure.zip") + + # Act + zip_path = _create_zip_archive(test_files, self.root_dir, zip_filename) + + # Assert + assert zip_path.exists() + assert zipfile.is_zipfile(str(zip_path)) + + # Extract archive and verify contents + extract_path = tmp_path / "extracted" + with zipfile.ZipFile(str(zip_path), "r") as zipf: + zipf.extractall(extract_path) + + # Print a difference report to aid debugging (this makes no assertions) + dc = filecmp.dircmp(self.root_dir, extract_path) + dc.report_full_closure() + + assert extract_path.exists() + assert (extract_path / "dir1" / "file1.txt").exists() + assert (extract_path / "dir2" / "subdir" / "file2.txt").exists() + + def test_create_zip_archive_empty_files(self, tmp_path) -> None: + """Test zip archive creation with empty file list.""" + # Arrange + zip_filename = str(tmp_path / "empty.zip") + + # Act + zip_path = _create_zip_archive([], tmp_path, zip_filename) # root_dir is unused for an empty file list + + # Assert + assert zip_path.exists() + assert zipfile.is_zipfile(str(zip_path)) + + # Verify empty zip + with zipfile.ZipFile(str(zip_path), "r") as zipf: + assert len(zipf.namelist()) == 0 + + def _create_test_files(self, tmp_path: Path) -> list[Path]: + """Create test files with directory structure for zip archive testing.""" + self.root_dir = tmp_path / "root" + dir1 = self.root_dir / "dir1" + dir2 = self.root_dir / "dir2" / "subdir" + file1 = dir1 / "file1.txt" + file2 = dir2 / "file2.txt" + + self.root_dir.mkdir(parents=True) + 
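# Build a nested layout (dir1/ and dir2/subdir/) so the test can verify that relative paths are preserved + 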
dir1.mkdir(parents=True) + dir2.mkdir(parents=True) + file1.write_text("Content of file 1") + file2.write_text("Content of file 2") + + return [file1, file2] diff --git a/pixl_dcmd/pyproject.toml b/pixl_dcmd/pyproject.toml index b798e667..fa075044 100644 --- a/pixl_dcmd/pyproject.toml +++ b/pixl_dcmd/pyproject.toml @@ -10,7 +10,7 @@ dependencies = [ "core==0.2.0rc0", "arrow==1.3.0", "dicom-anonymizer==1.0.13.post1", - "dicom-validator==0.6.3", + "dicom-validator==0.7.3", "logger==1.4", "pydicom==2.4.4", "pydicom-data", diff --git a/projects/configs/uclh-nasogastric-tube-project-for-education.yaml b/projects/configs/uclh-nasogastric-tube-project-for-education.yaml new file mode 100644 index 00000000..1900ee43 --- /dev/null +++ b/projects/configs/uclh-nasogastric-tube-project-for-education.yaml @@ -0,0 +1,34 @@ +# Copyright (c) 2025 University College London Hospitals NHS Foundation Trust +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +project: + name: "uclh-nasogastric-tube-project-for-education" + azure_kv_alias: null + modalities: ["DX", "CR"] + +tag_operation_files: + base: + - "base.yaml" # Expected base config file for any project + - "xray.yaml" + manufacturer_overrides: null + +allowed_manufacturers: + - regex: ".*" + exclude_series_numbers: [] + +min_instances_per_series: 1 + +destination: + dicom: "tre" + parquet: "tre" diff --git a/ruff.toml b/ruff.toml index e74254c6..075b982d 100644 --- a/ruff.toml +++ b/ruff.toml @@ -39,6 +39,7 @@ lint.per-file-ignores = { "*test*" = [ "INP001", "PT028", # Test function parameter has default argument "S101", + "SLF001", # private member accessed ], "hooks*" = [ "INP001", "T201", diff --git a/uv.lock b/uv.lock index 1eeff09b..7e5a04c4 100644 --- a/uv.lock +++ b/uv.lock @@ -602,15 +602,16 @@ wheels = [ [[package]] name = "dicom-validator" -version = "0.6.3" +version = "0.7.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "lxml" }, { name = "pydicom" }, + { name = "pyparsing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/40/64f155550f4c1dcf4efb840d24c253a27d69ec345d2e4ac45a8825c1e022/dicom_validator-0.6.3.tar.gz", hash = "sha256:9253daca2b028fa154116d8cd993f105682b59fdb57f7516391b6a03fada8642", size = 57175, upload-time = "2024-10-24T16:49:53.942Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/db/922c5d3c662aff47e1ab1a64fe9203b5ea813ea493b7b216c03b9bfb4676/dicom_validator-0.7.3.tar.gz", hash = "sha256:4ee8376688fb94ca33c48af34bc5a93dd3e3743abefdc1d16b70eba41c4aa542", size = 63526, upload-time = "2025-10-13T17:32:49.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/4d/7c925d169f1ea27c13a07ec54e8f5442e81f990a579ec34d197319ae0450/dicom_validator-0.6.3-py3-none-any.whl", hash = "sha256:4676b5c00b6181d4c44cda4cedfbeb9a44e0d82d6a35409c8d2de3c594e9f573", size = 65215, upload-time = "2024-10-24T16:49:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/b3401e93c7301e96315d4d816f5643815f5e6797a8c511e56e91e237b546/dicom_validator-0.7.3-py3-none-any.whl", 
hash = "sha256:2bffcdf2ae774ef18c521774561987098bfe8c157fdd1fa5bf04f552a195499f", size = 72200, upload-time = "2025-10-13T17:32:48.204Z" }, ] [[package]] @@ -1510,7 +1511,7 @@ requires-dist = [ { name = "core", extras = ["dev"], marker = "extra == 'dev'", editable = "pixl_core" }, { name = "core", extras = ["test"], marker = "extra == 'test'", editable = "pixl_core" }, { name = "dicom-anonymizer", specifier = "==1.0.13.post1" }, - { name = "dicom-validator", specifier = "==0.6.3" }, + { name = "dicom-validator", specifier = "==0.7.3" }, { name = "logger", specifier = "==1.4" }, { name = "pydicom", specifier = "==2.4.4" }, { name = "pydicom-data" }, @@ -1934,6 +1935,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/42/22/40f9162e943f86f0fc927ebc648078be87def360d9d8db346619fb97df2b/pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a", size = 56111, upload-time = "2024-11-27T20:43:21.112Z" }, ] +[[package]] +name = "pyparsing" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/33/c1/1d9de9aeaa1b89b0186e5fe23294ff6517fce1bc69149185577cd31016b2/pyparsing-3.3.1.tar.gz", hash = "sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c", size = 1550512, upload-time = "2025-12-23T03:14:04.391Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" }, +] + [[package]] name = "pytest" version = "8.3.4"