From 64ef2de1e0468c412de4ca9966a0788bf7ef1c34 Mon Sep 17 00:00:00 2001
From: Dagonite
Date: Fri, 30 Jan 2026 09:06:05 +0000
Subject: [PATCH 1/4] Add caching for job list and count endpoints

---
 fia_api/routers/jobs.py | 113 ++++++++++++++++++++++++++++++++++------
 1 file changed, 97 insertions(+), 16 deletions(-)

diff --git a/fia_api/routers/jobs.py b/fia_api/routers/jobs.py
index b58ffdf9..be6a24bb 100644
--- a/fia_api/routers/jobs.py
+++ b/fia_api/routers/jobs.py
@@ -1,11 +1,11 @@
-"""Jobs API Router"""
+"""Jobs API Router."""
 
 import io
 import json
 import os
 import zipfile
 from http import HTTPStatus
-from typing import Annotated, Literal
+from typing import Annotated, Any, Literal
 
 from fastapi import APIRouter, Depends, Query, Response
 from fastapi.responses import FileResponse, StreamingResponse
@@ -13,6 +13,7 @@
 from sqlalchemy.orm import Session
 
 from fia_api.core.auth.tokens import JWTAPIBearer, get_user_from_token
+from fia_api.core.cache import cache_get_json, cache_set_json, hash_key
 from fia_api.core.exceptions import (
     AuthError,
     NoFilesAddedError,
@@ -34,6 +35,8 @@
 
 JobsRouter = APIRouter(tags=["jobs"])
 jwt_api_security = JWTAPIBearer()
+JOB_LIST_CACHE_TTL_SECONDS = int(os.environ.get("JOB_LIST_CACHE_TTL_SECONDS", "15"))
+JOB_COUNT_CACHE_TTL_SECONDS = int(os.environ.get("JOB_COUNT_CACHE_TTL_SECONDS", "15"))
 
 OrderField = Literal[
     "start",
@@ -49,6 +52,11 @@
 ]
 
 
+def _jobs_cache_key(scope: str, payload: dict[str, Any]) -> str:
+    digest = hash_key(json.dumps(payload, sort_keys=True, separators=(",", ":")))
+    return f"fia_api:jobs:{scope}:{digest}"
+
+
 @JobsRouter.get("/jobs", tags=["jobs"])
 async def get_jobs(
     credentials: Annotated[HTTPAuthorizationCredentials, Depends(jwt_api_security)],
@@ -87,6 +95,24 @@
     else:
         user_number = user.user_number
 
+    cache_key = None
+    if JOB_LIST_CACHE_TTL_SECONDS > 0:
+        cache_key = _jobs_cache_key(
+            "list:all",
+            {
+                "user_number": user_number,
+                "include_run": include_run,
+                "limit": limit,
+                "offset": offset,
+                "order_by": order_by,
+                "order_direction": order_direction,
+                "filters": filters,
+            },
+        )
+        cached = cache_get_json(cache_key)
+        if isinstance(cached, list):
+            return cached
+
     jobs = get_all_jobs(
         session,
         limit=limit,
@@ -98,8 +124,14 @@
     )
 
     if include_run:
-        return [JobWithRunResponse.from_job(j) for j in jobs]
-    return [JobResponse.from_job(j) for j in jobs]
+        payload = [JobWithRunResponse.from_job(j).model_dump(mode="json") for j in jobs]
+    else:
+        payload = [JobResponse.from_job(j).model_dump(mode="json") for j in jobs]
+
+    if cache_key:
+        cache_set_json(cache_key, payload, JOB_LIST_CACHE_TTL_SECONDS)
+
+    return payload
 
 
 @JobsRouter.get("/instrument/{instrument}/jobs", tags=["jobs"])
@@ -144,6 +176,25 @@
     else:
         user_number = user.user_number
 
+    cache_key = None
+    if JOB_LIST_CACHE_TTL_SECONDS > 0:
+        cache_key = _jobs_cache_key(
+            "list:instrument",
+            {
+                "instrument": instrument,
+                "user_number": user_number,
+                "include_run": include_run,
+                "limit": limit,
+                "offset": offset,
+                "order_by": order_by,
+                "order_direction": order_direction,
+                "filters": filters,
+            },
+        )
+        cached = cache_get_json(cache_key)
+        if isinstance(cached, list):
+            return cached
+
     jobs = get_job_by_instrument(
         instrument,
         session,
@@ -156,8 +207,14 @@
     )
 
     if include_run:
-        return [JobWithRunResponse.from_job(j) for j in jobs]
-    return [JobResponse.from_job(j) for j in jobs]
+        payload = [JobWithRunResponse.from_job(j).model_dump(mode="json") for j in jobs]
+    else:
+        payload = [JobResponse.from_job(j).model_dump(mode="json") for j in jobs]
+
+    if cache_key:
+        cache_set_json(cache_key, payload, JOB_LIST_CACHE_TTL_SECONDS)
+
+    return payload
 
 
 @JobsRouter.get("/instrument/{instrument}/jobs/count", tags=["jobs"])
@@ -174,9 +231,20 @@ async def count_jobs_for_instrument(
     :return: CountResponse containing the count
     """
     instrument = instrument.upper()
-    return CountResponse(
-        count=count_jobs_by_instrument(instrument, session, filters=json.loads(filters) if filters else None)
-    )
+    parsed_filters = json.loads(filters) if filters else None
+
+    cache_key = None
+    if JOB_COUNT_CACHE_TTL_SECONDS > 0:
+        cache_key = _jobs_cache_key("count:instrument", {"instrument": instrument, "filters": parsed_filters})
+        cached = cache_get_json(cache_key)
+        if isinstance(cached, dict) and "count" in cached:
+            return CountResponse.model_validate(cached)
+
+    count = count_jobs_by_instrument(instrument, session, filters=parsed_filters)
+    payload = {"count": count}
+    if cache_key:
+        cache_set_json(cache_key, payload, JOB_COUNT_CACHE_TTL_SECONDS)
+    return CountResponse.model_validate(payload)
 
 
 @JobsRouter.get("/job/{job_id}", tags=["jobs"])
@@ -229,13 +297,26 @@ async def count_all_jobs(
     session: Annotated[Session, Depends(get_db_session)],
     filters: Annotated[str | None, Query(description="json string of filters")] = None,
 ) -> CountResponse:
-    """
-    Count all jobs
-    \f
-    :param filters: json string of filters
-    :return: CountResponse containing the count
-    """
-    return CountResponse(count=count_jobs(session, filters=json.loads(filters) if filters else None))
+    """
+    Count all jobs.
+    \f
+    :param filters: json string of filters
+    :return: CountResponse containing the count
+    """
+    parsed_filters = json.loads(filters) if filters else None
+
+    cache_key = None
+    if JOB_COUNT_CACHE_TTL_SECONDS > 0:
+        cache_key = _jobs_cache_key("count:all", {"filters": parsed_filters})
+        cached = cache_get_json(cache_key)
+        if isinstance(cached, dict) and "count" in cached:
+            return CountResponse.model_validate(cached)
+
+    count = count_jobs(session, filters=parsed_filters)
+    payload = {"count": count}
+    if cache_key:
+        cache_set_json(cache_key, payload, JOB_COUNT_CACHE_TTL_SECONDS)
+    return CountResponse.model_validate(payload)
 
 
 @JobsRouter.get("/job/{job_id}/filename/{filename}", tags=["jobs"])

From b4c1992578875c8bd7b19084209806f1f2c2ea28 Mon Sep 17 00:00:00 2001
From: Dagonite
Date: Mon, 2 Feb 2026 15:34:36 +0000
Subject: [PATCH 2/4] Create test_jobs_cache.py

---
 test/e2e/test_jobs_cache.py | 77 +++++++++++++++++++++++++++++++++++++
 1 file changed, 77 insertions(+)
 create mode 100644 test/e2e/test_jobs_cache.py

diff --git a/test/e2e/test_jobs_cache.py b/test/e2e/test_jobs_cache.py
new file mode 100644
index 00000000..3d937791
--- /dev/null
+++ b/test/e2e/test_jobs_cache.py
@@ -0,0 +1,77 @@
+"""Cache behavior tests for jobs endpoints."""
+
+from http import HTTPStatus
+from unittest.mock import Mock, patch
+
+from starlette.testclient import TestClient
+
+from fia_api.fia_api import app
+
+from .constants import STAFF_HEADER
+
+client = TestClient(app)
+
+
+@patch("fia_api.routers.jobs.JOB_LIST_CACHE_TTL_SECONDS", 15)
+@patch("fia_api.core.auth.tokens.requests.post")
+@patch("fia_api.routers.jobs.cache_set_json")
+@patch("fia_api.routers.jobs.get_all_jobs")
+@patch("fia_api.routers.jobs.cache_get_json")
+def test_jobs_list_cache_hit_returns_cached_payload(
+    mock_cache_get,
+    mock_get_all_jobs,
+    mock_cache_set,
+    mock_post,
+):
+    cached_payload = [
+        {
+            "id": 1,
+            "start": None,
+            "end": None,
+            "state": "NOT_STARTED",
"NOT_STARTED", + "status_message": None, + "inputs": {}, + "outputs": None, + "stacktrace": None, + "script": None, + "runner_image": None, + "type": "JobType.AUTOREDUCTION", + } + ] + mock_cache_get.return_value = cached_payload + mock_post.return_value.status_code = HTTPStatus.OK + + response = client.get("/jobs?limit=1", headers=STAFF_HEADER) + + assert response.status_code == HTTPStatus.OK + assert response.json() == cached_payload + mock_get_all_jobs.assert_not_called() + mock_cache_set.assert_not_called() + + +@patch("fia_api.routers.jobs.JOB_COUNT_CACHE_TTL_SECONDS", 15) +@patch("fia_api.routers.jobs.count_jobs") +@patch("fia_api.routers.jobs.cache_get_json") +def test_jobs_count_cache_hit_returns_cached_payload(mock_cache_get, mock_count_jobs): + cached_payload = {"count": 42} + mock_cache_get.return_value = cached_payload + + response = client.get("/jobs/count") + + assert response.status_code == HTTPStatus.OK + assert response.json() == cached_payload + mock_count_jobs.assert_not_called() + + +@patch("fia_api.routers.jobs.JOB_COUNT_CACHE_TTL_SECONDS", 15) +@patch("fia_api.routers.jobs.count_jobs_by_instrument") +@patch("fia_api.routers.jobs.cache_get_json") +def test_jobs_count_by_instrument_cache_hit_returns_cached_payload(mock_cache_get, mock_count_jobs): + cached_payload = {"count": 7} + mock_cache_get.return_value = cached_payload + + response = client.get("/instrument/TEST/jobs/count") + + assert response.status_code == HTTPStatus.OK + assert response.json() == cached_payload + mock_count_jobs.assert_not_called() From eb41604250310223f31edb6d0a91939623965cd0 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 2 Feb 2026 15:35:15 +0000 Subject: [PATCH 3/4] Formatting and linting commit --- test/e2e/test_jobs_cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/e2e/test_jobs_cache.py b/test/e2e/test_jobs_cache.py index 3d937791..49187f92 100644 --- a/test/e2e/test_jobs_cache.py +++ b/test/e2e/test_jobs_cache.py @@ -1,7 +1,7 @@ """Cache behavior tests for jobs endpoints.""" from http import HTTPStatus -from unittest.mock import Mock, patch +from unittest.mock import patch from starlette.testclient import TestClient From f2ad68263f9fdaf643791ce519f27589033c6083 Mon Sep 17 00:00:00 2001 From: Dagonite Date: Tue, 3 Feb 2026 12:52:00 +0000 Subject: [PATCH 4/4] Quell strict mypy warning --- fia_api/routers/jobs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/fia_api/routers/jobs.py b/fia_api/routers/jobs.py index be6a24bb..c8dfff0a 100644 --- a/fia_api/routers/jobs.py +++ b/fia_api/routers/jobs.py @@ -131,7 +131,7 @@ async def get_jobs( if cache_key: cache_set_json(cache_key, payload, JOB_LIST_CACHE_TTL_SECONDS) - return payload + return payload # type: ignore[return-value] @JobsRouter.get("/instrument/{instrument}/jobs", tags=["jobs"]) @@ -214,7 +214,7 @@ async def get_jobs_by_instrument( if cache_key: cache_set_json(cache_key, payload, JOB_LIST_CACHE_TTL_SECONDS) - return payload + return payload # type: ignore[return-value] @JobsRouter.get("/instrument/{instrument}/jobs/count", tags=["jobs"])