Skip to content

Commit 6c54159

Browse files
committed
feat: Improve database concurrency and error handling
1 parent ea18025 commit 6c54159

3 files changed

Lines changed: 67 additions & 5 deletions

File tree

backend/app/models/feed.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,9 @@ class Feed(SQLModel, table=True):
4242
)
4343

4444
users: list["User"] = Relationship( # type: ignore # noqa: F821
45-
back_populates="feeds", link_model=UserFeedLink
45+
back_populates="feeds",
46+
link_model=UserFeedLink,
47+
sa_relationship_kwargs={"overlaps": "feed,feed_links,user"},
4648
)
4749
articles: list["Article"] = Relationship(back_populates="feed") # type: ignore # noqa: F821
4850
feed_links: list["UserFeedLink"] = Relationship( # type: ignore # noqa: F821

backend/app/models/user.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,9 @@ class User(SQLModel, table=True):
2222
back_populates="users", link_model=UserArticleLink
2323
)
2424
feeds: list["Feed"] = Relationship( # type: ignore # noqa: F821
25-
back_populates="users", link_model=UserFeedLink
25+
back_populates="users",
26+
link_model=UserFeedLink,
27+
sa_relationship_kwargs={"overlaps": "feed,feed_links,user"},
2628
)
2729
feed_links: list["UserFeedLink"] = Relationship( # type: ignore # noqa: F821
2830
back_populates="user", sa_relationship_kwargs={"overlaps": "feeds,users"}

backend/app/tasks.py

Lines changed: 61 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
import asyncio
22
import os
33
import json
4+
import time
5+
from functools import wraps
6+
from typing import Callable, TypeVar
47
from sqlmodel import create_engine, Session, select, update, delete, text
5-
from sqlalchemy import func
6-
from sqlalchemy.engine import Connection
8+
from sqlalchemy import func, event
9+
from sqlalchemy.engine import Connection, Engine
10+
from sqlite3 import OperationalError as SQLiteOperationalError
711
from datetime import datetime, timezone, timedelta
812
from loguru import logger
913
from redis import Redis # type: ignore
@@ -25,9 +29,60 @@
2529
ENGINE = create_engine(
2630
os.getenv("DATABASE_URL", "sqlite:///data/db.sqlite"),
2731
echo=bool(os.getenv("DEBUG", False)),
28-
connect_args={"check_same_thread": False},
32+
connect_args={"check_same_thread": False, "timeout": 30},
2933
)
3034

35+
36+
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):  # type: ignore[no-untyped-def]
    """Apply per-connection SQLite pragmas that improve concurrent access.

    WAL journaling allows readers to proceed alongside a single writer, and
    the busy timeout makes the driver wait (up to 30s) for a lock instead of
    failing immediately.
    """
    pragma_cursor = dbapi_connection.cursor()
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA busy_timeout=30000"):
        pragma_cursor.execute(pragma)
    pragma_cursor.close()
43+
44+
45+
T = TypeVar("T")


def _is_lock_error(exc: BaseException) -> bool:
    """Return True when *exc* looks like a transient SQLite lock error.

    Only lock contention is worth retrying; other OperationalErrors
    (missing table, malformed SQL, ...) are permanent and must propagate.
    """
    if "database is locked" in str(exc):
        return True
    cause = exc.__cause__
    return isinstance(cause, SQLiteOperationalError) and "database is locked" in str(
        cause
    )


def with_db_retry(
    max_retries: int = 3,
    base_delay: float = 0.1,
    max_delay: float = 2.0,
) -> Callable[[Callable[..., T]], Callable[..., T]]:
    """Decorator to retry database operations on lock errors with exponential backoff.

    Args:
        max_retries: Total number of attempts before giving up.
        base_delay: Initial sleep in seconds; doubled after each failed attempt.
        max_delay: Upper bound on the sleep between attempts.

    Non-lock exceptions propagate immediately. After all retries are
    exhausted, the lock error from the final attempt is re-raised.
    """

    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        @wraps(func)
        def wrapper(*args, **kwargs) -> T:  # type: ignore[no-untyped-def]
            last_exception: Exception | None = None
            for attempt in range(max_retries):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    # Only retry genuine lock contention; everything else
                    # (including other OperationalErrors) is not transient.
                    if not _is_lock_error(e):
                        raise
                    last_exception = e
                    # Don't sleep after the final attempt -- we are about
                    # to give up, so the backoff would be pure dead time.
                    if attempt + 1 < max_retries:
                        delay = min(base_delay * (2**attempt), max_delay)
                        logger.warning(
                            f"Database locked on attempt {attempt + 1}/{max_retries} "
                            f"for {func.__name__}, retrying in {delay:.2f}s"
                        )
                        time.sleep(delay)
            logger.error(
                f"Database still locked after {max_retries} retries for {func.__name__}"
            )
            raise last_exception  # type: ignore[misc]

        return wrapper

    return decorator
84+
85+
3186
redis_conn = Redis.from_url(os.getenv("REDIS_URL", "redis://localhost:6379"))
3287
low_queue = Queue("low", connection=redis_conn, default_timeout=60)
3388
medium_queue = Queue("medium", connection=redis_conn, default_timeout=60)
@@ -53,6 +108,7 @@ def compute_article_embedding(article_id: int) -> None:
53108
logger.error(f"Error computing embedding for article {article_id}: {e}")
54109

55110

111+
@with_db_retry(max_retries=3, base_delay=0.1, max_delay=1.0)
56112
def recompute_user_clusters(user_id: str) -> None:
57113
with Session(ENGINE) as session:
58114
user = session.get(User, user_id)
@@ -286,6 +342,7 @@ def run_full_maintenance() -> dict:
286342
BATCH_SIZE = int(os.getenv("FEED_FETCH_BATCH_SIZE", "10"))
287343

288344

345+
@with_db_retry(max_retries=3, base_delay=0.2, max_delay=2.0)
289346
def fetch_feed_batch(feed_ids: list[int]) -> None:
290347
async def fetch_single_feed(feed: Feed) -> Feed | None:
291348
try:
@@ -382,6 +439,7 @@ def fetch_all_feeds() -> None:
382439
)
383440

384441

442+
@with_db_retry(max_retries=5, base_delay=0.1, max_delay=2.0)
385443
def log_user_action(user_id: str, article_id: int, link_url: str) -> None:
386444
with Session(ENGINE) as session:
387445
user = session.exec(select(User).where(User.id == user_id)).first()

0 commit comments

Comments
 (0)