Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion backend/contributions/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@
import os
import uuid

from tally.middleware.logging_utils import get_app_logger

logger = get_app_logger('contributions')


class Category(BaseModel):
"""
Expand Down Expand Up @@ -224,7 +228,7 @@ def validate_multiplier_at_creation(sender, instance, **kwargs):
float(instance.multiplier_at_creation)
except (decimal.InvalidOperation, TypeError, ValueError):
# If conversion fails, reset the multiplier to 1.0
print(f"WARNING: Fixing corrupted multiplier_at_creation value for contribution {instance.id}")
logger.warning("Fixing corrupted multiplier_at_creation value")
instance.multiplier_at_creation = 1.0
instance.frozen_global_points = instance.points

Expand Down
10 changes: 1 addition & 9 deletions backend/contributions/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,16 +117,8 @@ def top_contributors(self, request, pk=None):
contribution_count=Count('id')
).order_by('-total_points')[:10]

# Get user objects with select_related for efficiency
user_ids = [c['user'] for c in top_contributors]
users = {
user.id: user
for user in ContributionType.objects.get(pk=pk).contributions.filter(
user_id__in=user_ids
).select_related('user', 'user__validator', 'user__builder').values_list('user', flat=True).distinct()
}

# Fetch users directly with optimization
user_ids = [c['user'] for c in top_contributors]
from users.models import User
users_dict = {
user.id: user
Expand Down
10 changes: 6 additions & 4 deletions backend/deploy-apprunner-dev.sh
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ if aws apprunner describe-service --service-arn arn:aws:apprunner:$REGION:$ACCOU
"GITHUB_ENCRYPTION_KEY": "$SSM_PREFIX/$SSM_ENV/github_encryption_key",
"GITHUB_REPO_TO_STAR": "$SSM_PREFIX/$SSM_ENV/github_repo_to_star",
"RECAPTCHA_PUBLIC_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_public_key",
"RECAPTCHA_PRIVATE_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_private_key"
"RECAPTCHA_PRIVATE_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_private_key",
"CRON_SYNC_TOKEN": "$SSM_PREFIX/$SSM_ENV/cron_sync_token"
},
"StartCommand": "./startup.sh gunicorn --bind 0.0.0.0:8000 --timeout 180 --workers 2 tally.wsgi:application"
},
Expand All @@ -98,7 +99,7 @@ if aws apprunner describe-service --service-arn arn:aws:apprunner:$REGION:$ACCOU
}
}
EOF

aws apprunner update-service \
--region $REGION \
--service-arn arn:aws:apprunner:$REGION:$ACCOUNT_ID:service/$SERVICE_NAME \
Expand Down Expand Up @@ -227,7 +228,8 @@ EOF
"GITHUB_ENCRYPTION_KEY": "$SSM_PREFIX/$SSM_ENV/github_encryption_key",
"GITHUB_REPO_TO_STAR": "$SSM_PREFIX/$SSM_ENV/github_repo_to_star",
"RECAPTCHA_PUBLIC_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_public_key",
"RECAPTCHA_PRIVATE_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_private_key"
"RECAPTCHA_PRIVATE_KEY": "$SSM_PREFIX/$SSM_ENV/recaptcha_private_key",
"CRON_SYNC_TOKEN": "$SSM_PREFIX/$SSM_ENV/cron_sync_token"
},
"StartCommand": "./startup.sh gunicorn --bind 0.0.0.0:8000 --timeout 180 --workers 2 tally.wsgi:application"
},
Expand All @@ -253,7 +255,7 @@ EOF
}
}
EOF

aws apprunner create-service \
--region $REGION \
--cli-input-json file://apprunner-create-config.json
Expand Down
17 changes: 7 additions & 10 deletions backend/ethereum_auth/authentication.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,35 +3,32 @@
from rest_framework import authentication, exceptions
from rest_framework.authentication import SessionAuthentication

from tally.middleware.logging_utils import get_app_logger

User = get_user_model()
logger = get_app_logger('auth')


class EthereumAuthentication(authentication.BaseAuthentication):
"""
Authentication class for Ethereum wallet addresses.
Uses session authentication for the actual request validation.
"""

def authenticate(self, request):
# Check if the session has an authenticated ethereum address
ethereum_address = request.session.get('ethereum_address')
authenticated = request.session.get('authenticated', False)

# Debug logging
print(f"EthereumAuthentication - Session ID: {request.session.session_key}")
print(f"EthereumAuthentication - Ethereum Address: {ethereum_address}")
print(f"EthereumAuthentication - Authenticated: {authenticated}")


if not ethereum_address or not authenticated:
return None

try:
# Get user with the authenticated ethereum address
user = User.objects.get(address__iexact=ethereum_address)
print(f"EthereumAuthentication - User found: {user.email}")
return (user, None)
except User.DoesNotExist:
print(f"EthereumAuthentication - User not found for address: {ethereum_address}")
logger.debug("User not found for authenticated session")
return None


Expand Down
23 changes: 8 additions & 15 deletions backend/ethereum_auth/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,10 @@

from .models import Nonce
from .authentication import CsrfExemptSessionAuthentication
from tally.middleware.logging_utils import get_app_logger

User = get_user_model()
logger = get_app_logger('auth')


def generate_nonce(length=32):
Expand Down Expand Up @@ -137,10 +139,10 @@ def login(request):
if referrer != user:
user.referred_by = referrer
user.save(update_fields=['referred_by'])
print(f"New user {ethereum_address} referred by {referrer.address}")
logger.debug("New user referred successfully")
except User.DoesNotExist:
# Invalid referral code, but don't fail the login
print(f"Invalid referral code provided during login: {referral_code}")
logger.warning("Invalid referral code provided during login")

# Refresh user data from database to get referral_code from signal
user.refresh_from_db()
Expand All @@ -149,12 +151,9 @@ def login(request):
request.session['ethereum_address'] = ethereum_address
request.session['authenticated'] = True
request.session.save() # Explicitly save the session

# Debug logging
print(f"Login - Session ID: {request.session.session_key}")
print(f"Login - Setting ethereum_address: {ethereum_address}")
print(f"Login - Session data: {dict(request.session)}")


logger.debug("Login successful, session created")

# Return the authenticated user with referral data
return Response({
'authenticated': True,
Expand Down Expand Up @@ -186,13 +185,7 @@ def verify_auth(request):
"""
ethereum_address = request.session.get('ethereum_address')
authenticated = request.session.get('authenticated', False)

# Debug logging
print(f"Verify - Session ID: {request.session.session_key}")
print(f"Verify - Ethereum Address: {ethereum_address}")
print(f"Verify - Authenticated: {authenticated}")
print(f"Verify - Session data: {dict(request.session)}")


if authenticated and ethereum_address:
try:
user = User.objects.get(address__iexact=ethereum_address)
Expand Down
12 changes: 7 additions & 5 deletions backend/leaderboard/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@
from django.db.models import Sum
from utils.models import BaseModel
from contributions.models import ContributionType, Contribution, Category
from tally.middleware.logging_utils import get_app_logger

logger = get_app_logger('leaderboard')


# Helper functions for leaderboard configuration
Expand Down Expand Up @@ -370,8 +373,8 @@ def log_multiplier_creation(sender, instance, created, **kwargs):
When a new multiplier is created, log it for debugging purposes.
"""
if created:
print(f"New global multiplier: {instance.contribution_type.name} - "
f"{instance.multiplier_value}x valid from {instance.valid_from.strftime('%Y-%m-%d %H:%M')}")
logger.debug(f"New global multiplier: {instance.contribution_type.name} - "
f"{instance.multiplier_value}x")


@receiver(post_save, sender=Contribution)
Expand All @@ -383,9 +386,8 @@ def update_leaderboard_on_contribution(sender, instance, created, **kwargs):
# Only update if points have changed or it's a new contribution
if created or kwargs.get('update_fields') is None or 'points' in kwargs.get('update_fields', []):
# Log the contribution's point calculation
contribution_date_str = instance.contribution_date.strftime('%Y-%m-%d %H:%M') if instance.contribution_date else "N/A"
print(f"Contribution saved: {instance.points} points × {instance.multiplier_at_creation} = "
f"{instance.frozen_global_points} global points (contribution date: {contribution_date_str})")
logger.debug(f"Contribution saved: {instance.points} points × {instance.multiplier_at_creation} = "
f"{instance.frozen_global_points} global points")

# Update the user's leaderboard entries
update_user_leaderboard_entries(instance.user)
Expand Down
5 changes: 5 additions & 0 deletions backend/tally/middleware/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
"""Tally middleware package for logging."""
from .api_logging import APILoggingMiddleware
from .db_logging import DBLoggingMiddleware

__all__ = ['APILoggingMiddleware', 'DBLoggingMiddleware']
96 changes: 96 additions & 0 deletions backend/tally/middleware/api_logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
"""
API Layer Logging Middleware.

Logs HTTP requests/responses for the [API] layer (Frontend <-> Backend).
"""
import time
from django.conf import settings

from .logging_utils import (
get_api_logger,
generate_correlation_id,
set_correlation_id,
clear_correlation_id,
format_bytes,
)


logger = get_api_logger()


class APILoggingMiddleware:
    """
    Middleware to log HTTP requests and responses for the [API] layer.

    DEBUG=true: logs all requests with method, path, status, duration, size.
    DEBUG=false: logs only errors (4xx, 5xx).
    """

    # High-volume / low-value paths: skip logging (and correlation IDs) entirely.
    SKIP_PATHS = (
        '/static/',
        '/media/',
        '/health/',
        '/favicon.ico',
        '/__debug__/',
    )

    def __init__(self, get_response):
        # Standard Django middleware hook: next callable in the chain.
        self.get_response = get_response

    def __call__(self, request):
        # Skip logging for certain paths.
        if any(request.path.startswith(path) for path in self.SKIP_PATHS):
            return self.get_response(request)

        # Tag the whole request with a correlation ID so log lines emitted by
        # different layers during this request can be joined together.
        correlation_id = generate_correlation_id()
        set_correlation_id(correlation_id)

        start_time = time.time()
        try:
            response = self.get_response(request)

            duration_ms = (time.time() - start_time) * 1000

            # Streaming responses have no .content; report 0 bytes for them.
            response_size = len(response.content) if hasattr(response, 'content') else 0

            # DB stats are attached to the request by DBLoggingMiddleware
            # (only populated when settings.DEBUG is True).
            db_query_count = getattr(request, '_db_query_count', 0)
            db_time_ms = getattr(request, '_db_time_ms', 0)
            logic_time_ms = duration_ms - db_time_ms

            # Build log message with timing breakdown when DB stats exist.
            if db_query_count > 0:
                timing_info = f"{duration_ms:.0f}ms (db: {db_time_ms:.0f}ms/{db_query_count}q, logic: {logic_time_ms:.0f}ms)"
            else:
                timing_info = f"{duration_ms:.0f}ms"

            log_message = (
                f"{request.method} {request.path} "
                f"{response.status_code} "
                f"{timing_info} "
                f"{format_bytes(response_size)}"
            )

            # Errors are always logged; success lines only in DEBUG mode.
            if response.status_code >= 500:
                logger.error(log_message)
            elif response.status_code >= 400:
                logger.warning(log_message)
            elif settings.DEBUG:
                logger.info(log_message)

            return response
        finally:
            # BUGFIX: the original cleared the correlation ID only on the
            # success path. An exception raised inside get_response (before a
            # response exists) would leak this request's correlation ID into
            # the next request served by the same thread. `finally` makes the
            # cleanup unconditional.
            clear_correlation_id()
53 changes: 53 additions & 0 deletions backend/tally/middleware/db_logging.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
"""
DB Layer Logging Middleware.

Logs database query statistics for the [DB] layer (Backend <-> Database).
"""
from django.conf import settings
from django.db import connection, reset_queries

from .logging_utils import get_db_logger


logger = get_db_logger()


class DBLoggingMiddleware:
    """
    Middleware that gathers per-request database query statistics.

    The numbers are stashed on the request object (``_db_query_count`` and
    ``_db_time_ms``) so that APILoggingMiddleware can fold them into its
    single per-request log line.

    Note: Django only records executed queries on ``connection.queries``
    when ``settings.DEBUG`` is True, so this middleware is a pass-through
    otherwise (Django limitation).
    """

    def __init__(self, get_response):
        # Standard Django middleware hook: next callable in the chain.
        self.get_response = get_response

    def __call__(self, request):
        # Query bookkeeping is only available in DEBUG mode; bail out early.
        if not settings.DEBUG:
            return self.get_response(request)

        # Drop any queries recorded before this request started.
        reset_queries()

        response = self.get_response(request)

        executed = connection.queries

        # Django reports each query's duration in seconds (as a string);
        # accumulate in milliseconds. With no queries this stays 0.
        elapsed_ms = 0
        for entry in executed:
            elapsed_ms += float(entry.get('time', 0)) * 1000

        # Hand the stats to the API-layer middleware via the request object.
        request._db_query_count = len(executed)
        request._db_time_ms = elapsed_ms

        return response
Loading