Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 21 additions & 8 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,12 +1,6 @@
/Data/Arrest_Data_from_2020_to_Present.csv
/Data/processed_arrest_data.csv

/app/server/server_data.db
/app/server/server_data.db-journal
/app/server/server_data.db-lock
/app/server/server_data.db-trace
/app/server/server_data.db-wal

/app/client/__pycache__/*
/app/client/client_gui.pyc
/app/client/client_gui.pyo
Expand All @@ -21,10 +15,29 @@
/app/server/server_gui.pyw
/app/server/server_gui.pyz


app/shared/__pycache__/protocol.cpython-312.pyc
app/shared/__pycache__/constants.cpython-312.pyc
app/shared/__pycache__/protocol.cpython-310.pyc
app/shared/__pycache__/constants.cpython-310.pyc
app/server/server_data.db-shm

# PostgreSQL and Docker
postgres_data/

# Migration logs
migration_*.log

# Environment files
.env
.env.local

# IDE files
.vscode/
.idea/
*.swp
*.swo

# OS files
.DS_Store
Thumbs.db

.cursor/generalrule.mdc
62 changes: 57 additions & 5 deletions app/client/client_gui.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
QDateEdit, QDoubleSpinBox, QStackedWidget, QListWidget, QListWidgetItem
)
from PySide6.QtGui import QPixmap, QFont, QIcon, QPalette, QColor
from PySide6.QtCore import Qt, QTimer, Signal, Slot, QObject, QSettings, QDate
from PySide6.QtCore import Qt, QTimer, Signal, Slot, QObject, QSettings, QDate, QThread, QThreadPool, QRunnable

# Import necessary for rendering Matplotlib figure in Qt
from matplotlib.backends.backend_qtagg import FigureCanvasQTAgg
Expand All @@ -51,6 +51,29 @@
logger = logging.getLogger('client_gui')


class _QueryResultSignals(QObject):
    """Signal holder for QueryResultProcessor.

    QRunnable is not a QObject and cannot own signals itself, so the
    worker carries this small helper. Emitting ``finished`` from the
    worker thread lets Qt queue delivery to the thread the connected
    slot's receiver lives in (the GUI thread for a ``@Slot`` on the
    main window).
    """
    finished = Signal(object)


class QueryResultProcessor(QRunnable):
    """Worker for processing large query results off the GUI thread.

    Parameters:
        result_data: the raw query result dict received from the server.
        callback: callable invoked with the processed result, or with an
            ``{'error': ...}`` dict if processing raised.

    The previous implementation called ``callback`` directly from the
    worker thread (the old comment said "Call as function, not .emit"),
    which made the GUI callback update widgets off the main thread --
    undefined behaviour in Qt. The callback is now connected to a
    signal so delivery is marshalled to the receiver's thread when the
    callback is a slot on a QObject (as it is for the main window).
    """

    def __init__(self, result_data, callback):
        super().__init__()
        self.result_data = result_data
        self.callback = callback  # kept as an attribute for backward compatibility
        self.signals = _QueryResultSignals()
        # Auto-connection: queued when the receiver is a QObject in
        # another thread, direct for a plain callable (original behaviour).
        self.signals.finished.connect(callback)
        self.setAutoDelete(True)

    def run(self):
        """Process the query result in a background thread and emit it."""
        try:
            processed_data = self.process_result(self.result_data)
            self.signals.finished.emit(processed_data)
        except Exception as e:
            import traceback
            # Report the failure through the same channel so the GUI
            # callback can route it to its error handler.
            self.signals.finished.emit({'error': f'{e}\n{traceback.format_exc()}'})

    def process_result(self, result_data):
        """Hook for heavy transformation of the raw result.

        Currently a pass-through; subclasses or future revisions can
        add expensive reshaping here without touching the threading.
        """
        return result_data


# Bridge class to convert client callbacks to Qt signals
class ClientCallbacksBridge(QObject):
Expand Down Expand Up @@ -182,6 +205,10 @@ def __init__(self):
self.callbacks_bridge = ClientCallbacksBridge()
self.plot_dialog = None # To store reference to plot dialog
self.tab_reset_handlers = {} # <--- ADD THIS LINE

# Thread pool for async processing
self.thread_pool = QThreadPool()
self.thread_pool.setMaxThreadCount(4) # Limit concurrent threads

# Load or default theme
self.current_theme = self.settings.value("theme", "dark") # Default to dark
Expand Down Expand Up @@ -478,7 +505,7 @@ def on_login_status_change(self, logged_in):
self.update_query_params() # Fetch metadata AFTER successful login

def on_query_result(self, result):
"""Handle results received from the server"""
"""Handle results received from the server with async processing for large results"""
logger.info(f"Received query result: {type(result)}")

# Check if the result is for metadata
Expand Down Expand Up @@ -528,9 +555,19 @@ def on_query_result(self, result):

# Check if the result contains data for the table (and no plot was handled)
if 'data' in result:
# This block is now only reached if fig_plot was None/False
self.query_tab.display_results(result)
logger.info("Query data result received and sent to display.")
# Check if this is a large dataset that needs async processing
data_size = len(result.get('data', []))
if data_size > 1000: # Process large datasets asynchronously
logger.info(f"Large dataset detected ({data_size} rows), processing asynchronously")
self.status_bar.showMessage("Processing large dataset...", 3000)

# Create worker for async processing
worker = QueryResultProcessor(result, self.query_result_processed)
self.thread_pool.start(worker)
else:
# Small dataset, process immediately
self.query_tab.display_results(result)
logger.info("Query data result received and sent to display.")
else:
# Handle case where neither plot nor data is present
logger.warning("Query result received with OK status but no data or plot.")
Expand Down Expand Up @@ -1123,6 +1160,21 @@ def on_plot_label_clicked(self, label_clicked):
# def on_plot_clicked(self, fig):
# ...

@Slot(object)
def query_result_processed(self, processed_result):
    """Receive the finished payload from the async result worker.

    A payload carrying an ``'error'`` key is forwarded to the shared
    error handler; anything else is handed to the query tab for display.
    """
    if 'error' in processed_result:
        # The background worker failed -- surface it and stop here.
        self.on_error(f"Error processing query result: {processed_result['error']}")
        return

    try:
        self.query_tab.display_results(processed_result)
        self.status_bar.showMessage("Query results displayed", 3000)
        logger.info("Large query result processed and displayed successfully")
    except Exception as exc:
        # Display failed even though processing succeeded; log with the
        # traceback and notify the user through the shared error path.
        logger.error(f"Error displaying processed query result: {exc}", exc_info=True)
        self.on_error(f"Error displaying query results: {exc}")


if __name__ == "__main__":
# Create application
Expand Down
30 changes: 13 additions & 17 deletions app/server/ClientHandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import socket
import queue
import time
import sqlite3
import psycopg2
import re
import logging
import os
Expand Down Expand Up @@ -237,9 +237,9 @@ def handle_login(self, data):
})

self.server.log_activity(f"Client logged in: {client_info['nickname']} ({client_info['email']})")
except sqlite3.OperationalError as sqlerr:
except psycopg2.OperationalError as sqlerr:
error_msg = str(sqlerr)
logger.error(f"SQLite error during login: {error_msg}")
logger.error(f"PostgreSQL error during login: {error_msg}")

if "no column named address" in error_msg:
self.send_error("Login failed: Database schema needs to be updated. Please restart the server.")
Expand Down Expand Up @@ -278,7 +278,7 @@ def handle_logout(self):
self.send_error("Logout failed: not logged in")

def handle_query(self, data):
"""Handle a query request"""
"""Handle a query request with async processing"""
if not self.client_info or not self.session_id:
self.send_error("Query failed: not logged in")
return
Expand All @@ -304,21 +304,17 @@ def handle_query(self, data):

logger.info(f"Processing query {query_type_id} from {self.client_info['nickname']} with params: {parameters}")

# --- Process the query based on query_type_id ---
# --- Process the query asynchronously based on query_type_id ---
result = {}
if query_type_id == 'query1':
result = self.data_processor.process_query1(parameters)
elif query_type_id == 'query2':
result = self.data_processor.process_query2(parameters)
elif query_type_id == 'query3':
result = self.data_processor.process_query3(parameters) # This one needs plot generation
elif query_type_id == 'query4':
result = self.data_processor.process_query4(parameters)

# Use async processing for heavy queries
if query_type_id == 'query4':
# Query 4 (map generation) is the most resource-intensive
logger.info(f"Starting async processing for {query_type_id}")
result = self.data_processor.process_query_async(query_type_id, parameters)
else:
# Handle unknown queryX type
logger.error(f"Unknown query type identifier received: {query_type_id}")
self.send_error(f"Query failed: Unknown query type identifier: {query_type_id}")
return
# For other queries, use regular processing but with timeout
result = self.data_processor.process_query_async(query_type_id, parameters)

# --- Log the raw result from processor ---
logger.info(f"HANDLE_QUERY: Result received from processor ({query_type_id}): {result.get('status')}, map_path_present={result.get('map_filepath') is not None}")
Expand Down
Loading