diff --git a/backend/ai_service.py b/backend/ai_service.py
index 2e4d9d9..bab9423 100644
--- a/backend/ai_service.py
+++ b/backend/ai_service.py
@@ -26,7 +26,12 @@
 if os.environ.get("ENVIRONMENT") == "production":
     logger.warning("GEMINI_API_KEY not set in production environment!")
 
-genai.configure(api_key=api_key)
+try:
+    genai.configure(api_key=api_key)
+    _GEMINI_CONFIGURED = True
+except Exception as e:
+    logger.error(f"Failed to configure Gemini AI: {e}")
+    _GEMINI_CONFIGURED = False
 
 RESPONSIBILITY_MAP_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "data", "responsibility_map.json")
@@ -125,6 +130,10 @@ async def generate_action_plan(issue_description: str, category: str, language:
     async def _generate_with_gemini() -> dict:
         """Inner function to generate action plan with Gemini"""
+        if not _GEMINI_CONFIGURED:
+            logger.warning("Gemini AI not configured, returning fallback action plan")
+            return fallback_response
+
         model = genai.GenerativeModel('gemini-1.5-flash')
 
         prompt = f"""
@@ -184,6 +193,9 @@ async def chat_with_civic_assistant(query: str) -> str:
     """
     async def _chat_with_gemini() -> str:
         """Inner function to chat with Gemini"""
+        if not _GEMINI_CONFIGURED:
+            return "I am currently running in offline mode and cannot process complex queries. Please check back later."
+
         model = genai.GenerativeModel('gemini-1.5-flash')
 
         prompt = f"""
diff --git a/backend/gemini_summary.py b/backend/gemini_summary.py
index 64e665a..7bd0cf5 100644
--- a/backend/gemini_summary.py
+++ b/backend/gemini_summary.py
@@ -19,9 +19,14 @@
 # Configure Gemini (mandatory environment variable)
 api_key = os.environ.get("GEMINI_API_KEY")
+_GEMINI_CONFIGURED = False
 
 if api_key:
-    genai.configure(api_key=api_key)
+    try:
+        genai.configure(api_key=api_key)
+        _GEMINI_CONFIGURED = True
+    except Exception as e:
+        logger.error(f"Failed to configure Gemini AI: {e}")
 else:
     # Gemini disabled (mock/local mode)
     genai = None
@@ -66,6 +71,9 @@ async def generate_mla_summary(
     """
     async def _generate_mla_summary_with_gemini() -> str:
         """Inner function to generate MLA summary with Gemini"""
+        if not _GEMINI_CONFIGURED or not genai:
+            return _get_fallback_summary(mla_name, assembly_constituency, district)
+
         model = genai.GenerativeModel('gemini-1.5-flash')
 
         issue_context = f" particularly regarding {issue_category} issues" if issue_category else ""