diff --git a/backend/app/agents/devrel/github/services/github_mcp_server.py b/backend/app/agents/devrel/github/services/github_mcp_server.py index 73f1c992..34cbc9d7 100644 --- a/backend/app/agents/devrel/github/services/github_mcp_server.py +++ b/backend/app/agents/devrel/github/services/github_mcp_server.py @@ -60,8 +60,8 @@ async def list_org_repos(request: OrgInfoRequest): return {"status": "success", "data": result} except Exception as e: - logger.exception("Error listing org repos") - raise HTTPException(status_code=500, detail=str(e)) + logger.error(f"Error type: {type(e).__name__}") + raise HTTPException(status_code=500, detail="Failed to list organization repositories") @app.post("/github_support") async def get_github_supp(request: RepoInfoRequest): @@ -78,8 +78,8 @@ async def get_github_supp(request: RepoInfoRequest): return RepoInfoResponse(status="error", data={}, error=result["error"]) return RepoInfoResponse(status="success", data=result) except Exception as e: - logger.exception("Error getting repo info") - raise HTTPException(status_code=500, detail=str(e)) + logger.error(f"Error type: {type(e).__name__}") + raise HTTPException(status_code=500, detail="Failed to get repository information") if __name__ == "__main__": import uvicorn diff --git a/backend/app/agents/devrel/github/services/github_mcp_service.py b/backend/app/agents/devrel/github/services/github_mcp_service.py index 4060f0af..3e324a87 100644 --- a/backend/app/agents/devrel/github/services/github_mcp_service.py +++ b/backend/app/agents/devrel/github/services/github_mcp_service.py @@ -1,9 +1,12 @@ import os import requests import asyncio +import logging from typing import Optional import config +logger = logging.getLogger(__name__) + class GitHubMCPService: def __init__(self, token: str = None): self.token = token or config.GITHUB_TOKEN @@ -18,7 +21,8 @@ def repo_query(self, owner: str, repo: str) -> dict: resp = requests.get(url, headers=headers, timeout=15) resp.raise_for_status() except requests.exceptions.RequestException as e: - return {"error": "Request failed", "message": str(e)} + logger.error(f"Error type: GitHubRequestError owner={owner} repo={repo}") + return {"error": "Request failed", "message": "GitHub API request failed"} data = resp.json() license_info = data.get("license") @@ -51,7 +55,8 @@ def list_repo_issues(self, owner: str, repo: str, state: str = "open") -> list: resp = requests.get(url, headers=headers, timeout=15) resp.raise_for_status() except requests.exceptions.RequestException as e: - return {"error": "Request failed", "message": str(e)} + logger.error(f"Error type: GitHubIssuesRequestError owner={owner} repo={repo}") + return {"error": "Request failed", "message": "Failed to fetch issues"} issues = resp.json() return [ @@ -76,7 +81,8 @@ def list_org_repos(self, org: str) -> list: resp = requests.get(url, headers=headers, timeout=15) resp.raise_for_status() except requests.exceptions.RequestException as e: - return {"error": "Request failed", "message": str(e)} + logger.error(f"Error type: GitHubOrgReposRequestError org={org}") + return {"error": "Request failed", "message": "Failed to fetch repositories"} repos = resp.json() return [ diff --git a/backend/app/agents/devrel/github/tools/github_support.py b/backend/app/agents/devrel/github/tools/github_support.py index ee3d9ff9..b89d50a2 100644 --- a/backend/app/agents/devrel/github/tools/github_support.py +++ b/backend/app/agents/devrel/github/tools/github_support.py @@ -108,5 +108,5 @@ async def handle_github_supp(query: str, org: 
Optional[str] = None): } except Exception as e: - logger.exception("GitHub support error: %s", e) - return {"status": "error", "message": str(e)} + logger.error(f"Error type: {type(e).__name__}") + return {"status": "error", "message": "An error occurred while fetching GitHub data"} diff --git a/backend/app/agents/devrel/github/tools/repo_support.py b/backend/app/agents/devrel/github/tools/repo_support.py index 56ce8c3c..a9ab71a5 100644 --- a/backend/app/agents/devrel/github/tools/repo_support.py +++ b/backend/app/agents/devrel/github/tools/repo_support.py @@ -29,7 +29,7 @@ async def handle_repo_support(query: str) -> Dict[str, Any]: } except Exception as e: - logger.exception("Repository support error") + logger.error(f"Error type: {type(e).__name__}") return { "status": "error", "sub_function": "repo_support", diff --git a/backend/app/core/cache.py b/backend/app/core/cache.py new file mode 100644 index 00000000..9a00fb31 --- /dev/null +++ b/backend/app/core/cache.py @@ -0,0 +1,162 @@ +""" +In-memory cache utility with TTL support for GitHub API responses. +Production-ready caching layer with thread safety. +""" +import hashlib +import json +import logging +import time +from typing import Any, Optional, Callable +from threading import Lock + +logger = logging.getLogger(__name__) + + +class CacheEntry: + """Represents a single cache entry with TTL.""" + + def __init__(self, value: Any, ttl_seconds: int): + self.value = value + self.created_at = time.time() + self.ttl_seconds = ttl_seconds + + def is_expired(self) -> bool: + """Check if the cache entry has expired.""" + return (time.time() - self.created_at) > self.ttl_seconds + + def __repr__(self): + remaining = self.ttl_seconds - (time.time() - self.created_at) + return f"CacheEntry(TTL: {remaining:.1f}s remaining)" + + +class SimpleCache: + """ + Thread-safe in-memory cache for API responses. + Automatically invalidates expired entries. 
+ """ + + def __init__(self, max_size: int = 1000): + self._cache: dict[str, CacheEntry] = {} + self._lock = Lock() + self.max_size = max_size + self.hits = 0 + self.misses = 0 + + def _generate_key(self, prefix: str, **kwargs) -> str: + """Generate a cache key from prefix and kwargs.""" + key_str = f"{prefix}:{json.dumps(kwargs, sort_keys=True)}" + return hashlib.md5(key_str.encode()).hexdigest() + + def get(self, key: str) -> Optional[Any]: + """Get value from cache if it exists and hasn't expired.""" + with self._lock: + if key in self._cache: + entry = self._cache[key] + if not entry.is_expired(): + self.hits += 1 + logger.debug(f"Cache hit: {key} ({entry})") + return entry.value + else: + # Remove expired entry + del self._cache[key] + self.misses += 1 + logger.debug(f"Cache miss (expired): {key}") + return None + self.misses += 1 + logger.debug(f"Cache miss: {key}") + return None + + def set(self, key: str, value: Any, ttl_seconds: int = 300) -> None: + """Set value in cache with TTL.""" + with self._lock: + # Ensure at least 1 entry is removed when cache exceeds max_size + if len(self._cache) >= self.max_size: + evict_count = max(1, int(self.max_size * 0.1)) + old_keys = sorted( + self._cache.keys(), + key=lambda k: self._cache[k].created_at + )[:evict_count] + for old_key in old_keys: + del self._cache[old_key] + logger.debug(f"Cache evicted {len(old_keys)} old entries") + + self._cache[key] = CacheEntry(value, ttl_seconds) + logger.debug(f"Cache set: {key} (TTL: {ttl_seconds}s)") + + def clear(self) -> None: + """Clear all cache entries.""" + with self._lock: + size = len(self._cache) + self._cache.clear() + logger.info(f"Cache cleared ({size} entries removed)") + + def stats(self) -> dict[str, Any]: + """Get cache statistics.""" + with self._lock: + total_requests = self.hits + self.misses + hit_rate = (self.hits / total_requests * 100) if total_requests > 0 else 0 + return { + "size": len(self._cache), + "hits": self.hits, + "misses": self.misses, + "hit_rate": f"{hit_rate:.1f}%", + "total_requests": total_requests + } + + def cleanup_expired(self) -> int: + """Remove all expired entries. Returns count of removed entries.""" + with self._lock: + expired_keys = [ + key for key, entry in self._cache.items() + if entry.is_expired() + ] + for key in expired_keys: + del self._cache[key] + if expired_keys: + logger.debug(f"Cache cleanup removed {len(expired_keys)} expired entries") + return len(expired_keys) + + +# Global cache instance +_cache = SimpleCache(max_size=1000) + + +def get_cache() -> SimpleCache: + """Get the global cache instance.""" + return _cache + + +def cache_result(prefix: str, ttl_seconds: int = 300): + """ + Decorator to cache async function results. + + Args: + prefix: Cache key prefix + ttl_seconds: Time to live for cached result (default: 5 minutes) + + Example: + @cache_result("repo_stats", ttl_seconds=600) + async def get_repo_stats(owner: str, repo: str): + ... 
+ """ + def decorator(func: Callable) -> Callable: + async def wrapper(*args, **kwargs): + cache = get_cache() + cache_key = cache._generate_key(prefix, args=args, kwargs=kwargs) + + # Try to get from cache + cached_value = cache.get(cache_key) + if cached_value is not None: + return cached_value + + # Execute function + result = await func(*args, **kwargs) + + # Cache the result + if result is not None: + cache.set(cache_key, result, ttl_seconds) + + return result + + return wrapper + return decorator diff --git a/backend/app/core/dependencies.py b/backend/app/core/dependencies.py index a06208dd..59f168a9 100644 --- a/backend/app/core/dependencies.py +++ b/backend/app/core/dependencies.py @@ -47,7 +47,7 @@ async def get_current_user(authorization: str = Header(None)) -> UUID: try: supabase = get_supabase_client() # Verify the token and get user - user_response = supabase.auth.get_user(token) + user_response = await supabase.auth.get_user(token) if not user_response or not user_response.user: raise HTTPException( diff --git a/backend/app/core/rate_limiter.py b/backend/app/core/rate_limiter.py new file mode 100644 index 00000000..cab7c40e --- /dev/null +++ b/backend/app/core/rate_limiter.py @@ -0,0 +1,21 @@ +""" +Rate limiting configuration using slowapi. +Prevents API quota exhaustion and protects against abuse. +""" +from slowapi import Limiter +from slowapi.util import get_remote_address +import logging + +logger = logging.getLogger(__name__) + +# Initialize the global rate limiter +limiter = Limiter( + key_func=get_remote_address, + default_limits=["100 per minute"], # Default fallback limit + storage_uri="memory://", # Use in-memory storage (can upgrade to Redis) +) + + +def get_limiter() -> Limiter: + """Get the global rate limiter instance.""" + return limiter diff --git a/backend/app/database/falkor/code-graph-backend/api/index.py b/backend/app/database/falkor/code-graph-backend/api/index.py index 0a87dd5e..83784bd1 100644 --- a/backend/app/database/falkor/code-graph-backend/api/index.py +++ b/backend/app/database/falkor/code-graph-backend/api/index.py @@ -70,10 +70,15 @@ def graph_entities(): if not repo: logging.error("Missing 'repo' parameter in request.") return jsonify({"status": "Missing 'repo' parameter"}), 400 + + # Validate repo format - prevent path traversal + import re + if not re.match(r'^[a-zA-Z0-9_\-\.]+$', str(repo)): + return jsonify({'status': 'Invalid repository name format'}), 400 if not graph_exists(repo): logging.error("Missing project %s", repo) - return jsonify({"status": f"Missing project {repo}"}), 400 + return jsonify({"status": "Project not found"}), 400 try: # Initialize the graph with the provided repo and credentials @@ -161,6 +166,11 @@ def auto_complete(): repo = data.get('repo') if repo is None: return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 + + # Validate repo format - prevent path traversal + import re + if not re.match(r'^[a-zA-Z0-9_\-\.]+$', str(repo)): + return jsonify({'status': 'Invalid repository name format'}), 400 # Validate that 'prefix' is provided prefix = data.get('prefix') @@ -169,7 +179,7 @@ def auto_complete(): # Validate repo exists if not graph_exists(repo): - return jsonify({'status': f'Missing project {repo}'}), 400 + return jsonify({'status': 'Project not found'}), 400 # Fetch auto-completion results completions = prefix_search(repo, prefix) @@ -273,6 +283,11 @@ def find_paths(): repo = data.get('repo') if repo is None: return jsonify({'status': 'Missing mandatory parameter "repo"'}), 400 + + # Validate 
repo format - prevent path traversal + import re + if not re.match(r'^[a-zA-Z0-9_\-\.]+$', str(repo)): + return jsonify({'status': 'Invalid repository name format'}), 400 # Validate 'src' parameter src = data.get('src') diff --git a/backend/app/services/codegraph/repo_service.py b/backend/app/services/codegraph/repo_service.py index eba4fca8..f3f1f9b8 100644 --- a/backend/app/services/codegraph/repo_service.py +++ b/backend/app/services/codegraph/repo_service.py @@ -15,7 +15,12 @@ class RepoService: def __init__(self): self.supabase = get_supabase_client() self.backend_url = os.getenv("CODEGRAPH_BACKEND_URL", "http://localhost:5000") - self.secret_token = os.getenv("SECRET_TOKEN", "DevRAI_CodeGraph_Secret") + self.secret_token = os.getenv("SECRET_TOKEN") + if not self.secret_token: + raise ValueError( + "SECRET_TOKEN environment variable must be set for CodeGraph backend authentication. " + "Please configure this in your .env file." + ) self.indexing_timeout = aiohttp.ClientTimeout(total=3600, connect=60) self.query_timeout = aiohttp.ClientTimeout(total=300, connect=30) logger.info(f"RepoService initialized with backend: {self.backend_url}") diff --git a/backend/integrations/discord/cogs.py b/backend/integrations/discord/cogs.py index 64fd0b7f..18b20354 100644 --- a/backend/integrations/discord/cogs.py +++ b/backend/integrations/discord/cogs.py @@ -29,10 +29,15 @@ def __init__(self, bot: DiscordBot, queue_manager: AsyncQueueManager): def cog_load(self): """Called when the cog is loaded""" + # Start the cleanup task - it will wait for the bot to be ready via before_loop self.cleanup_expired_tokens.start() def cog_unload(self): - self.cleanup_expired_tokens.cancel() + try: + if self.cleanup_expired_tokens.is_running(): + self.cleanup_expired_tokens.cancel() + except Exception as e: + logger.warning(f"Error cancelling cleanup task: {type(e).__name__}") @tasks.loop(minutes=5) async def cleanup_expired_tokens(self): @@ -47,7 +52,9 @@ async def cleanup_expired_tokens(self): @cleanup_expired_tokens.before_loop async def before_cleanup(self): """Wait until the bot is ready before starting cleanup""" + logger.info("Waiting for bot to be ready before starting cleanup task...") await self.bot.wait_until_ready() + logger.info("Bot is ready, starting cleanup task...") @app_commands.command(name="reset", description="Reset your DevRel thread and memory.") async def reset_thread(self, interaction: discord.Interaction):
diff --git a/backend/main.py b/backend/main.py index b7ad80a6..39c9e956 100644 --- a/backend/main.py +++ b/backend/main.py @@ -6,14 +6,18 @@ import uvicorn from fastapi import FastAPI, Response from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +from slowapi.errors import RateLimitExceeded from app.api.router import api_router from app.core.config import settings from app.core.orchestration.agent_coordinator import AgentCoordinator from app.core.orchestration.queue_manager import AsyncQueueManager +from app.core.rate_limiter import get_limiter from app.database.weaviate.client import get_weaviate_client from integrations.discord.bot import DiscordBot from discord.ext import commands +from routes import router as legacy_router # DevRel commands are now loaded dynamically (commented out below) # from integrations.discord.cogs import DevRelCommands @@ -104,6 +108,24 @@ async def lifespan(app: FastAPI): api = FastAPI(title="Devr.AI API", version="1.0", lifespan=lifespan) +# Setup rate limiting with proper error handler +limiter = get_limiter() +api.state.limiter = limiter + + +def rate_limit_exception_handler(request, exc): + """Handle rate limit exceptions with proper HTTP 429 response.""" + return JSONResponse( + status_code=429, + content={ + "detail": "Rate limit exceeded. Please try again later.", + "retry_after": 60 + } + ) + + +api.add_exception_handler(RateLimitExceeded, rate_limit_exception_handler) + # Configure CORS api.add_middleware( CORSMiddleware, @@ -114,8 +136,8 @@ async def lifespan(app: FastAPI): "http://127.0.0.1:3000", ], allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], + allow_methods=["GET", "POST", "PUT", "DELETE", "OPTIONS"], + allow_headers=["Content-Type", "Authorization"], ) @api.get("/favicon.ico") @@ -124,6 +146,7 @@ async def favicon(): return Response(status_code=204) api.include_router(api_router) +api.include_router(legacy_router, prefix="/api", tags=["Legacy"]) if __name__ == "__main__": diff --git a/backend/requirements.txt b/backend/requirements.txt index 59827539..665d030f 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -191,6 +191,7 @@ sentence-transformers==3.4.1 shellingham==1.5.4 six==1.17.0 slack_sdk==3.35.0 +slowapi==0.1.9 sniffio==1.3.1 SQLAlchemy==2.0.41 stack-data==0.6.3 diff --git a/backend/routes.py b/backend/routes.py index 7dbd6463..a3e41228 100644 --- a/backend/routes.py +++ b/backend/routes.py @@ -1,15 +1,30 @@ import asyncio import uuid import logging -from fastapi import APIRouter, Request, HTTPException +import re +from urllib.parse import urlparse +from fastapi import APIRouter, Request, HTTPException, Depends +from uuid import UUID from app.core.events.event_bus import EventBus from app.core.events.enums import EventType, PlatformType from app.core.events.base import BaseEvent from app.core.handler.handler_registry import HandlerRegistry from pydantic import BaseModel +from app.core.config import settings +from app.core.dependencies import get_current_user +from app.core.rate_limiter import get_limiter +from app.core.cache import cache_result +import httpx router = APIRouter() +# GitHub identifier validation regexes (GitHub allows dots in repo names) +GITHUB_OWNER = re.compile(r'^[a-zA-Z0-9_-]+$') +GITHUB_REPO = re.compile(r'^[a-zA-Z0-9._-]+$') + +# Initialize rate limiter +limiter = get_limiter() + class RepoRequest(BaseModel): repo_url: str @@ -35,6 +50,198 @@ def register_event_handlers(): event_bus.register_handler(EventType.PR_COMMENTED, sample_handler, PlatformType.GITHUB) event_bus.register_handler(EventType.PR_MERGED, sample_handler, PlatformType.GITHUB) +@router.post("/repo-stats") +@limiter.limit("10/minute") +async def get_repo_stats( + request: Request, + repo_request: RepoRequest, + current_user: UUID = Depends(get_current_user) +): + """ + Get repository statistics from GitHub with rate limiting, caching, and auth.
+ + Rate limit: 10 requests per minute per IP address + Cache: 5 minutes per repository + Authentication: Required (Bearer token) + """ + try: + # Parse repo URL to extract owner and repo name + repo_url = repo_request.repo_url.strip() + + # Handle SSH format (git@github.com:owner/repo.git) + if repo_url.startswith("git@"): + # Split on ':' to get the path part + if ":" in repo_url: + path = repo_url.split(":", 1)[1] + else: + raise HTTPException( + status_code=400, + detail="Invalid GitHub repository URL format" + ) + else: + # Ensure scheme is present + if not repo_url.startswith(("http://", "https://")): + repo_url = "https://" + repo_url + + # Parse URL to extract path (excludes query strings and fragments) + try: + parsed_url = urlparse(repo_url) + + # Verify it's a GitHub URL + if "github.com" not in parsed_url.netloc: + raise HTTPException( + status_code=400, + detail="Invalid GitHub repository URL: must be from github.com" + ) + + # Get path from parsed URL (excludes query string and fragment) + path = parsed_url.path + + except (ValueError, AttributeError): + raise HTTPException( + status_code=400, + detail="Invalid GitHub repository URL format" + ) + + # Strip leading/trailing slashes and .git suffix + path = path.strip("/").removesuffix(".git") + + # Split into segments and validate + segments = path.split("/") + if len(segments) < 2: + raise HTTPException( + status_code=400, + detail="Invalid GitHub repository URL: expected owner/repo format" + ) + + owner, repo = segments[0], segments[1] + + # Validate GitHub identifier format (owner and repo) + if not GITHUB_OWNER.match(owner): + raise HTTPException( + status_code=400, + detail=f"Invalid GitHub owner format: '{owner}'. Must contain only alphanumeric characters, hyphens, and underscores." + ) + + if not GITHUB_REPO.match(repo): + raise HTTPException( + status_code=400, + detail=f"Invalid GitHub repository name format: '{repo}'. Must contain only alphanumeric characters, hyphens, dots, and underscores." + ) + + # Validate GitHub token is configured + if not settings.github_token: + logger = logging.getLogger(__name__) + logger.error("Error type: MissingConfigurationError") + raise HTTPException( + status_code=500, + detail="GitHub API is not configured. Contact administrator." + ) + + # Generate cache key + cache_key = f"repo_stats:{owner.lower()}_{repo.lower()}" + + # Try to get from cache first + from app.core.cache import get_cache + cache = get_cache() + cached_data = cache.get(cache_key) + if cached_data: + logging.getLogger(__name__).info( + f"Serving cached repo stats for {owner}/{repo} (user: {current_user})" + ) + return cached_data + + # Fetch repository stats from GitHub API + headers = { + "Authorization": f"token {settings.github_token}", + "Accept": "application/vnd.github.v3+json" + } + + async with httpx.AsyncClient() as client: + try: + response = await client.get( + f"https://api.github.com/repos/{owner}/{repo}", + headers=headers, + timeout=10.0 + ) + + if response.status_code == 404: + raise HTTPException( + status_code=404, + detail=f"Repository not found: {owner}/{repo}" + ) + + if response.status_code == 403: + logger = logging.getLogger(__name__) + logger.error("Error type: GitHubRateLimitError") + raise HTTPException( + status_code=503, + detail="GitHub API rate limit exceeded. Please try again later." 
+ ) + + if response.status_code != 200: + logger = logging.getLogger(__name__) + logger.error(f"Error type: GitHubAPIError status={response.status_code}") + raise HTTPException( + status_code=502, + detail="GitHub API error. Please try again later." + ) + + repo_data = response.json() + + result = { + "name": repo_data.get("name"), + "full_name": repo_data.get("full_name"), + "description": repo_data.get("description"), + "stars": repo_data.get("stargazers_count", 0), + "forks": repo_data.get("forks_count", 0), + "open_issues": repo_data.get("open_issues_count", 0), + "language": repo_data.get("language"), + "created_at": repo_data.get("created_at"), + "updated_at": repo_data.get("updated_at"), + "url": repo_data.get("html_url"), + "owner": { + "login": repo_data.get("owner", {}).get("login"), + "avatar_url": repo_data.get("owner", {}).get("avatar_url"), + } + } + + # Cache the result for 5 minutes + cache.set(cache_key, result, ttl_seconds=300) + + logger = logging.getLogger(__name__) + logger.info( + f"Fetched repo stats for {owner}/{repo} from GitHub API (user: {current_user})" + ) + + return result + + except httpx.TimeoutException: + logger = logging.getLogger(__name__) + logger.error("Error type: GitHubAPITimeoutError") + raise HTTPException( + status_code=504, + detail="GitHub API request timeout. Please try again." + ) + except httpx.RequestError as e: + logger = logging.getLogger(__name__) + logger.error(f"Error type: GitHubNetworkError") + raise HTTPException( + status_code=502, + detail="Failed to connect to GitHub API. Please try again." + ) + + except HTTPException: + raise + except Exception as e: + logger = logging.getLogger(__name__) + logger.error(f"Error type: {type(e).__name__}") + raise HTTPException( + status_code=500, + detail="An unexpected error occurred. Please try again later." 
+ ) + + @router.post("/github/webhook") async def github_webhook(request: Request): payload = await request.json() diff --git a/frontend/package-lock.json b/frontend/package-lock.json index f2cf10ed..eab162e5 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -2171,9 +2171,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001731", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001731.tgz", - "integrity": "sha512-lDdp2/wrOmTRWuoB5DpfNkC0rJDU8DqRa6nYL6HK6sytw70QMopt/NIc/9SM7ylItlBWfACXk0tEn37UWM/+mg==", + "version": "1.0.30001764", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001764.tgz", + "integrity": "sha512-9JGuzl2M+vPL+pz70gtMF9sHdMFbY9FJaQBi186cHKH3pSzDvzoUJUPV6fqiKIMyXbud9ZLg4F3Yza1vJ1+93g==", "dev": true, "funding": [ { diff --git a/poetry.lock b/poetry.lock index 7d46dd61..8d13997f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -236,6 +236,66 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +[[package]] +name = "audioop-lts" +version = "0.2.2" +description = "LTS Port of Python audioop" +optional = false +python-versions = ">=3.13" +groups = ["main"] +markers = "python_version == \"3.13\"" +files = [ + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd3d4602dc64914d462924a08c1a9816435a2155d74f325853c1f1ac3b2d9800"}, + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:550c114a8df0aafe9a05442a1162dfc8fec37e9af1d625ae6060fed6e756f303"}, + {file = "audioop_lts-0.2.2-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:9a13dc409f2564de15dd68be65b462ba0dde01b19663720c68c1140c782d1d75"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:51c916108c56aa6e426ce611946f901badac950ee2ddaf302b7ed35d9958970d"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47eba38322370347b1c47024defbd36374a211e8dd5b0dcbce7b34fdb6f8847b"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba7c3a7e5f23e215cb271516197030c32aef2e754252c4c70a50aaff7031a2c8"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:def246fe9e180626731b26e89816e79aae2276f825420a07b4a647abaa84becc"}, + {file = "audioop_lts-0.2.2-cp313-abi3-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e160bf9df356d841bb6c180eeeea1834085464626dc1b68fa4e1d59070affdc3"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4b4cd51a57b698b2d06cb9993b7ac8dfe89a3b2878e96bc7948e9f19ff51dba6"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:4a53aa7c16a60a6857e6b0b165261436396ef7293f8b5c9c828a3a203147ed4a"}, + {file = 
"audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_riscv64.whl", hash = "sha256:3fc38008969796f0f689f1453722a0f463da1b8a6fbee11987830bfbb664f623"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:15ab25dd3e620790f40e9ead897f91e79c0d3ce65fe193c8ed6c26cffdd24be7"}, + {file = "audioop_lts-0.2.2-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:03f061a1915538fd96272bac9551841859dbb2e3bf73ebe4a23ef043766f5449"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win32.whl", hash = "sha256:3bcddaaf6cc5935a300a8387c99f7a7fbbe212a11568ec6cf6e4bc458c048636"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win_amd64.whl", hash = "sha256:a2c2a947fae7d1062ef08c4e369e0ba2086049a5e598fda41122535557012e9e"}, + {file = "audioop_lts-0.2.2-cp313-abi3-win_arm64.whl", hash = "sha256:5f93a5db13927a37d2d09637ccca4b2b6b48c19cd9eda7b17a2e9f77edee6a6f"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:73f80bf4cd5d2ca7814da30a120de1f9408ee0619cc75da87d0641273d202a09"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:106753a83a25ee4d6f473f2be6b0966fc1c9af7e0017192f5531a3e7463dce58"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fbdd522624141e40948ab3e8cdae6e04c748d78710e9f0f8d4dae2750831de19"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:143fad0311e8209ece30a8dbddab3b65ab419cbe8c0dde6e8828da25999be911"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dfbbc74ec68a0fd08cfec1f4b5e8cca3d3cd7de5501b01c4b5d209995033cde9"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cfcac6aa6f42397471e4943e0feb2244549db5c5d01efcd02725b96af417f3fe"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:752d76472d9804ac60f0078c79cdae8b956f293177acd2316cd1e15149aee132"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:83c381767e2cc10e93e40281a04852facc4cd9334550e0f392f72d1c0a9c5753"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c0022283e9556e0f3643b7c3c03f05063ca72b3063291834cca43234f20c60bb"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a2d4f1513d63c795e82948e1305f31a6d530626e5f9f2605408b300ae6095093"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:c9c8e68d8b4a56fda8c025e538e639f8c5953f5073886b596c93ec9b620055e7"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:96f19de485a2925314f5020e85911fb447ff5fbef56e8c7c6927851b95533a1c"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e541c3ef484852ef36545f66209444c48b28661e864ccadb29daddb6a4b8e5f5"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win32.whl", hash = "sha256:d5e73fa573e273e4f2e5ff96f9043858a5e9311e94ffefd88a3186a910c70917"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9191d68659eda01e448188f60364c7763a7ca6653ed3f87ebb165822153a8547"}, + {file = "audioop_lts-0.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c174e322bb5783c099aaf87faeb240c8d210686b04bd61dfd05a8e5a83d88969"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = 
"sha256:f9ee9b52f5f857fbaf9d605a360884f034c92c1c23021fb90b2e39b8e64bede6"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:49ee1a41738a23e98d98b937a0638357a2477bc99e61b0f768a8f654f45d9b7a"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b00be98ccd0fc123dcfad31d50030d25fcf31488cde9e61692029cd7394733b"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a6d2e0f9f7a69403e388894d4ca5ada5c47230716a03f2847cfc7bd1ecb589d6"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9b0b8a03ef474f56d1a842af1a2e01398b8f7654009823c6d9e0ecff4d5cfbf"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2b267b70747d82125f1a021506565bdc5609a2b24bcb4773c16d79d2bb260bbd"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0337d658f9b81f4cd0fdb1f47635070cc084871a3d4646d9de74fdf4e7c3d24a"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:167d3b62586faef8b6b2275c3218796b12621a60e43f7e9d5845d627b9c9b80e"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0d9385e96f9f6da847f4d571ce3cb15b5091140edf3db97276872647ce37efd7"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:48159d96962674eccdca9a3df280e864e8ac75e40a577cc97c5c42667ffabfc5"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8fefe5868cd082db1186f2837d64cfbfa78b548ea0d0543e9b28935ccce81ce9"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:58cf54380c3884fb49fdd37dfb7a772632b6701d28edd3e2904743c5e1773602"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:088327f00488cdeed296edd9215ca159f3a5a5034741465789cad403fcf4bec0"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win32.whl", hash = "sha256:068aa17a38b4e0e7de771c62c60bbca2455924b67a8814f3b0dee92b5820c0b3"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:a5bf613e96f49712073de86f20dbdd4014ca18efd4d34ed18c75bd808337851b"}, + {file = "audioop_lts-0.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:b492c3b040153e68b9fdaff5913305aaaba5bb433d8a7f73d5cf6a64ed3cc1dd"}, + {file = "audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0"}, +] + [[package]] name = "authlib" version = "1.6.2" @@ -748,24 +808,26 @@ packaging = "*" [[package]] name = "discord-py" -version = "2.4.0" +version = "2.6.4" description = "A Python wrapper for the Discord API" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "discord.py-2.4.0-py3-none-any.whl", hash = "sha256:b8af6711c70f7e62160bfbecb55be699b5cb69d007426759ab8ab06b1bd77d1d"}, - {file = "discord_py-2.4.0.tar.gz", hash = "sha256:d07cb2a223a185873a1d0ee78b9faa9597e45b3f6186df21a95cec1e9bcdc9a5"}, + {file = "discord_py-2.6.4-py3-none-any.whl", hash = "sha256:2783b7fb7f8affa26847bfc025144652c294e8fe6e0f8877c67ed895749eb227"}, + {file = "discord_py-2.6.4.tar.gz", hash = "sha256:44384920bae9b7a073df64ae9b14c8cf85f9274b5ad5d1d07bd5a67539de2da9"}, ] [package.dependencies] aiohttp = ">=3.7.4,<4" +audioop-lts = {version = "*", markers = "python_version >= \"3.13\""} 
[package.extras] -docs = ["sphinx (==4.4.0)", "sphinx-inline-tabs (==2023.4.21)", "sphinxcontrib-applehelp (==1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (==2.0.1)", "sphinxcontrib-jsmath (==1.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)", "sphinxcontrib-trio (==1.1.2)", "sphinxcontrib-websupport (==1.2.4)", "typing-extensions (>=4.3,<5)"] -speed = ["Brotli", "aiodns (>=1.1)", "cchardet (==2.1.7) ; python_version < \"3.10\"", "orjson (>=3.5.4)"] +dev = ["ruff (==0.12)", "typing_extensions (>=4.3,<5)"] +docs = ["imghdr-lts (==1.0.0) ; python_version >= \"3.13\"", "sphinx (==4.4.0)", "sphinx-inline-tabs (==2023.4.21)", "sphinxcontrib-applehelp (==1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (==2.0.1)", "sphinxcontrib-jsmath (==1.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)", "sphinxcontrib-websupport (==1.2.4)", "sphinxcontrib_trio (==1.1.2)", "typing-extensions (>=4.3,<5)"] +speed = ["Brotli", "aiodns (>=1.1) ; sys_platform != \"win32\"", "cchardet (==2.1.7) ; python_version < \"3.10\"", "orjson (>=3.5.4)", "zstandard (>=0.23.0) ; python_version <= \"3.13\""] test = ["coverage[toml]", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock", "typing-extensions (>=4.3,<5)", "tzdata ; sys_platform == \"win32\""] -voice = ["PyNaCl (>=1.3.0,<1.6)"] +voice = ["PyNaCl (>=1.5.0,<1.6)"] [[package]] name = "distro" @@ -1396,6 +1458,8 @@ files = [ {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"}, {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, @@ -1405,6 +1469,8 @@ files = [ {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"}, {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, @@ -1414,6 +1480,8 @@ files = [ {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"}, {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, @@ -1423,6 +1491,8 @@ files = [ {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"}, {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, @@ -1430,6 +1500,8 @@ files = [ {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"}, {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, @@ -1439,6 +1511,8 @@ files = [ {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"}, {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, @@ -6527,4 +6601,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "d1f57ec66dbe816e607f52b1adcde2a92d44e901ec1c70b6faa80b2d1ca39282" +content-hash = "f24bcdb4ab7ef008b3879459da06c43c4164952cc806a3cd814035432db2de64" diff --git a/pyproject.toml b/pyproject.toml index 3d225571..3dcdf3cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ "uvicorn (>=0.38.0,<0.39.0)", "ddgs (>=9.0.2,<10.0.0)", "fastmcp>=2.11.3,<3.0.0", - "discord-py (>=2.4.0,<2.5.0)", + "discord-py (>=2.6.4,<3.0.0)", "graphrag-sdk (>=0.8.1,<0.9.0)", "tree-sitter (>=0.25.2,<0.26.0)", "validators (>=0.35.0,<0.36.0)",