From 4279410cca66bf8e528235c40e829a9304976083 Mon Sep 17 00:00:00 2001 From: samonaisi Date: Fri, 7 Mar 2025 11:46:22 +0100 Subject: [PATCH 01/57] =?UTF-8?q?=E2=9C=A8=20Use=20`electro`with=20websock?= =?UTF-8?q?ets=20(#5)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * remove discord tweeks * fix typing * add vscode settings * run with docker * actually return message * use test_flow instead of main.py file * create websocket interface * run make style * handle multiple users * separate api and websocket apps --- .gitignore | 1 + .vscode/settings.json | 4 + Dockerfile | 35 +++++ README.md | 11 +- electro/app.py | 32 +++-- electro/flow_connector.py | 3 + electro/flow_manager.py | 10 +- electro/flow_step.py | 15 +-- electro/interfaces.py | 50 +++++++ electro/toolkit/discord_tweeks.py | 140 -------------------- electro/toolkit/memory_storage.py | 2 +- electro/toolkit/redis_storage.py | 2 +- examples/locales/en/LC_MESSAGES/messages.po | 10 +- examples/locales/fr/LC_MESSAGES/messages.po | 16 +++ examples/test_flow.py | 103 +------------- 15 files changed, 166 insertions(+), 268 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 Dockerfile create mode 100644 electro/interfaces.py delete mode 100644 electro/toolkit/discord_tweeks.py create mode 100644 examples/locales/fr/LC_MESSAGES/messages.po diff --git a/.gitignore b/.gitignore index f362e70..673fa51 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ Icon .Trashes .VolumeIcon.icns .com.apple.timemachine.donotpresent +.bash_history # Directories potentially created on remote AFP share .AppleDB diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..cfab949 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,4 @@ +{ + "python.analysis.extraPaths": ["./electro"], + "python.defaultInterpreterPath": ".venv/bin/python", +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..817b4ff --- /dev/null +++ b/Dockerfile @@ -0,0 +1,35 @@ +FROM python:3.12.6-slim AS builder + +ARG EXPORT_FLAG="--with dev" + +RUN pip install --upgrade pip poetry poetry-plugin-export + +COPY pyproject.toml poetry.lock ./ + +RUN poetry export -f requirements.txt $EXPORT_FLAG --without-hashes --output /tmp/requirements.txt + + +FROM python:3.12.6-slim + +WORKDIR /app + +RUN groupadd -g 10000 app && \ + useradd -g app -d /app -u 10000 app && \ + chown app:app /app && \ + apt-get update && \ + apt upgrade -y && \ + apt-get install nano && \ + apt-get install -y git && \ + pip install --upgrade pip + +COPY --from=builder /tmp/requirements.txt . + +RUN pip install -r requirements.txt +RUN pip install watchdog + +COPY . . + +USER app +ENV PYTHONPATH="/app" + +CMD ["watchmedo", "auto-restart", "--pattern=*.py", "--recursive", "--", "python", "examples/test_flow.py"] \ No newline at end of file diff --git a/README.md b/README.md index 978b17d..64505ce 100644 --- a/README.md +++ b/README.md @@ -18,12 +18,17 @@ A framework for building bots, made for humans. ```shell cp .env.example .env # vi .env + +4. Extract and compile translations: + ```shell + make upload-locales + make update-locales ``` -4. Run the `TestFlow`: +5. Run the `TestFlow`: ```shell poetry run python ./test_flow.py ``` -5. Check the API server @ http://localhost:8000/docs. -6. Use one of the clients to connect the platforms: [Discord](https://github.com/CyberCRI/ikigai-discord-client). \ No newline at end of file +6. 
Check the API server @ http://localhost:8000/docs. +7. Use one of the clients to connect the platforms: [Discord](https://github.com/CyberCRI/ikigai-discord-client). \ No newline at end of file diff --git a/electro/app.py b/electro/app.py index 13d736d..78a2379 100644 --- a/electro/app.py +++ b/electro/app.py @@ -1,10 +1,12 @@ """The API server that works as an endpoint for all the Electro Interfaces.""" -from fastapi import FastAPI +from fastapi import FastAPI, WebSocket, WebSocketDisconnect +from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise from . import types_ as types from .flow_manager import global_flow_manager +from .interfaces import APIInterface, WebSocketInterface from .toolkit.tortoise_orm import get_tortoise_config app = FastAPI( @@ -15,18 +17,32 @@ # redoc_url=None, ) +api_app = app +websocket_app = app -@app.post("/message") -async def process_message(message: types.Message) -> list[types.MessageToSend] | None: + +@api_app.post("/message") +async def process_message(message: types.Message) -> types.MessageToSend | None: """Process the message.""" + manager = APIInterface() + return await global_flow_manager.on_message(message, manager) + - return await global_flow_manager.on_message(message) +@websocket_app.websocket("/websocket/client/{client_name}/user/{user_id}") +async def websocket_endpoint(websocket: WebSocket, client_name: str, user_id: str): + manager = WebSocketInterface() + await manager.connect(websocket) + try: + while websocket.application_state == WebSocketState.CONNECTED: + data = await websocket.receive_json() + data = types.Message.model_validate(data) + await global_flow_manager.on_message(data, manager) + except WebSocketDisconnect: + await manager.disconnect() # region Register Tortoise -register_tortoise( - app, - config=get_tortoise_config(), -) +register_tortoise(api_app, config=get_tortoise_config()) +register_tortoise(websocket_app, config=get_tortoise_config()) # endregion diff --git a/electro/flow_connector.py b/electro/flow_connector.py index d3cc53b..3df8133 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -10,6 +10,7 @@ from discord.ext import commands from ._common import ContextInstanceMixin +from .interfaces import Interface from .models import Interaction, Message from .storage import ChannelData, UserData from .types_ import Channel, User @@ -61,3 +62,5 @@ class FlowConnector(ContextInstanceMixin): substitutions: dict[str, str] | None = None extra_data: dict[str, Any] | None = None + + interface: Interface | None = None diff --git a/electro/flow_manager.py b/electro/flow_manager.py index de662e2..1cd4992 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -14,6 +14,7 @@ from .flow_connector import FlowConnectorEvents # from decorators import fail_safely +from .interfaces import Interface from .models import Channel, Interaction, Message, User, UserStateChanged from .scopes import FlowScopes from .settings import settings @@ -273,6 +274,7 @@ async def _finish_flow(self, flow_connector: FlowConnector): # Run the callbacks for callback in self._on_finish_callbacks: await callback(flow_connector) + return await flow_connector.interface.stop_process(reason="end_of_flow") async def _create_user_and_channel( self, user: types.User | None = None, channel: types.Channel | types.DMChannel | None = None @@ -337,9 +339,8 @@ async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageTo for flow in self.flows: # Check all the triggers if await 
flow.check_triggers(flow_connector, scope=scope): - return await flow.run(flow_connector) - # break - + await flow.run(flow_connector) + break else: # Check if it's not something that shouldn't be handled by the flows if ( @@ -419,7 +420,7 @@ async def dispatch(self, flow_connector: FlowConnector) -> list[types.MessageToS async with self: return await self._dispatch(flow_connector) - async def on_message(self, message: types.Message) -> list[Message] | None: + async def on_message(self, message: types.Message, interface: Interface) -> list[Message] | None: """Handle the messages sent by the users.""" # Save the message to the database @@ -451,6 +452,7 @@ async def on_message(self, message: types.Message) -> list[Message] | None: user_data=user_data, channel_state=channel_state, channel_data=channel_data, + interface=interface, ) return await self.dispatch(flow_connector) diff --git a/electro/flow_step.py b/electro/flow_step.py index 7bc5e02..e091edd 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -250,12 +250,11 @@ async def send_message( ) # view_to_sent = await view.get_or_create_for_connector(connector, from_step_run=True) if view else None - - return MessageToSend( - content=message, - channel=channel_to_send_to, - # files=files or None, - # view=view_to_sent, + await connector.interface.send_json( + { + "message": message, + "to": channel_to_send_to.id, + } ) # TODO: [2024-07-19 by Mykola] Use the decorators @@ -264,7 +263,7 @@ async def run( self, connector: FlowConnector, channel_to_send_to: Channel | BaseSubstitution | None = None, - ) -> list[MessageToSend] | None: + ) -> MessageToSend | None: """Run the `BaseFlowStep`.""" message: MessageToSend = await self.send_message( @@ -277,7 +276,7 @@ async def run( raise FlowStepDone() # TODO: [2025-03-03 by Mykola] Allow sending multiple messages - return [message] + return message async def respond(self, connector: FlowConnector) -> discord.Message: """Respond to the user.""" diff --git a/electro/interfaces.py b/electro/interfaces.py new file mode 100644 index 0000000..e6874ae --- /dev/null +++ b/electro/interfaces.py @@ -0,0 +1,50 @@ +from typing import Dict, Optional + +from fastapi import WebSocket + +from .toolkit.loguru_logging import logger + + +class Interface: + async def send_json(self, *args, **kwargs): + raise NotImplementedError + + async def stop_process(self, *args, **kwargs): + raise NotImplementedError + + +class WebSocketInterface(Interface): + """ + WebSocket Interface for the Electro framework. + + On the server side, the WebSocketInterface is used to send messages to the client, + If you want to send a message to the client in a Flow, you can use the `send_json` method. 
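+
+    A minimal usage sketch from inside a Flow step, mirroring how `MessageFlowStep.send_message`
+    calls it in this patch (the `message`/`to` payload keys are the ones used there and are shown
+    for illustration, not as a fixed schema):
+
+        # `connector.interface` holds the interface the current update arrived on
+        await connector.interface.send_json({"message": "Hello!", "to": connector.channel.id})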
+ """ + + def __init__(self): + self.interface: WebSocket | None = None + + async def connect(self, websocket: WebSocket): + await websocket.accept() + self.interface = websocket + + async def disconnect(self): + await self.interface.close() + self.interface = None + + async def send_json(self, data: Dict[str, str]): + await self.interface.send_json(data) + + async def stop_process(self, code: int = 1000, reason: Optional[str] = None): + await self.interface.close(code, reason) + + +class APIInterface(Interface): + def __init__(self): + self.messages = [] + + async def send_json(self, data: Dict[str, str]): + self.messages.append(data) + + async def stop_process(self, *args, **kwargs): + return self.messages diff --git a/electro/toolkit/discord_tweeks.py b/electro/toolkit/discord_tweeks.py deleted file mode 100644 index b4982ff..0000000 --- a/electro/toolkit/discord_tweeks.py +++ /dev/null @@ -1,140 +0,0 @@ -import asyncio -import re -import textwrap - -import discord - -from ..settings import settings -from .loguru_logging import logger - -__MESSAGE_SEPARATOR_ENABLED = False # a flag that indicates whether the message separator is enabled - - -# region Redefine `discord`'s `Messageable.sleep` method -async def _message_sleep(self: discord.abc.Messageable, message_text: str) -> None: - """Sleep for a certain amount of time, depending on the message length.""" - # Get sleep time - sleep_time = len(message_text) * settings.SLEEP_TIME_PER_CHARACTER if message_text else settings.DEFAULT_SLEEP_TIME - - # Sleep - await asyncio.sleep(sleep_time) - - -async def send( - self, - content=None, - *, - tts=None, - embed=None, - embeds=None, - file=None, - files=None, - stickers=None, - delete_after=None, - nonce=None, - allowed_mentions=None, - reference=None, - mention_author=None, - view=None, - suppress=None, - silent=None, -): - """Send a message to the channel.""" - total_sleep_time_at_the_end = 0 - - if content and isinstance(content, str): - message_parts = [ - wrapped_part - for part in content.split(settings.MESSAGE_BREAK) - for wrapped_part in textwrap.wrap( - part, settings.MESSAGE_MAX_LENGTH, replace_whitespace=False, break_long_words=False - ) - ] - - # Get from the last message part to the first, select all the parts that contain only sleep - # instructions, and increase the `total_sleep_time_at_the_end` variable - while message_parts and re.match(rf"^{settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN}$", message_parts[-1].strip()): - total_sleep_time_at_the_end += float( - re.search(settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN, message_parts[-1].strip()).group(1) - ) - message_parts.pop() - - while len(message_parts) > 1: - message_part = message_parts.pop(0).strip() - - # Sleep - await _message_sleep(self, message_part) - - # Check if the message contains a sleep instruction - if match := (re.search(settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN, message_part)): - sleep_time = float(match.group(1)) - await asyncio.sleep(sleep_time) - - # Remove the sleep instruction from the message - message_part = re.sub(settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN, "", message_part).strip() - - if not message_part: - continue - - # Send the message - try: - await discord.abc.Messageable.old_send( - self, - content=message_part, - ) - except discord.errors.HTTPException as e: - logger.error(f"Failed to send a message: {e}") - - content = message_parts[0].strip() - - # Check if the message contains a sleep instruction - if match := (re.search(settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN, content)): - sleep_time = 
float(match.group(1)) - await asyncio.sleep(sleep_time) - - # Remove the sleep instruction from the message - content = re.sub(settings.MESSAGE_SLEEP_INSTRUCTION_PATTERN, "", content).strip() - - await _message_sleep(self, content) - - # Send the message - # noinspection PyArgumentList - sent_message = await discord.abc.Messageable.old_send( - self, - content=content, - tts=tts, - embed=embed, - embeds=embeds, - file=file, - files=files, - stickers=stickers, - delete_after=delete_after, - nonce=nonce, - allowed_mentions=allowed_mentions, - reference=reference, - mention_author=mention_author, - view=view, - suppress=suppress, - silent=silent, - ) - - # Sleep for the total sleep time at the end - await asyncio.sleep(total_sleep_time_at_the_end) - - return sent_message - - -def enable_message_separator(): - """Redefine `discord`'s `Messageable.sleep` method.""" - global __MESSAGE_SEPARATOR_ENABLED - - if __MESSAGE_SEPARATOR_ENABLED: - return - - discord.abc.Messageable.old_send = discord.abc.Messageable.send - discord.abc.Messageable.send = send - - __MESSAGE_SEPARATOR_ENABLED = True - - -# endregion diff --git a/electro/toolkit/memory_storage.py b/electro/toolkit/memory_storage.py index f949abc..4897cac 100644 --- a/electro/toolkit/memory_storage.py +++ b/electro/toolkit/memory_storage.py @@ -25,7 +25,7 @@ def check_address( *, chat: typing.Union[str, int, None] = None, user: typing.Union[str, int, None] = None, - ) -> (typing.Union[str, int], typing.Union[str, int]): + ) -> typing.Tuple[typing.Union[str, int]]: """ In all storage's methods chat or user is always required. If one of them is not provided, you have to set missing value based on the provided one. diff --git a/electro/toolkit/redis_storage.py b/electro/toolkit/redis_storage.py index 7047838..8e5a40f 100644 --- a/electro/toolkit/redis_storage.py +++ b/electro/toolkit/redis_storage.py @@ -63,7 +63,7 @@ def check_address( *, chat: typing.Union[str, int, None] = None, user: typing.Union[str, int, None] = None, - ) -> (typing.Union[str, int], typing.Union[str, int]): + ) -> typing.Tuple[typing.Union[str, int]]: """ In all storage's methods chat or user is always required. If one of them is not provided, you have to set missing value based on the provided one. diff --git a/examples/locales/en/LC_MESSAGES/messages.po b/examples/locales/en/LC_MESSAGES/messages.po index 74e33f9..5777374 100644 --- a/examples/locales/en/LC_MESSAGES/messages.po +++ b/examples/locales/en/LC_MESSAGES/messages.po @@ -7,11 +7,11 @@ msgstr "" "Project-Id-Version: electro\n" "Language: en\n" -#: test_flow.py:27 +#: test_flow.py:21 msgid "test_flow_message" -msgstr "Testing this `electro` Flow. Everything works!" +msgstr "Testing this `electro` Flow step 1. Everything works!" -#: test_flow.py:121 -msgid "message_edit_warning" -msgstr "" +#: test_flow.py:22 +msgid "test_flow_message_2" +msgstr "Testing this `electro` Flow step 2. Everything works!" diff --git a/examples/locales/fr/LC_MESSAGES/messages.po b/examples/locales/fr/LC_MESSAGES/messages.po new file mode 100644 index 0000000..59ead81 --- /dev/null +++ b/examples/locales/fr/LC_MESSAGES/messages.po @@ -0,0 +1,16 @@ +msgid "" +msgstr "" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"X-Generator: POEditor.com\n" +"Project-Id-Version: electro\n" +"Language: fr\n" + +#: test_flow.py:21 +msgid "test_flow_message" +msgstr "Test du Flow `electro` étape 1. Tout fonctionne!" 
+ +#: test_flow.py:22 +msgid "test_flow_message_2" +msgstr "Test du Flow `electro` étape 2. Tout fonctionne!" diff --git a/examples/test_flow.py b/examples/test_flow.py index ec45434..f9b1a89 100644 --- a/examples/test_flow.py +++ b/examples/test_flow.py @@ -2,12 +2,10 @@ import uvicorn -from electro.app import app +from electro.app import websocket_app from electro import Flow, MessageFlowStep from electro.extra.i18n_gettext import templated_gettext as _ -from electro.toolkit.discord_tweeks import enable_message_separator - from electro.triggers import CommandTrigger from electro.flow_manager import global_flow_manager @@ -17,106 +15,15 @@ class TestFlow(Flow): """Test Flow.""" _triggers = [ - CommandTrigger("test_flow"), + CommandTrigger("test"), ] - send_test_message = MessageFlowStep( - _("test_flow_message"), - ) + send_test_message_1 = MessageFlowStep(_("test_flow_message")) + send_test_message_2 = MessageFlowStep(_("test_flow_message_2")) global_flow_manager.add_flow(TestFlow()) -# region Setting up the bot -# TODO: [2024-07-20 by Mykola] Move to a separate file -enable_message_separator() -# -# -# @bot.event -# async def on_connect(): -# """Start the services when the bot is ready.""" -# logger.info(f"Logged in as {bot.user.name} ({bot.user.id})") -# -# logger.info(f"Starting the Tortoise ORM...") -# await init_orm() -# -# # Save the bot to the database -# if not (bot_user := await User.get_or_none(id=bot.user.id)): -# bot_user = await User.create( -# id=bot.user.id, -# username=bot.user.name, -# discriminator=bot.user.discriminator, -# is_bot=True, -# ) -# -# logger.info(f"Saved the bot to the database: {bot_user=}") -# else: -# logger.debug(f"The bot is already in the database: {bot_user=}") -# -# -# @bot.listen("on_member_join") -# async def on_member_join(member: discord.Member): -# await flow_manager.on_member_join(member) -# -# -# @bot.listen("on_member_update") -# async def on_member_update(before: discord.Member, after: discord.Member): -# await flow_manager.on_member_update(before, after) -# -# -# @bot.listen("on_interaction") -# async def on_interaction(interaction: discord.Interaction): -# await flow_manager.on_interaction(interaction) -# -# -# @bot.event -# async def on_message(message: discord.Message): -# """Handle messages.""" -# return await flow_manager.on_message(message) -# -# -# # On bot joining a Guild (server), add that Guild to the database -# @bot.event -# async def on_guild_available(guild: discord.Guild): -# """Handle the bot joining a Guild (server).""" -# # Save the Guild to the database -# guild_, is_created = await Guild.get_or_create( -# id=guild.id, -# defaults=dict( -# name=guild.name, -# icon=guild.icon.url if guild.icon else None, -# banner=guild.banner.url if guild.banner else None, -# description=guild.description, -# preferred_locale=guild.preferred_locale, -# afk_channel_id=guild.afk_channel.id if guild.afk_channel else None, -# afk_timeout=guild.afk_timeout, -# owner_id=guild.owner.id, -# ), -# ) -# -# if is_created: -# logger.info(f"Created a new Guild: {guild_=}") -# else: -# logger.info(f"Found an existing Guild: {guild_=}") -# -# -# # Display a warning message on message edit -# @bot.event -# async def on_message_edit(before: discord.Message, after: discord.Message): -# """Handle message edits.""" -# if before.author == bot.user: -# return -# -# if before.content != after.content: -# await after.channel.send( -# _("message_edit_warning").safe_substitute( -# user_mention=before.author.mention, -# ), -# delete_after=5, -# ) - if __name__ == 
"__main__": - uvicorn.run(app=app, loop="asyncio") - -# endregion + uvicorn.run(app=websocket_app, loop="asyncio", port=8000, host="0.0.0.0") From aab490b34dbbfb005335e15bc78125d26534ae6d Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 11:56:29 +0100 Subject: [PATCH 02/57] :sparkles: Update electro-migrations repository --- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 2b18627..8dd9521 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "migrations/electro"] path = migrations/electro - url = https://github.com/mykolasolodukha/electro-migrations.git + url = https://github.com/CyberCRI/electro-migrations.git From 4c42463c04e9dd09e58b74d6eadb5f28c0be724e Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 12:22:00 +0100 Subject: [PATCH 03/57] use ssh url for gitmodule --- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 8dd9521..fcb23b5 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "migrations/electro"] path = migrations/electro - url = https://github.com/CyberCRI/electro-migrations.git + url = git@github.com:CyberCRI/electro-migrations.git From 9c9ddb50eb09341df54ee6264eabb3a55e563912 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 12:25:15 +0100 Subject: [PATCH 04/57] Removed submodule --- .gitmodules | 3 --- migrations/electro | 1 - 2 files changed, 4 deletions(-) delete mode 160000 migrations/electro diff --git a/.gitmodules b/.gitmodules index fcb23b5..e69de29 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "migrations/electro"] - path = migrations/electro - url = git@github.com:CyberCRI/electro-migrations.git diff --git a/migrations/electro b/migrations/electro deleted file mode 160000 index d3025c7..0000000 --- a/migrations/electro +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d3025c7eb118bb215b252939294ba95faa955d8b From 10fda47437b05b7dbfcc0e7c7e24d01a9415f247 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 12:26:16 +0100 Subject: [PATCH 05/57] recreate submodule --- .gitmodules | 3 +++ migrations/electro | 1 + 2 files changed, 4 insertions(+) create mode 160000 migrations/electro diff --git a/.gitmodules b/.gitmodules index e69de29..fcb23b5 100644 --- a/.gitmodules +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "migrations/electro"] + path = migrations/electro + url = git@github.com:CyberCRI/electro-migrations.git diff --git a/migrations/electro b/migrations/electro new file mode 160000 index 0000000..9751d9b --- /dev/null +++ b/migrations/electro @@ -0,0 +1 @@ +Subproject commit 9751d9ba89e47ae59a58dc03bdb9e6dfb91fc7d6 From 37dac4fab95799a007664647a23ce0ddbc02731c Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 16:56:23 +0100 Subject: [PATCH 06/57] add create_dm method for user --- electro/models.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/electro/models.py b/electro/models.py index f516e0e..e609055 100644 --- a/electro/models.py +++ b/electro/models.py @@ -58,6 +58,13 @@ def __str__(self) -> str: """Return the string representation of the model.""" return f"{self.username}#{self.discriminator}" + async def create_dm(self) -> Channel: + """ + Create a DM channel with the user. 
+ """ + channel, _ = await Channel.get_or_create(id=self.id, defaults={"type": 1}) + return channel + class File(BaseModel): """The model for the file.""" From b564b510acb01c82cf78dd6df724f151947e8244 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 7 Mar 2025 17:24:19 +0100 Subject: [PATCH 07/57] add create_dm method to user type --- electro/types_.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/electro/types_.py b/electro/types_.py index 54265ae..d3da05f 100644 --- a/electro/types_.py +++ b/electro/types_.py @@ -27,6 +27,18 @@ class User(ElectroBaseModel): discriminator: str | None avatar: dict | None + async def create_dm(self) -> "Channel": + """ + Return a DM channel with the user. + """ + return Channel( + id=self.id, + name=self.username, + type=ChannelType.private, + guild=None, + used_for="DM" + ) + class Guild(ElectroBaseModel): """The model for Guild.""" From 06582f6f6de3829d34d7eca0d83d80a9ee8be24e Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 12 Mar 2025 12:55:26 +0100 Subject: [PATCH 08/57] better with one app --- electro/app.py | 10 +++------- electro/types_.py | 8 +------- examples/test_flow.py | 4 ++-- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/electro/app.py b/electro/app.py index 78a2379..4388434 100644 --- a/electro/app.py +++ b/electro/app.py @@ -17,18 +17,15 @@ # redoc_url=None, ) -api_app = app -websocket_app = app - -@api_app.post("/message") +@app.post("/message") async def process_message(message: types.Message) -> types.MessageToSend | None: """Process the message.""" manager = APIInterface() return await global_flow_manager.on_message(message, manager) -@websocket_app.websocket("/websocket/client/{client_name}/user/{user_id}") +@app.websocket("/websocket/client/{client_name}/user/{user_id}") async def websocket_endpoint(websocket: WebSocket, client_name: str, user_id: str): manager = WebSocketInterface() await manager.connect(websocket) @@ -42,7 +39,6 @@ async def websocket_endpoint(websocket: WebSocket, client_name: str, user_id: st # region Register Tortoise -register_tortoise(api_app, config=get_tortoise_config()) -register_tortoise(websocket_app, config=get_tortoise_config()) +register_tortoise(app, config=get_tortoise_config()) # endregion diff --git a/electro/types_.py b/electro/types_.py index d3da05f..91a5e0d 100644 --- a/electro/types_.py +++ b/electro/types_.py @@ -31,13 +31,7 @@ async def create_dm(self) -> "Channel": """ Return a DM channel with the user. 
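+
+        A short usage sketch (`connector` stands for the current `FlowConnector` and is shown
+        only for illustration); the returned channel mirrors the user's id and username:
+
+            dm_channel = await connector.user.create_dm()
+            # `dm_channel.id` equals the user's id, and `used_for` is set to "DM"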
""" - return Channel( - id=self.id, - name=self.username, - type=ChannelType.private, - guild=None, - used_for="DM" - ) + return Channel(id=self.id, name=self.username, type=ChannelType.private, guild=None, used_for="DM") class Guild(ElectroBaseModel): diff --git a/examples/test_flow.py b/examples/test_flow.py index f9b1a89..e83b76c 100644 --- a/examples/test_flow.py +++ b/examples/test_flow.py @@ -2,7 +2,7 @@ import uvicorn -from electro.app import websocket_app +from electro.app import app from electro import Flow, MessageFlowStep from electro.extra.i18n_gettext import templated_gettext as _ @@ -26,4 +26,4 @@ class TestFlow(Flow): if __name__ == "__main__": - uvicorn.run(app=websocket_app, loop="asyncio", port=8000, host="0.0.0.0") + uvicorn.run(app=app, loop="asyncio", port=8000, host="0.0.0.0") From 05c6d1236731836211cac7e6c3fd92c971f1a6d5 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 12 Mar 2025 13:14:20 +0100 Subject: [PATCH 09/57] cleanup --- electro/app.py | 12 ++++++------ electro/flow_connector.py | 4 ++-- electro/flow_manager.py | 4 ++-- electro/interfaces.py | 12 +++++++++--- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/electro/app.py b/electro/app.py index 4388434..51dd1d5 100644 --- a/electro/app.py +++ b/electro/app.py @@ -21,21 +21,21 @@ @app.post("/message") async def process_message(message: types.Message) -> types.MessageToSend | None: """Process the message.""" - manager = APIInterface() - return await global_flow_manager.on_message(message, manager) + interface = APIInterface() + return await global_flow_manager.on_message(message, interface) @app.websocket("/websocket/client/{client_name}/user/{user_id}") async def websocket_endpoint(websocket: WebSocket, client_name: str, user_id: str): - manager = WebSocketInterface() - await manager.connect(websocket) + interface = WebSocketInterface() + await interface.connect(websocket) try: while websocket.application_state == WebSocketState.CONNECTED: data = await websocket.receive_json() data = types.Message.model_validate(data) - await global_flow_manager.on_message(data, manager) + await global_flow_manager.on_message(data, interface) except WebSocketDisconnect: - await manager.disconnect() + await interface.disconnect() # region Register Tortoise diff --git a/electro/flow_connector.py b/electro/flow_connector.py index 3df8133..6f0d345 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -10,7 +10,7 @@ from discord.ext import commands from ._common import ContextInstanceMixin -from .interfaces import Interface +from .interfaces import BaseInterface from .models import Interaction, Message from .storage import ChannelData, UserData from .types_ import Channel, User @@ -63,4 +63,4 @@ class FlowConnector(ContextInstanceMixin): extra_data: dict[str, Any] | None = None - interface: Interface | None = None + interface: BaseInterface | None = None diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 1cd4992..d7dbfd0 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -14,7 +14,7 @@ from .flow_connector import FlowConnectorEvents # from decorators import fail_safely -from .interfaces import Interface +from .interfaces import BaseInterface from .models import Channel, Interaction, Message, User, UserStateChanged from .scopes import FlowScopes from .settings import settings @@ -420,7 +420,7 @@ async def dispatch(self, flow_connector: FlowConnector) -> list[types.MessageToS async with self: return await self._dispatch(flow_connector) - async def 
on_message(self, message: types.Message, interface: Interface) -> list[Message] | None: + async def on_message(self, message: types.Message, interface: BaseInterface) -> list[Message] | None: """Handle the messages sent by the users.""" # Save the message to the database diff --git a/electro/interfaces.py b/electro/interfaces.py index e6874ae..edbca6f 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -1,3 +1,4 @@ +from abc import ABC, abstractmethod from typing import Dict, Optional from fastapi import WebSocket @@ -5,15 +6,20 @@ from .toolkit.loguru_logging import logger -class Interface: +class BaseInterface(ABC): + """ + Interface class for the Electro framework.""" + + @abstractmethod async def send_json(self, *args, **kwargs): raise NotImplementedError + @abstractmethod async def stop_process(self, *args, **kwargs): raise NotImplementedError -class WebSocketInterface(Interface): +class WebSocketInterface(BaseInterface): """ WebSocket Interface for the Electro framework. @@ -39,7 +45,7 @@ async def stop_process(self, code: int = 1000, reason: Optional[str] = None): await self.interface.close(code, reason) -class APIInterface(Interface): +class APIInterface(BaseInterface): def __init__(self): self.messages = [] From 9bdd6465730cd149e0f863fecbf8a6209895139b Mon Sep 17 00:00:00 2001 From: Mykola Solodukha Date: Mon, 24 Mar 2025 11:51:34 +0200 Subject: [PATCH 10/57] =?UTF-8?q?=E2=9A=97=EF=B8=8F=20Use=20`contextvars`?= =?UTF-8?q?=20to=20store=20sent=20messages=20in=20`APIInterface`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- electro/interfaces.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/electro/interfaces.py b/electro/interfaces.py index edbca6f..e9ab3e1 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -1,3 +1,4 @@ +import contextvars from abc import ABC, abstractmethod from typing import Dict, Optional @@ -47,10 +48,12 @@ async def stop_process(self, code: int = 1000, reason: Optional[str] = None): class APIInterface(BaseInterface): def __init__(self): - self.messages = [] + self.messages = contextvars.ContextVar("messages") + + self.messages.set([]) async def send_json(self, data: Dict[str, str]): - self.messages.append(data) + self.messages.get().append(data) async def stop_process(self, *args, **kwargs): - return self.messages + return self.messages.get() From 48baaf9928cfaea6e550f4a8031709ed1e78c96f Mon Sep 17 00:00:00 2001 From: samonaisi Date: Tue, 8 Apr 2025 18:12:41 +0200 Subject: [PATCH 11/57] =?UTF-8?q?=E2=9C=A8=20Remove=20discord=20dependenci?= =?UTF-8?q?es=20(#9)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * remove bot * update buttons * buttons and views * remove views and unused files * rename interaction objects * confirm button * more stuff * change button model * updated models and buttons * per user websocket connection * update interface * resolve imports * openai_client, decorators and more actions * fix typing error * add get_image_url to universal image storage * fix db fetching objects in manager * handle static gifs and images * cleaner handling of gifs * send images one at a time and handle captions * universal image handler * rename and refactor some responses * do not send images as bytes * Add DataButton * remove call to bot * ignore some linter warnings --- .gitignore | 1 + .gitmodules | 3 - .pylintrc | 13 + README.md | 2 +- docs/source/electro.toolkit.rst | 8 - electro/app.py | 32 +- 
electro/bot.py | 35 - electro/contrib/assistants_storage_buckets.py | 19 - electro/contrib/buttons.py | 99 ++- electro/contrib/steps.py | 159 ---- electro/contrib/steps.py.upcoming | 712 ------------------ electro/contrib/storage_buckets.py | 16 + electro/contrib/storages.py | 15 - electro/contrib/views.py | 540 ------------- electro/enums.py | 30 +- electro/exceptions.py | 4 +- electro/extra/__init__.py | 1 - electro/extra/decorators.py.upcoming | 143 ---- electro/extra/httpx_client.py | 16 - electro/flow.py | 19 +- electro/flow_connector.py | 32 +- electro/flow_manager.py | 464 +++++------- electro/flow_step.py | 252 +++++-- electro/interfaces.py | 216 +++++- electro/models.py | 182 ++--- electro/schemas.py | 38 + electro/settings.py | 79 +- electro/storage.py | 74 -- electro/substitutions.py | 15 +- electro/toolkit/buttons.py | 22 - electro/toolkit/decorators.py | 35 + .../storage_services/_base_storage_service.py | 10 + .../azure_blob_storage_service.py | 29 +- .../storage_services/s3_service.py | 16 + .../images_storage/universal_image_storage.py | 4 + electro/toolkit/memory_storage.py | 169 ----- electro/toolkit/openai_client.py | 5 - electro/triggers.py | 4 +- electro/types_.py | 74 -- {electro/extra => examples}/i18n_gettext.py | 4 +- examples/test_flow.py | 7 +- migrations/electro | 1 - poetry.lock | 152 +++- pyproject.toml | 1 - 44 files changed, 1118 insertions(+), 2634 deletions(-) delete mode 100644 .gitmodules create mode 100644 .pylintrc delete mode 100644 electro/bot.py delete mode 100644 electro/contrib/assistants_storage_buckets.py delete mode 100644 electro/contrib/steps.py delete mode 100644 electro/contrib/steps.py.upcoming delete mode 100644 electro/contrib/storages.py delete mode 100644 electro/contrib/views.py delete mode 100644 electro/extra/__init__.py delete mode 100644 electro/extra/decorators.py.upcoming delete mode 100644 electro/extra/httpx_client.py create mode 100644 electro/schemas.py delete mode 100644 electro/toolkit/buttons.py create mode 100644 electro/toolkit/decorators.py delete mode 100644 electro/toolkit/memory_storage.py delete mode 100644 electro/toolkit/openai_client.py delete mode 100644 electro/types_.py rename {electro/extra => examples}/i18n_gettext.py (68%) delete mode 160000 migrations/electro diff --git a/.gitignore b/.gitignore index 673fa51..668b7fa 100644 --- a/.gitignore +++ b/.gitignore @@ -151,6 +151,7 @@ celerybeat.pid # Environments examples/.env +.env .venv env/ venv/ diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index fcb23b5..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "migrations/electro"] - path = migrations/electro - url = git@github.com:CyberCRI/electro-migrations.git diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..6214b28 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,13 @@ +[MAIN] + +max-line-length=120 + +disable= + E0401, # Import error: Ignored because imports actually work + R0801, # Duplicate code + R0903, # Too few public methods + C0114, # Missing module docstring + C0302, # Too many lines in module + W0511, # TODO + R0913, # Too many arguments + R0917, # Too many positional arguments \ No newline at end of file diff --git a/README.md b/README.md index 64505ce..3997ba5 100644 --- a/README.md +++ b/README.md @@ -31,4 +31,4 @@ A framework for building bots, made for humans. ``` 6. Check the API server @ http://localhost:8000/docs. -7. Use one of the clients to connect the platforms: [Discord](https://github.com/CyberCRI/ikigai-discord-client). 
\ No newline at end of file +7. Use one of the clients to connect the platforms: [Whatsapp](https://github.com/CyberCRI/ikigai-whatsapp-bot). \ No newline at end of file diff --git a/docs/source/electro.toolkit.rst b/docs/source/electro.toolkit.rst index 19f5aad..b7e0636 100644 --- a/docs/source/electro.toolkit.rst +++ b/docs/source/electro.toolkit.rst @@ -20,14 +20,6 @@ electro.toolkit.buttons module :show-inheritance: :undoc-members: -electro.toolkit.discord\_tweeks module --------------------------------------- - -.. automodule:: electro.toolkit.discord_tweeks - :members: - :show-inheritance: - :undoc-members: - electro.toolkit.loguru\_logging module -------------------------------------- diff --git a/electro/app.py b/electro/app.py index 51dd1d5..c891abd 100644 --- a/electro/app.py +++ b/electro/app.py @@ -4,9 +4,11 @@ from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise -from . import types_ as types +from .enums import SupportedPlatforms +from .flow_connector import FlowConnectorEvents from .flow_manager import global_flow_manager from .interfaces import APIInterface, WebSocketInterface +from .schemas import ButtonClick, ReceivedMessage from .toolkit.tortoise_orm import get_tortoise_config app = FastAPI( @@ -18,22 +20,36 @@ ) -@app.post("/message") -async def process_message(message: types.Message) -> types.MessageToSend | None: +@app.post("/message/platform/{platform}/") +async def process_message(message: ReceivedMessage, platform: str): """Process the message.""" + if platform not in SupportedPlatforms: + raise ValueError(f"Platform {platform} is not supported.") interface = APIInterface() - return await global_flow_manager.on_message(message, interface) + return await global_flow_manager.on_message(platform, message, interface) -@app.websocket("/websocket/client/{client_name}/user/{user_id}") -async def websocket_endpoint(websocket: WebSocket, client_name: str, user_id: str): +@app.websocket("/websocket/platform/{platform}/user/{user_id}") +async def websocket_endpoint(websocket: WebSocket, platform: str, user_id: str): + if platform not in SupportedPlatforms: + raise ValueError(f"Platform {platform} is not supported.") interface = WebSocketInterface() await interface.connect(websocket) try: while websocket.application_state == WebSocketState.CONNECTED: data = await websocket.receive_json() - data = types.Message.model_validate(data) - await global_flow_manager.on_message(data, interface) + action = data.get("action") + content = data.get("content") + if action == FlowConnectorEvents.MESSAGE: + content = ReceivedMessage.model_validate(content) + await global_flow_manager.on_message(platform, content, interface) + if action == FlowConnectorEvents.BUTTON_CLICK: + content = ButtonClick.model_validate(content) + await global_flow_manager.on_button_click(platform, content, interface) + if action == FlowConnectorEvents.MEMBER_JOIN: + pass + if action == FlowConnectorEvents.MEMBER_UPDATE: + pass except WebSocketDisconnect: await interface.disconnect() diff --git a/electro/bot.py b/electro/bot.py deleted file mode 100644 index 58cfd84..0000000 --- a/electro/bot.py +++ /dev/null @@ -1,35 +0,0 @@ -"""We define the `bot` instance here.""" - -import discord -from discord.ext import commands - -# from .__version__ import __version__ -from .settings import settings - -# Use this hacks to prevent the commands from being added multiple times -# __VERSION_COMMAND_ENABLED = False -__PING_COMMAND_ENABLED = False - -intents = discord.Intents.default() 
-# noinspection PyDunderSlots, PyUnresolvedReferences -intents.members = True -# noinspection PyDunderSlots, PyUnresolvedReferences -intents.message_content = True -bot = commands.Bot(command_prefix=settings.BOT_COMMAND_PREFIX, intents=intents) - -# Enable the `!version` and `!ping` commands on the lower level than the `electro` Framework does -# if not __VERSION_COMMAND_ENABLED: -# @bot.command(name="version") -# async def get_version(ctx): -# await ctx.send(f"Version: {__version__}") -# -# -# __VERSION_COMMAND_ENABLED = True - -if not __PING_COMMAND_ENABLED: - - @bot.command(name="ping") - async def ping(ctx): - await ctx.send("Pong!") - - __PING_COMMAND_ENABLED = True diff --git a/electro/contrib/assistants_storage_buckets.py b/electro/contrib/assistants_storage_buckets.py deleted file mode 100644 index c30944d..0000000 --- a/electro/contrib/assistants_storage_buckets.py +++ /dev/null @@ -1,19 +0,0 @@ -"""The storage buckets for the `GPTAssistantStep`s.""" - -from abc import ABC - -from .storage_buckets import BasePostgresStorageBucket, BaseStorageBucket, StorageBucketElement - - -class BaseAssistantsStorageBucket(BaseStorageBucket, ABC): - """Base storage bucket for the `GPTAssistantStep`s.""" - - __abstract = True - - thread_id: StorageBucketElement[str] - - -class BasePostgresAssistantsStorageBucket(BasePostgresStorageBucket, BaseAssistantsStorageBucket): - """Base storage bucket for the `GPTAssistantStep`s.""" - - __abstract = True diff --git a/electro/contrib/buttons.py b/electro/contrib/buttons.py index 0e95248..d3a1db0 100644 --- a/electro/contrib/buttons.py +++ b/electro/contrib/buttons.py @@ -1,23 +1,76 @@ """The buttons that can be used in the `electro` Framework.""" import typing +import uuid +from abc import ABC +from enum import Enum -import discord.ui - -from ..flow import Flow from ..flow_connector import FlowConnector -from ..flow_step import BaseFlowStep -from ..toolkit.buttons import FrameworkButtonStyle +from ..flow_step import BaseFlowStep, FlowStepDone + +CALLBACK_TYPE = typing.Callable[[FlowConnector], typing.Awaitable[None]] | BaseFlowStep | None + + +class ButtonStyle(Enum): + """A class to store the button styles.""" + + primary = 1 + secondary = 2 + success = 3 + danger = 4 + + blurple = 1 + grey = 2 + gray = 2 + green = 3 + red = 4 + url = 5 + + def __int__(self): + return self.value -CALLBACK_TYPE = typing.Callable[[FlowConnector], typing.Awaitable[None]] | BaseFlowStep +class BaseButton(ABC): + """The base class for buttons.""" -class ActionButton(discord.ui.Button): + def __init__( + self, + label: str | None = None, + style: ButtonStyle = ButtonStyle.primary, + disabled: bool = False, + remove_after_click: bool = False, + ): + if label and len(str(label)) > 80: + raise ValueError("label must be 80 characters or fewer") + + self.style = style + self.label = label + self.custom_id = str(uuid.uuid4()) + self.disabled = disabled + self.remove_after_click = remove_after_click + + +class DataButton(BaseButton): + """A button that can store data.""" + + def __init__( + self, + label: str | None = None, + style: ButtonStyle = ButtonStyle.primary, + disabled: bool = False, + remove_after_click: bool = False, + **kwargs, + ): + super().__init__(label, style, disabled, remove_after_click) + self.extra_data = kwargs + + +class ActionButton(BaseButton): """A button that performs an action when clicked.""" action_callback: CALLBACK_TYPE - def __init__(self, label: str, action_callback: CALLBACK_TYPE, *args, **kwargs): + def __init__(self, label: str, action_callback: 
CALLBACK_TYPE = None, *args, **kwargs): """Initialize the `ActionButton`.""" super().__init__(label=label, *args, **kwargs) @@ -31,10 +84,11 @@ def __init__(self, label: str, action_callback: CALLBACK_TYPE, *args, **kwargs): async def trigger_action(self, flow_connector: FlowConnector): """Trigger the `ActionButton`.""" - if isinstance(self.action_callback, BaseFlowStep): - await self.action_callback.run(flow_connector) - else: - await self.action_callback(flow_connector) + if self.action_callback: + if isinstance(self.action_callback, BaseFlowStep): + await self.action_callback.run(flow_connector) + else: + await self.action_callback(flow_connector) class GoToFlowButton(ActionButton): @@ -50,7 +104,7 @@ def __init__(self, label: str, flow_name: str, *args, **kwargs): async def trigger_action(self, flow_connector: FlowConnector): """Trigger the `GoToFlowButton`.""" - flow: Flow | None = flow_connector.flow_manager.get_flow(self.flow_name) + flow = flow_connector.flow_manager.get_flow(self.flow_name) if not flow: raise ValueError(f"Flow with the name '{self.flow_name}' does not exist.") @@ -59,10 +113,17 @@ async def trigger_action(self, flow_connector: FlowConnector): return await flow.run(flow_connector) -class DataButton(discord.ui.Button): - def __init__(self, label: str, style: FrameworkButtonStyle, custom_id: str = None, **kwargs): - super().__init__(label=label, style=style, custom_id=custom_id) - self.kwargs = kwargs +class ConfirmButton(ActionButton): + def __init__( + self, + label: str | None = None, + style: ButtonStyle = ButtonStyle.primary, + disabled: bool = False, + remove_after_click: bool = True, + ): + super().__init__( + label=label, style=style, action_callback=None, disabled=disabled, remove_after_click=remove_after_click + ) - async def callback(self, interaction: discord.Interaction): - interaction.data = {**interaction.data, **self.kwargs} + async def trigger_action(self, flow_connector: FlowConnector): + raise FlowStepDone diff --git a/electro/contrib/steps.py b/electro/contrib/steps.py deleted file mode 100644 index 896ba62..0000000 --- a/electro/contrib/steps.py +++ /dev/null @@ -1,159 +0,0 @@ -"""The [extra/contrib] steps that can be used in the flow. Specific to the project.""" - -import typing -from dataclasses import dataclass -from io import BytesIO -from typing import Type - -import discord - -from .. 
import FlowConnector -from ..contrib.storage_buckets import BaseStorageBucketElement -from ..flow_step import MessageFlowStep -from ..models import BaseModel, File -from ..settings import settings -from ..toolkit.images_storage.universal_image_storage import universal_image_storage -from ..toolkit.loguru_logging import logger -from ..toolkit.templated_i18n import TemplatedString -from .storages import ModelsStorageElement -from .views import ChooseOneOptionView - - -class ChooseOneModelView(ChooseOneOptionView): - """Choose one of the models.""" - - def __init__( - self, - model_to_choose_from: Type[BaseModel], - options: list[str | discord.ui.Button] | typing.Callable[[], typing.Awaitable[list[str]]] = None, - answers_storage: BaseStorageBucketElement | None = None, - **kwargs, - ): - """Initialize the view.""" - if not options: - options = [] - - super().__init__(options, answers_storage, **kwargs) - - self.model_to_choose_from: Type[BaseModel] = model_to_choose_from - - async def _get_instances_pks(self) -> list[str]: - instances: list[BaseModel] = await self.model_to_choose_from.filter(is_active=True, is_deleted=False).all() - - return [str(instance.pk) for instance in instances] - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[str]: - return await self._get_instances_pks() + await super().get_static_buttons(flow_connector) - - async def get_or_create_for_connector( - self, - flow_connector: FlowConnector, - dynamic_buttons: list[str | discord.Button] | None = None, - force_init: bool = False, - force_get: bool = False, - from_step_run: bool = False, - ) -> typing.Self: - """Get or create the view for the connector.""" - # TODO: [2024-09-11 by Mykola] Make it so that all dynamic views are re-created on each step run - if from_step_run: - force_init = True - - return await super().get_or_create_for_connector( - flow_connector, dynamic_buttons, force_init, force_get, from_step_run - ) - - async def _set_user_answer(self, user_answer: typing.Any): - """Set the user answer.""" - instance: BaseModel = await self.model_to_choose_from.get_or_none(pk=user_answer) - - return await super()._set_user_answer(instance) - - -@dataclass -class ChooseOneFromModelsStep(MessageFlowStep): - """Choose one of the models.""" - - model_to_choose_from: Type[BaseModel] = None - - storage_to_save_model_to: ModelsStorageElement = None - - def __post_init__(self): - if self.model_to_choose_from is None: - raise ValueError("`model_to_choose_from` is required!") - if self.storage_to_save_model_to is None: - raise ValueError("`storage_to_save_model_to` is required!") - - self.view = ChooseOneModelView( - model_to_choose_from=self.model_to_choose_from, - answers_storage=self.storage_to_save_model_to, - ) - - -@dataclass -class AcceptFileStep(MessageFlowStep): - """Accept a file from the user.""" - - storage_to_save_file_url_to: BaseStorageBucketElement | None = None - storage_to_save_file_object_id_to: BaseStorageBucketElement | None = None - - storage_to_save_saved_file_id_to: BaseStorageBucketElement | None = None - - file_is_required_message: TemplatedString | str = "You need to upload a file." 
- file_saved_confirmation_message: TemplatedString | str | None = None - - allow_skip: bool = False - - def __post_init__(self): - if self.storage_to_save_file_url_to is None: - raise ValueError("`storage_to_save_file_url_to` is required!") - - async def process_response(self, connector: FlowConnector): - """Process the response.""" - if not connector.message.attachments: - if self.allow_skip: - return await super().process_response(connector) - - return await self.send_message(connector, self.file_is_required_message) - - # Get the first attachment - attachment = connector.message.attachments[0] - - # Save the file URL - if self.storage_to_save_file_url_to: - await self.storage_to_save_file_url_to.set_data(attachment.url) - logger.info(f"Saved the file URL: {attachment.url=}") - - # Save the File - if self.storage_to_save_file_object_id_to or self.storage_to_save_saved_file_id_to: - file_io = BytesIO(await attachment.read()) - file_object_key = await universal_image_storage.upload_image(file_io) - - if self.storage_to_save_file_object_id_to: - # Save the file object key - await self.storage_to_save_file_object_id_to.set_data(file_object_key) - - logger.info(f"Saved the file object key: {file_object_key=}") - - if self.storage_to_save_saved_file_id_to: - # Create the `File` object - try: - file = await File.create( - added_by_user_id=connector.user.id, - storage_service=settings.STORAGE_SERVICE_ID, - storage_file_object_key=file_object_key, - file_name=attachment.filename, - discord_attachment_id=attachment.id, - discord_cdn_url=attachment.url, - ) - - except Exception as exception: - logger.error(f"Failed to save the file: {exception}") - return await self.send_message(connector, "Failed to save the file.") - - # Save the file ID - await self.storage_to_save_saved_file_id_to.set_data(file.pk) - - if self.file_saved_confirmation_message: - await self.send_message(connector, self.file_saved_confirmation_message) - - return await super().process_response(connector) diff --git a/electro/contrib/steps.py.upcoming b/electro/contrib/steps.py.upcoming deleted file mode 100644 index c708163..0000000 --- a/electro/contrib/steps.py.upcoming +++ /dev/null @@ -1,712 +0,0 @@ -"""The [extra/contrib] steps that can be used in the flow. 
Specific to the project.""" - -import asyncio -import io -import json -import random -import textwrap -import typing -from abc import ABC -from dataclasses import dataclass -from pathlib import Path -from tempfile import NamedTemporaryFile -from typing import Annotated - -import discord -from httpx import ReadTimeout -from openai.types import ImagesResponse -from openai.types.beta.threads.run_submit_tool_outputs_params import ToolOutput - -from decorators import with_constant_typing -from framework.contrib.assistants_storage_buckets import BaseAssistantsStorageBucket -from framework.contrib.storage_buckets import BasePostgresStorageBucket, BaseStorageBucket, StorageBucketElement -from framework.contrib.views import BaseView, ChooseOneOptionView, StorageMixin, ViewStepFinished -from framework.flow_connector import FlowConnector, FlowConnectorEvents -from framework.flow_step import BaseFlowStep, FlowStepDone, MessageFlowStep, MessageFormatterMixin -from framework.substitutions import BaseSubstitution -from httpx_client import httpx_client -from .settings import settings -from utils.buttons import FrameworkButtonStyle -from utils.images import get_combined_image -from utils.loguru_logging import logger -from utils.openai_client import async_openai_client -from utils.templated_i18n import TemplatedString - - -class BaseImagesStepStorageBucket(BaseStorageBucket, ABC): - """The base class for the storage bucket for the images step.""" - - __abstract = True - - buttons_sent_to_images: StorageBucketElement[dict[str, str]] - images_sent_in_this_step: StorageBucketElement[list[str]] - image_chosen: StorageBucketElement[str] - - load_more_button_custom_id: StorageBucketElement[str] - - -class BasePostgresImagesStepStorageBucket(BasePostgresStorageBucket, BaseImagesStepStorageBucket): - """The base class for the storage bucket for the images step.""" - - __abstract = True - - -class LoadMore(Exception): - """The exception that is raised when the user wants to load more images.""" - - pass - - -class ImagesButtonsView(ChooseOneOptionView): - """The view that allows the user to choose an image.""" - - load_more_button_label: str - - def __init__(self, load_more_button_label: str, **kwargs): - """Initialize the view.""" - super().__init__(options=[], **kwargs) - - self.load_more_button_label = load_more_button_label - - @property - def load_more_button(self): - """Get the load more button.""" - # TODO: [29.09.2023 by Mykola] Make this more secure: what if there are two buttons with - # the "load more" label? 
- for child in self.children: - if isinstance(child, discord.ui.Button) and child.label == self.load_more_button_label: - return child - - raise ValueError("The load more button is not found.") - - async def get_or_create_for_connector( - self, - flow_connector: FlowConnector, - dynamic_buttons: list[str | discord.Button] | None = None, - force_init: bool = False, - force_get: bool = False, - from_step_run: bool = False, - ) -> typing.Self: - if not dynamic_buttons and not force_get: - raise ValueError("The dynamic buttons are empty.") - - return await super().get_or_create_for_connector( - flow_connector, dynamic_buttons=dynamic_buttons, force_init=force_init - ) - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[str]: - """Get the buttons for the view.""" - return [*self.options, self.load_more_button_label] - - def remove_load_more_button(self): - """Remove the load more button.""" - self.remove_item(self.load_more_button) - - -class ImageChooseStep(BaseFlowStep, MessageFormatterMixin, StorageMixin): - """The step that allows the user to choose an image.""" - - choose_image_message: TemplatedString | None = None - - images: list[Path] - n_images_per_page: int = 5 - max_images_to_show: int | None = None - - buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary - - _images_view: ImagesButtonsView - - image_chosen_message: TemplatedString | None = None - - images_chosen_storage: StorageBucketElement | None = None - - _images_step_storage: typing.Type[BaseImagesStepStorageBucket] | None = None - - _testing: bool = False - - # TODO: [29.09.2023 by Mykola] Make it state-less - _sent_messages_with_views_for_user: dict[int, list[discord.Message]] - - def __init__( - self, - choose_image_message: TemplatedString | None, - images: list[Path], - images_per_page: int = 5, - max_images_to_show: int | None = None, - buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary, - substitutions: dict[str, str] | None = None, - load_more_images_button: TemplatedString | None = None, - image_chosen_message: TemplatedString | None = None, - images_step_storage: typing.Type[BaseImagesStepStorageBucket] | None = None, - _testing: bool = False, - ): - """Initialize the step.""" - self.choose_image_message = choose_image_message or TemplatedString("Choose an image:") - - if not images: - raise ValueError(f"The images list is empty in {self.__class__.__name__}.") - - self.images = images - self.max_images_to_show = max_images_to_show - self.buttons_style = buttons_style or self.buttons_style - - if len(images) < images_per_page: - raise ValueError("The number of images is less than the number of images per page.") - self.n_images_per_page = images_per_page - - self._images_view = ImagesButtonsView(load_more_images_button or TemplatedString("Load more images")) - - self.image_chosen_message = image_chosen_message or TemplatedString("You have chosen the image: {image_name}") - - if not images_step_storage: - # TODO: [29.09.2023 by Mykola] Make sure we have some other storage to store at least - # the `custom_id` of the `load_more` button. Otherwise, we won't be able to process - raise ValueError("The images step storage is empty. 
Unable to continue.") - - self._images_step_storage = images_step_storage - self._testing = _testing - - self._sent_messages_with_views_for_user = {} # re-set for each instance - - @staticmethod - async def _send_images( - connector: FlowConnector, - choose_image_message: str, - all_images_options: list[Path], - n_images_per_page: int, - images_view: ImagesButtonsView, - images_step_storage: typing.Type[BaseImagesStepStorageBucket], - max_images_to_show: int | None = None, - buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary, - ): - """ - Send the images to the user. - - The idea behind making this method static is that this way we can reduce the mental load - when working with the method: we don't need to think about where the objects are coming - from, we just pass the parameters and get the result. - """ - # Get the already selected images at this step - images_already_sent: list[str] = await images_step_storage.images_sent_in_this_step.get_data(default=[]) - - # Get the images that are available to send - available_images: list[Path] = [image for image in all_images_options if image.name not in images_already_sent] - - images_on_page: list[Path] = random.sample(available_images, n_images_per_page) - - combined_image: NamedTemporaryFile = get_combined_image(images_on_page) - - # From here on, we work with images' names rather than images themselves as `Path` objects - images_names: list[str] = [image.name for image in images_on_page] - - # Save that we've sent these particular images to the user - await images_step_storage.images_sent_in_this_step.set_data([*images_already_sent, *images_names]) - - images_buttons: list[discord.Button] = images_view.strings_to_buttons( - [str(i) for i, _ in enumerate(images_names, start=1)], buttons_style=buttons_style - ) - - # Get the buttons sent related to the images - buttons_sent_to_images: dict[str, str] = await images_step_storage.buttons_sent_to_images.get_data(default={}) - - # Save/update the buttons sent related to the images - await images_step_storage.buttons_sent_to_images.set_data( - buttons_sent_to_images - | { - # TODO: [07.09.2023 by Mykola] Is `zip` here really safe? 
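On the `zip` TODO just above: a bare `zip` silently truncates to the shorter input, so if the button list and the image-name list ever fell out of sync, the stored mapping would be wrong without any error. Since the project targets Python 3.12 (see the Dockerfile), `strict=True` can turn that into a loud failure; a minimal, self-contained illustration:

    buttons = ["btn-1", "btn-2", "btn-3"]
    image_names = ["a.png", "b.png"]  # one name missing by mistake

    # Plain zip drops the unmatched button silently -> a wrong but "valid-looking" mapping
    assert dict(zip(buttons, image_names)) == {"btn-1": "a.png", "btn-2": "b.png"}

    # strict=True (Python 3.10+) raises instead of truncating
    try:
        dict(zip(buttons, image_names, strict=True))
    except ValueError as error:
        print(f"length mismatch caught: {error}")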
- button.custom_id: image_name - for button, image_name in zip(images_buttons, images_names) - } - ) - - view_to_send = await images_view.get_or_create_for_connector( - connector, dynamic_buttons=images_buttons, force_init=True - ) - - if max_images_to_show and ( - len(await images_step_storage.images_sent_in_this_step.get_data(default=[])) > max_images_to_show - ): - # Remove the load more button from the newly generated view - view_to_send.remove_load_more_button() - - else: - # Save the id of the Load More button - await images_step_storage.load_more_button_custom_id.set_data(view_to_send.load_more_button.custom_id) - - return await connector.channel.send( - choose_image_message, - file=discord.File(combined_image.file), - view=view_to_send, - ) - - async def send_images(self, connector: FlowConnector, *, with_message: bool = True): - """Send the images to the user.""" - choose_image_message: str | None = ( - await self._get_formatted_message(self.choose_image_message, connector) if with_message else None - ) - - images_message_with_view = await self._send_images( - connector, - choose_image_message, - self.images, - self.n_images_per_page, - self._images_view, - self._images_step_storage, - max_images_to_show=self.max_images_to_show, - buttons_style=self.buttons_style, - ) - - self._sent_messages_with_views_for_user.setdefault(connector.user.id, []).append(images_message_with_view) - - return images_message_with_view - - @with_constant_typing() - async def run(self, connector: FlowConnector): - """Initialize a View and send it to the user.""" - # Empty the storage - await self._images_step_storage.empty() - - return await self.send_images(connector, with_message=True) - - def _check_connector_can_be_processed(self, connector: FlowConnector): - if not connector.event == FlowConnectorEvents.BUTTON_CLICK: - logger.warning( - f"View {self.__class__.__name__} received an event that is not BUTTON_CLICK: " f"{connector.event}" - ) - - if connector.interaction is None: - raise FlowStepDone("The interaction is None.") - - async def set_chosen_image(self, connector: FlowConnector, image_name: str, message_to_edit: discord.Message): - """ - Remove all the views for selecting the images, remove the combined image and replace it - with the chosen image. 
- """ - - # Get all the messages sent for this connector - try: - messages_with_views_for_user: list[discord.Message] = self._sent_messages_with_views_for_user[ - connector.user.id - ] - except KeyError: - raise ValueError("There are no messages with views for this user.") - - # Remove all the views for selecting the images - for message_with_view in messages_with_views_for_user: - if message_with_view.id == message_to_edit.id: - continue - else: - await message_with_view.delete() - - # Delete all the messages with views for this user - del self._sent_messages_with_views_for_user[connector.user.id] - - # Remove the combined image and replace it with the chosen image - return await message_to_edit.edit( - content=await self._get_formatted_message(self.image_chosen_message, connector, image_name=image_name), - file=discord.File([image for image in self.images if image.name == image_name][0]), - view=None, - attachments=[], - ) - - async def process_response(self, connector: FlowConnector): - """Process the response.""" - # Check that the connector can be processed - self._check_connector_can_be_processed(connector) - - if connector.interaction: - try: - if self._images_step_storage: - # Check if the "load_more" button has been clicked - if connector.interaction.data["custom_id"] == ( - await self._images_step_storage.load_more_button_custom_id.get_data() - ): - # Remove the button from the view - # TODO: [29.09.2023 by Mykola] Maybe abstract this away? - view_for_connector = await self._images_view.get_or_create_for_connector( - connector, force_get=True - ) - view_for_connector.remove_load_more_button() - await connector.interaction.response.edit_message(view=view_for_connector) - - raise LoadMore() - - # Get the buttons sent related to the images - buttons_sent_to_images: dict[str, str] = ( - await self._images_step_storage.buttons_sent_to_images.get_data(default={}) - ) - - # Get the image name that the user has chosen - image_name: str = buttons_sent_to_images[connector.interaction.data["custom_id"]] - - # Save that this image has been chosen - await self._images_step_storage.image_chosen.set_data(image_name) - - # TODO: [07.09.2023 by Mykola] Save that this image has been chosen - # (so we don't get it on the next step. how?). 
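One way to resolve the TODO just above (keeping an already-chosen image out of later steps) is to track chosen image names in a storage element shared across steps and filter candidates against it. This is only a sketch: `chosen_images_storage` is a hypothetical element, but its `get_data`/`set_data` calls mirror the storage-bucket API used throughout this file.

    from pathlib import Path

    async def filter_out_chosen(all_images: list[Path], chosen_images_storage) -> list[Path]:
        """Drop images the user has already picked in a previous step."""
        already_chosen: list[str] = await chosen_images_storage.get_data(default=[]) or []
        return [image for image in all_images if image.name not in already_chosen]

    async def remember_chosen(image_name: str, chosen_images_storage) -> None:
        """Record a newly picked image so later steps can exclude it."""
        already_chosen: list[str] = await chosen_images_storage.get_data(default=[]) or []
        if image_name not in already_chosen:
            await chosen_images_storage.set_data([*already_chosen, image_name])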
- - await self.set_chosen_image(connector, image_name, connector.interaction.message) - - raise ViewStepFinished() - - # Should never be reached - return await self._images_view.process_interaction(connector) - except LoadMore: - return await self.send_images(connector, with_message=False) - except ViewStepFinished as exception: - raise FlowStepDone() from exception - - -class RankingOptionsView(ChooseOneOptionView): - """The view that allows the user to rank the options.""" - - def __init__( - self, - options: list[str] | typing.Callable[[], typing.Awaitable[list[str]]], - answers_storage: StorageBucketElement | None = None, - reverse_answers: bool = False, - **kwargs, - ): - """Initialize the view.""" - - # Reverse the answers, so that first answer (click) will be the last one in - # `self.answers_storage` - self.reverse_answers = reverse_answers - - super().__init__(options, answers_storage, **kwargs) - - async def _on_view_created(self, flow_connector: FlowConnector, view: BaseView): - """When the view is created, set the user answer to an empty list.""" - await self._set_user_answer([]) - - return await super()._on_view_created(flow_connector, view) - - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Process the button click.""" - - # Save the answer - user_answers: list[str] | None = await self._get_user_answer() or [] - user_answers.append(button.label) - await self._set_user_answer(user_answers) - - # Remove the button - self.remove_item(button) - - # If there are no more buttons, finish the step - if not self.children: - if self.reverse_answers: - # Reverse the answers - await self._set_user_answer(user_answers[::-1]) - - # Remove the view - await flow_connector.interaction.response.edit_message(view=None) - - # Finish the step - raise ViewStepFinished() - - # Update the message - await flow_connector.interaction.response.edit_message(view=self) - - -@dataclass -class TimerStep(BaseFlowStep): - """The step that just counts down the timer and then moves to the next step.""" - - minutes: int | None = None - seconds: int | None = None - - non_blocking = True - - async def run(self, connector: FlowConnector): - """Run the step: count down the timer.""" - sleeping_time: int = (self.minutes or 0) * 60 + (self.seconds or 0) - - if settings.DEBUG: - # Vastly reduce the sleeping time in DEBUG mode - sleeping_time: int = int(sleeping_time / 60 * 2) - - logger.info(f"{self.__class__}: Sleeping for {sleeping_time} seconds...") - await asyncio.sleep(sleeping_time) - - if self.non_blocking: - raise FlowStepDone() - - async def process_response(self, connector: FlowConnector): - """Process the response. Should never be reached since the step is non-blocking.""" - raise FlowStepDone() - - -@dataclass -class GPTAssistantStep(BaseFlowStep, MessageFormatterMixin): - """The step that runs OpenAI Assistant.""" - - assistant_id: str - - start_message: TemplatedString | None = None - start_prompt: TemplatedString | None = None - - assistant_step_storage: typing.Type[BaseAssistantsStorageBucket] | None = None - - functions: dict[str, typing.Callable[[FlowConnector, ...], typing.Awaitable[None]]] | None = None - auto_add_continue_function: bool = True - any_function_triggers_flow_step_done: bool = False - - default_action_output_instead_of_none: str | None = "Executed successfully." 
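A quick worked check of the DEBUG shortcut in `TimerStep.run` above: `int(sleeping_time / 60 * 2)` compresses each configured minute into roughly two seconds, which keeps local test runs fast.

    def debug_sleep_seconds(minutes: int = 0, seconds: int = 0) -> int:
        """Mirror the DEBUG-mode scaling used by TimerStep: one minute -> two seconds."""
        full_sleep = (minutes or 0) * 60 + (seconds or 0)
        return int(full_sleep / 60 * 2)

    assert debug_sleep_seconds(minutes=5) == 10        # a 5-minute timer sleeps 10 s in DEBUG
    assert debug_sleep_seconds(seconds=30) == 1        # 30 s becomes 1 s
    assert debug_sleep_seconds(minutes=1, seconds=30) == 3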
- - _max_message_length: int = settings.MESSAGE_MAX_LENGTH - - @staticmethod - async def _continue(_connector: FlowConnector, *_, **__): - """Continue the flow.""" - if any((_, __)): - logger.warning(f"Got unexpected args: {_=}, {__=}") - - raise FlowStepDone() - - def __post_init__(self): - """Post-initialize the step.""" - if not self.start_message and not self.start_prompt: - raise ValueError("Either `start_message` or `start_prompt` should be set.") - - if self.auto_add_continue_function: - self.functions = self.functions or {} - self.functions["continue"] = self._continue - self.functions["exit_from_the_assistant"] = self._continue - - def _split_message(self, message: str) -> list[str]: - """Split the message into chunks.""" - return textwrap.wrap(message, width=self._max_message_length, replace_whitespace=False, break_long_words=False) - - async def get_assistant_response( - self, prompt: str, connector: FlowConnector - ) -> tuple[Annotated[str, "response_message_text"], Annotated[bool, "raise_flow_step_done"]]: - """Get the assistant response.""" - logger.info(f"Getting the Assistant response for {prompt=}, {connector.user.id=}") - - assistant = await async_openai_client.beta.assistants.retrieve(self.assistant_id) - - message_text: str = prompt - - # Get the Thread ID from the storage - thread_id: str | None = await self.assistant_step_storage.thread_id.get_data() - - if not thread_id: - # Create a Thread - thread = await async_openai_client.beta.threads.create() - - # Save the Thread ID to the Storage - await self.assistant_step_storage.thread_id.set_data(thread.id) - - else: - thread = await async_openai_client.beta.threads.retrieve(thread_id) - - thread_message = await async_openai_client.beta.threads.messages.create( - thread_id=thread.id, role="user", content=message_text - ) - logger.info(f"Created a message in Assistants API: {thread_message=}, {thread_id=}, {connector.user.id=}") - - run = await async_openai_client.beta.threads.runs.create( - thread_id=thread.id, - assistant_id=assistant.id, - ) - logger.info(f"Run the thread with: {thread_message=}, {thread_id=}, {connector.user.id=}. Got {run=}") - - raise_flow_step_done = False - while True: - for retry_n in range(1, 11): - logger.info(f"Trying to retrieve a run. 
Try #{retry_n}: {run.id=}") - try: - run = await async_openai_client.beta.threads.runs.retrieve( - thread_id=thread.id, run_id=run.id, timeout=10 - ) - logger.info(f"Retrieved a run: {run=} for {thread_id=}") - break - except ReadTimeout as exception: - logger.exception( - f"Hit a ReadTimeout while retrieving a run: {thread_id=}, {run.id=}", exc_info=exception - ) - - logger.info("Sleeping for 1 second") - await asyncio.sleep(1) - - if run.status == "completed": - break - - elif run.status == "requires_action": - # TODO: [01.02.2024 by Mykola] Move this somewhere up the execution tree - if not self.functions: - raise ValueError(f"The functions are not set in {self.__class__.__name__}") - - tool_outputs: list[ToolOutput] = [] - # Get the action to perform - for tool_call in run.required_action.submit_tool_outputs.tool_calls: - # Get the function to execute - function_to_execute = self.functions.get(tool_call.function.name) - - if not function_to_execute: - raise ValueError(f"The function to execute is not found: {tool_call.function.name}") - - try: - # Execute the function - action_output = await function_to_execute(connector, **json.loads(tool_call.function.arguments)) - except FlowStepDone: - # If the function raised `FlowStepDone`, then we should finish the step - raise_flow_step_done = True - - action_output = None - - if self.any_function_triggers_flow_step_done: - raise_flow_step_done = True - - # Add the output to the tool outputs - tool_outputs.append( - ToolOutput( - tool_call_id=tool_call.id, - output=action_output or self.default_action_output_instead_of_none, - ) - ) - - # Submit tool outputs - await async_openai_client.beta.threads.runs.submit_tool_outputs( - thread_id=thread.id, - run_id=run.id, - tool_outputs=tool_outputs, - ) - - elif run.status == "in_progress": - logger.debug(f"Run {run.id} is in progress. Sleeping for 1 second...") - await asyncio.sleep(1) - - elif run.status == "queued": - logger.debug(f"Run {run.id} is queued. 
Sleeping for 1 second...") - await asyncio.sleep(1) - - else: - raise ValueError(f"The Run status is unexpected: {run.status}") - - thread_messages = await async_openai_client.beta.threads.messages.list(thread_id=thread.id) - response_message_text: str = thread_messages.data[0].content[0].text.value - - return response_message_text, raise_flow_step_done - - @with_constant_typing() - async def run(self, connector: FlowConnector): - if not self.assistant_id: - logger.error(f"The Assistant ID is not set in {self.__class__.__name__}") - raise FlowStepDone() - - await self.assistant_step_storage.empty() - - if self.start_prompt: - formatted_prompt = await self._get_formatted_message(self.start_prompt, connector) - - if not formatted_prompt: - raise ValueError("The formatted prompt is empty.") - - try: - response_message_text, raise_flow_step_done = await self.get_assistant_response( - prompt=formatted_prompt, connector=connector - ) - except Exception as exception: - logger.exception( - f"Got an exception while processing the response: {formatted_prompt=}", exc_info=exception - ) - - await self.assistant_step_storage.empty() - - raise FlowStepDone() - - sent_message: discord.Message = [ - await connector.message.channel.send(content=content) - for content in self._split_message(response_message_text) - ][0] - - if raise_flow_step_done: - raise FlowStepDone() - - return sent_message - - return [ - await connector.channel.send(message_part) - for message_part in self._split_message(await self._get_formatted_message(self.start_message, connector)) - ][0] - - @with_constant_typing() - async def process_response(self, connector: FlowConnector): - """Process the response from the User. Usually it's the first prompt to the Assistant.""" - try: - response_message_text, raise_flow_step_done = await self.get_assistant_response( - prompt=connector.message.content, connector=connector - ) - except Exception as exception: - logger.exception( - f"Got an exception while processing the response: {connector.message.content=}", exc_info=exception - ) - - await self.assistant_step_storage.empty() - - raise FlowStepDone() - - sent_message: discord.Message = await connector.message.channel.send(content=response_message_text) - - if raise_flow_step_done: - raise FlowStepDone() - - return sent_message - - -@dataclass -class DalleImageStep(MessageFlowStep): - """The step that runs OpenAI DALL-E to generate an image.""" - - prompt: str | TemplatedString | None = None - - def __post_init__(self): - """Post-initialize the step.""" - if not self.prompt: - raise ValueError(f"{self.__class__.__name__}: The `prompt` is not set.") - - @staticmethod - async def _generate_image(prompt: str, model: str = settings.OPENAI_DALLE_MODEL) -> io.BytesIO: - """Generate an image from the prompt.""" - logger.info(f"Generating an image from the prompt: {prompt=}") - images_response: ImagesResponse = await async_openai_client.images.generate( - model=model, - prompt=prompt, - n=1, # Generate only one image. # TODO: [28.02.2024 by Mykola] Make it configurable? - ) - logger.debug(f"Got the images response: {images_response=} for {prompt=} and {model=}") - - image_url = images_response.data[0].url - logger.debug(f"Got the image URL: {image_url=}") - - image_data = await httpx_client.get(image_url) - logger.debug(f"Got the image data: {image_data=} for {image_url=}. 
Content length: {len(image_data.content)}") - - logger.info(f"Generated an image from the prompt: {prompt=}") - return io.BytesIO(image_data.content) - - async def _get_files_to_send(self, connector: FlowConnector) -> list[discord.File]: - """Get the files to send. Append the generated image to the list of files.""" - generated_image = await self._generate_image(await self._get_formatted_message(self.prompt, connector)) - - return [discord.File(generated_image, filename="dalle_image.jpg")] + await super()._get_files_to_send(connector) - - -@dataclass -class UpdateStorageStep(BaseFlowStep): - """The step that updates the storage.""" - - storage_bucket_element: StorageBucketElement - - value: typing.Any - - async def run(self, connector: FlowConnector): - """Run the step.""" - await self.storage_bucket_element.set_data(self.value) - - raise FlowStepDone() - - async def process_response(self, connector: FlowConnector): - """Process the response.""" - raise FlowStepDone() diff --git a/electro/contrib/storage_buckets.py b/electro/contrib/storage_buckets.py index 3bd4b17..27f053e 100644 --- a/electro/contrib/storage_buckets.py +++ b/electro/contrib/storage_buckets.py @@ -455,6 +455,8 @@ def __new__(mcs, name, bases, namespace, **kwargs): # Set the storage elements from annotations for attr_name, attr_type in (merged_bases_annotations | cls.__annotations__).items(): + resolved_annotations = typing.get_type_hints(cls) + attr_type = resolved_annotations.get(attr_name, attr_type) if (not attr_name.startswith("_")) and issubclass(get_origin(attr_type), BaseStorageBucketElement): element_class: Type[PostgresStorageBucketElement] = ( PostgresStorageBucketElement @@ -540,4 +542,18 @@ async def empty(cls): # } +class BaseAssistantsStorageBucket(BaseStorageBucket, ABC): + """Base storage bucket for the `GPTAssistantStep`s.""" + + __abstract = True + + thread_id: StorageBucketElement[str] + + +class BasePostgresAssistantsStorageBucket(BasePostgresStorageBucket, BaseAssistantsStorageBucket): + """Base storage bucket for the `GPTAssistantStep`s.""" + + __abstract = True + + # endregion diff --git a/electro/contrib/storages.py b/electro/contrib/storages.py deleted file mode 100644 index e61a042..0000000 --- a/electro/contrib/storages.py +++ /dev/null @@ -1,15 +0,0 @@ -import typing - -from ..models import BaseModel -from .storage_buckets import PostgresStorageBucketElement - -VALUE = typing.TypeVar("VALUE", bound=BaseModel) - - -class ModelsStorageElement(PostgresStorageBucketElement[VALUE]): - """Storage element for the models.""" - - async def get_data(self, default: VALUE | None = None) -> VALUE | None: - queryset = await super().get_data(default) - - return await queryset diff --git a/electro/contrib/views.py b/electro/contrib/views.py deleted file mode 100644 index 687f245..0000000 --- a/electro/contrib/views.py +++ /dev/null @@ -1,540 +0,0 @@ -"""The module with extra Views that can be used with the Framework.""" - -from __future__ import annotations - -import inspect -import typing -from abc import ABC, abstractmethod -from copy import copy - -import discord - -# noinspection PyProtectedMember -from discord.ui.view import _ViewWeights -from tenacity import retry, stop_after_attempt, wait_fixed - -from ..substitutions import BaseSubstitution -from ..toolkit.buttons import create_button, FrameworkButtonStyle -from ..toolkit.loguru_logging import logger - -if typing.TYPE_CHECKING: - from .buttons import ActionButton - -from .storage_buckets import StorageBucketElement - -if typing.TYPE_CHECKING: - from 
..flow_connector import FlowConnector - - -LAST_ROW_INDEX = 4 - - -class ViewStepFinished(Exception): - """The exception that is raised when the `View` is finished.""" - - pass - - -class BaseView(discord.ui.View, ABC): - """The base view for all the views in the framework.""" - - # NB: The use of `emoji_buttons` is discouraged unless the button's label is a one-time text - # (not being saved in the storage) - emoji_buttons: bool = False - buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary - - _parent_view: BaseView | None = None - _custom_id_to_button: dict[str, discord.ui.Button] = {} - - _user_connectors_to_views: dict[int, BaseView] - - _trim_button_labels_at: int = 80 - - def __init__( - self, - emoji_buttons: bool | None = None, - buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary, - force_init_on_step_run: bool = False, - clear_storage_on_step_run: bool = False, - timeout: int | None = None, - ): - """Initialize the view.""" - self.emoji_buttons = emoji_buttons if emoji_buttons is not None else self.emoji_buttons - self.buttons_style = buttons_style or self.buttons_style - - self.force_init_on_step_run = force_init_on_step_run - self.clear_storage_on_step_run = clear_storage_on_step_run - - self._user_connectors_to_views = {} - - super().__init__(timeout=timeout) - - async def _on_view_created(self, flow_connector: FlowConnector, view: BaseView): - """The method that is called when the view is created.""" - pass - - async def get_or_create_for_connector( - self, - flow_connector: FlowConnector, - dynamic_buttons: list[str | discord.Button] | None = None, - force_init: bool = False, - force_get: bool = False, - from_step_run: bool = False, - ) -> typing.Self: - """Get the view specifically for this `FlowConnector`.""" - if from_step_run: - force_init = force_init or self.force_init_on_step_run - - if self.clear_storage_on_step_run and from_step_run: - if isinstance(self, StorageMixin): - await self.clear_storage() - - if force_get and force_init: - raise ValueError("Cannot force both get and init.") - - if not force_init and (view := self._user_connectors_to_views.get(flow_connector.user.id)): - return view - - if force_get: - raise ValueError("Cannot force get if the view is not initialized.") - - view = copy(self) - # Since we're _copying_ the view, the `.children` attribute would be copied as well, as a - # link to the original view's children. 
We need to set it to an empty list to avoid - # modifying the original view's children - view.children = [] - # Same with `self.__weights = _ViewWeights(self.children)` - view._View__weights = _ViewWeights(view.children) - - static_buttons: list[discord.ui.Button | str] = [ - ( - button_or_string[: self._trim_button_labels_at] - if self._trim_button_labels_at and isinstance(button_or_string, str) - else button_or_string - ) - for button_or_string in await self.get_static_buttons(flow_connector) - ] - - # Add the new buttons to the view - view._add_buttons(*dynamic_buttons or [], *static_buttons) - - # Add the buttons to the `_custom_id_to_button` dict - # This is used so that we can get the button by its custom id when we process the - # interaction - # TODO: [07.09.2023 by Mykola] Find a more sustainable way to do this - for item in view.children: - if isinstance(item, discord.ui.Button): - self._custom_id_to_button[item.custom_id] = item - - view._parent_view = self - - # Save the view for the user - self._user_connectors_to_views[flow_connector.user.id] = view - - await self._on_view_created(flow_connector, view) - - return view - - # return self - - @staticmethod - def strings_to_buttons( - strings: list[str], buttons_style: FrameworkButtonStyle = FrameworkButtonStyle.primary - ) -> list[discord.ui.Button]: - """Convert the strings to buttons.""" - return [create_button(string, style=buttons_style or FrameworkButtonStyle.primary) for string in strings] - - # TODO: [07.09.2023 by Mykola] Make it not `async` - @abstractmethod - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[discord.ui.Button | str]: - """Get the static buttons for the view.""" - raise NotImplementedError - - def _add_buttons(self, *buttons: discord.ui.Button | str): - """Add buttons to the view.""" - for button in buttons: - if isinstance(button, str): - button = create_button(button, style=self.buttons_style or FrameworkButtonStyle.primary) - - self.add_item(button) - - def _remove_button(self, button: discord.ui.Button): - """Remove the button from the view.""" - self.children.remove(button) - - def _get_button_by_custom_id(self, custom_id: str) -> discord.ui.Button | None: - """Get the button by its custom id.""" - logger.debug("Getting the button by its custom id from the view.") - for item in self.children: - if isinstance(item, discord.ui.Button) and item.custom_id == custom_id: - return item - else: - logger.debug(f"Cannot find the button with custom id {custom_id} in {self.children=}") - - logger.debug(f"Trying to get the button from the parent view.") - return (self._parent_view or self)._custom_id_to_button.get(custom_id) - - @staticmethod - @retry(stop=stop_after_attempt(3), wait=wait_fixed(3), reraise=True) - async def _defer(interaction: discord.Interaction): - try: - await interaction.response.defer() - except discord.InteractionResponded: - pass - except discord.NotFound: - logger.warning(f"Interaction {interaction.id} was not found. Cannot defer the response.") - - @retry( - stop=stop_after_attempt(3), - wait=wait_fixed(3), - reraise=False, - retry_error_callback=lambda _: None, - ) - async def _update_view(self, interaction: discord.Interaction): - """Update the view.""" - try: - await interaction.response.edit_message(view=self) - except discord.NotFound as exception: - logger.warning(f"Interaction {interaction.id} was not found. 
Cannot update the view.") - raise exception - - @staticmethod - @retry(stop=stop_after_attempt(3), wait=wait_fixed(3), reraise=False) - async def _remove_view(interaction: discord.Interaction): - try: - await interaction.response.edit_message(view=None) - except discord.NotFound as exception: - logger.warning(f"Interaction {interaction.id} was not found. Cannot remove the view.") - raise exception - - @abstractmethod - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Process the button click.""" - raise NotImplementedError - - async def process_interaction(self, flow_connector: FlowConnector): - """Process the interaction. Might be overridden in subclasses.""" - if not (button_custom_id := flow_connector.interaction.data.get("custom_id")): - logger.error(f"Cannot find the custom id in {flow_connector.interaction.data=}") - return - - if not (button := self._get_button_by_custom_id(button_custom_id)): - logger.error(f"Cannot find the button with custom id {button_custom_id} in {self.children=}") - return - - return await self.process_button_click(button, flow_connector) - - -class ConfirmButtonView(BaseView): - """The view that has only one confirm button.""" - - confirm_button_label: str - - _confirm_button: discord.ui.Button | None = None - - def __init__(self, confirm_button_label: str, **kwargs): - """Initialize the view.""" - super().__init__(**kwargs) - - self.confirm_button_label = confirm_button_label - - @property - def confirm_button(self) -> discord.ui.Button: - if not self._confirm_button: - self._confirm_button = create_button( - self.confirm_button_label, style=self.buttons_style or FrameworkButtonStyle.secondary - ) - - return self._confirm_button - - @confirm_button.setter - def confirm_button(self, value: discord.ui.Button): - self._confirm_button = value - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[discord.ui.Button | str]: - return [self.confirm_button] - - async def on_submit(self, flow_connector: FlowConnector): - """The method that is called when the confirm button is clicked.""" - await self._remove_view(flow_connector.interaction) - - raise ViewStepFinished() - - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Process the button click.""" - return await self.on_submit(flow_connector) - - -class StorageMixin(ABC): - answers_storage: StorageBucketElement | None = None - - async def _get_user_answer(self) -> typing.Any: - """Get the user answer.""" - if self.answers_storage: - async with self.answers_storage as answers_storage: - return answers_storage.get() - - async def _set_user_answer(self, user_answer: typing.Any): - """Set the user answer.""" - if self.answers_storage: - async with self.answers_storage as answers_storage: - answers_storage.set(user_answer) - - async def clear_storage(self) -> None: - """Clear the storage.""" - if self.answers_storage: - await self.answers_storage.delete_data() - - -class ChooseOneOptionView(BaseView, StorageMixin): - """The view that allows the user to choose only one option.""" - - options: list[str] | typing.Callable[[], typing.Awaitable[list[str]]] - - def __init__( - self, - options: list[str | discord.ui.Button] | typing.Callable[[], typing.Awaitable[list[str]]], - answers_storage: StorageBucketElement | None = None, - **kwargs, - ): - """Initialize the view.""" - super().__init__(**kwargs) - - self.options = options - - # TODO: [07.09.2023 by Mykola] Use the unique `._step_name` for the 
storage key - # self.answers_storage = answers_storage or StorageBucketElement( - # f"{self.__class__.__name__}::answers" - # ) - self.answers_storage = answers_storage - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[str]: - """Get the buttons for the view.""" - return self.options if isinstance(self.options, list) else await self.options() - - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Process the button click.""" - await self._remove_view(flow_connector.interaction) - - await self._set_user_answer(button.label) - - raise ViewStepFinished() - - -class MultipleAnswersView(ConfirmButtonView, StorageMixin): - """The view that allows the user to choose multiple answers.""" - - answers: ( - list[str] | BaseSubstitution | typing.Awaitable[list[str]] | typing.Callable[[], typing.Awaitable[list[str]]] - ) - n_answers_to_select: int | None - min_answers_allowed: int | None - - confirm_button_label: str | None = None - - _confirm_button: discord.ui.Button | None = None - - def __init__( - self, - answers: ( - list[str] - | BaseSubstitution - | typing.Awaitable[list[str]] - | typing.Callable[[], typing.Awaitable[list[str]]] - ), - confirm_button_label: str, - n_answers_to_select: int | None = None, - min_answers_allowed: int | None = None, - answers_storage: StorageBucketElement | None = None, - answers_selection_style: FrameworkButtonStyle = FrameworkButtonStyle.success, - confirm_button_style: FrameworkButtonStyle = FrameworkButtonStyle.primary, - **kwargs, - ): - """Initialize the view.""" - super().__init__(confirm_button_label, **kwargs) - - self.answers = answers - self.n_answers_to_select: int | None = n_answers_to_select - self.min_answers_allowed: int | None = min_answers_allowed or n_answers_to_select - - self.answers_storage = answers_storage - - self.answers_selection_style = answers_selection_style - self.confirm_button_style = confirm_button_style - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[discord.ui.Button | str]: - """Get the buttons for the answers, and the confirm button.""" - confirm_button = copy(self.confirm_button) - - # Change the style of the confirm button - confirm_button.style = self.confirm_button_style - - # Disable the confirm button by default, only if the `n_answers_to_select` or the `min_answers_allowed` is set - confirm_button.disabled = bool(self.n_answers_to_select) or bool(self.min_answers_allowed) - - return [ - *( - self.answers - if isinstance(self.answers, list) - else ( - ( - await self.answers.resolve(flow_connector) - if isinstance(self.answers, BaseSubstitution) - else (await self.answers if inspect.isawaitable(self.answers) else await self.answers()) - ) - ) - ), - confirm_button, - ] - - def _update_user_answers(self, user_answers: list[str], button: discord.ui.Button) -> None: - """Update the user answers.""" - if button.label in user_answers: - user_answers.remove(button.label) - button.style = self.buttons_style - else: - user_answers.append(button.label) - button.style = self.answers_selection_style - - def _disable_unselected_buttons(self, user_answers: list[str]) -> None: - """Disable the unselected buttons.""" - for item in self.children: - # Skip the confirm button - # TODO: [18.09.2023 by Mykola] This doesn't work here since all the buttons _must_ be - # copied for all the individual views - # if item == self._confirm_button: - # continue - if isinstance(item, discord.ui.Button) and item.label not in user_answers: - 
item.disabled = True - - def _enable_all_buttons(self) -> None: - """Enable all the buttons.""" - for item in self.children: - # Skip the confirm button - if item == self._confirm_button: - continue - if isinstance(item, discord.ui.Button): - item.disabled = False - - def _change_confirm_button_state(self, enabled: bool = True) -> None: - """Change the confirm button state.""" - for item in self.children: - if isinstance(item, discord.ui.Button) and item.label == self.confirm_button_label: - item.disabled = not enabled - - async def get_or_create_for_connector( - self, - flow_connector: FlowConnector, - dynamic_buttons: list[str | discord.Button] | None = None, - force_init: bool = False, - force_get: bool = False, - from_step_run: bool = False, - ) -> typing.Self: - if from_step_run: - # Clear the storage - await self.clear_storage() - - return await super().get_or_create_for_connector( - flow_connector, dynamic_buttons, force_init, force_get, from_step_run - ) - - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Save the answer and check whether the `n_answers_to_select` is reached.""" - - if button.label == self.confirm_button.label: - # Remove the view from the message - return await self.on_submit(flow_connector) - - # Save the answer - user_answers: list[str] | None = await self._get_user_answer() or [] - self._update_user_answers(user_answers, button) - await self._set_user_answer(user_answers) - - if self._confirm_button: - if self.n_answers_to_select: - if len(user_answers) >= self.n_answers_to_select: - self._disable_unselected_buttons(user_answers) - else: - self._enable_all_buttons() - - if self.min_answers_allowed: - if len(user_answers) >= self.min_answers_allowed: - self._change_confirm_button_state(enabled=True) - else: - self._change_confirm_button_state(enabled=False) - - return await self._update_view(flow_connector.interaction) - - else: - return await self._defer(flow_connector.interaction) - - -# region ActionButtonsView -DEFAULT_ACTION_BUTTONS_VIEW_CONFIRM_BUTTON_LABEL = "Confirm" - - -class ActionButtonsView(ConfirmButtonView): - """The view that has action buttons.""" - - action_buttons: list[ActionButton] - - one_time_view: bool = False - force_stay_on_step: bool = False - force_confirm_button: bool = False - - def __init__( - self, - action_buttons: list[ActionButton], - force_stay_on_step: bool = False, - force_confirm_button: bool = False, - **kwargs, - ): - """Initialize the view.""" - self.action_buttons = action_buttons - - self.one_time_view = kwargs.pop("one_time_view", False) - self.force_stay_on_step = force_stay_on_step - self.force_confirm_button = force_confirm_button - - super().__init__( - confirm_button_label=kwargs.pop("confirm_button_label", DEFAULT_ACTION_BUTTONS_VIEW_CONFIRM_BUTTON_LABEL), - **kwargs, - ) - - async def get_static_buttons(self, flow_connector: FlowConnector) -> list[discord.ui.Button]: - """Get the buttons for the view.""" - return self.action_buttons + ( - await super().get_static_buttons(flow_connector) - if not self.one_time_view or self.force_confirm_button - else [] - ) - - async def process_button_click(self, button: discord.ui.Button, flow_connector: FlowConnector): - """Process the button click.""" - from .buttons import ActionButton - - if button.label == self.confirm_button.label: - # Remove the view from the message - return await self.on_submit(flow_connector) - - raise_view_step_finished: bool = False - - try: - if self.one_time_view: - await 
self.on_submit(flow_connector) - else: - await self._defer(flow_connector.interaction) - except ViewStepFinished: - raise_view_step_finished = True - pass - - if not isinstance(button, ActionButton): - raise ValueError("The button must be an instance of `ActionButton`.") - - await button.trigger_action(flow_connector) - - if raise_view_step_finished and not self.force_stay_on_step: - raise ViewStepFinished() - - -# endregion diff --git a/electro/enums.py b/electro/enums.py index 13a5cad..7748066 100644 --- a/electro/enums.py +++ b/electro/enums.py @@ -7,26 +7,16 @@ class SupportedPlatforms(str, Enum): """The supported platforms for the project.""" DISCORD = "discord" - # WHATSAPP = "whatsapp" - # TELEGRAM = "telegram" - # SLACK = "slack" + WHATSAPP = "whatsapp" + CUSTOM = "custom" -class ChannelType(Enum): - """Channel type""" +class ResponseTypes(str, Enum): + """The actions that can be processed by the clients.""" - text = 0 - private = 1 - voice = 2 - group = 3 - category = 4 - news = 5 - news_thread = 10 - public_thread = 11 - private_thread = 12 - stage_voice = 13 - directory = 14 - forum = 15 - - def __str__(self): - return self.name + MESSAGE = "message" + IMAGE = "image" + ADD_ROLE = "add_role" + REMOVE_ROLE = "remove_role" + START_TYPING = "start_typing" + STOP_TYPING = "stop_typing" diff --git a/electro/exceptions.py b/electro/exceptions.py index 9d548c7..725bdca 100644 --- a/electro/exceptions.py +++ b/electro/exceptions.py @@ -4,4 +4,6 @@ class EventCannotBeProcessed(Exception): """The exception that is raised when the event cannot be processed.""" - pass + +class DisabledButtonClick(Exception): + """The exception that is raised when the button click is disabled.""" diff --git a/electro/extra/__init__.py b/electro/extra/__init__.py deleted file mode 100644 index 976e514..0000000 --- a/electro/extra/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Extra things that are not part of the standard `electro` Framework but useful, nevertheless.""" diff --git a/electro/extra/decorators.py.upcoming b/electro/extra/decorators.py.upcoming deleted file mode 100644 index a1bc2af..0000000 --- a/electro/extra/decorators.py.upcoming +++ /dev/null @@ -1,143 +0,0 @@ -"""Decorators used in the Application.""" - -from asyncio import Lock -from collections import defaultdict -from functools import wraps -from typing import Callable, Coroutine - -from bot import bot -from framework.flow_connector import FlowConnector, FlowConnectorEvents -from i18n_gettext import _ -from models import Channel -from settings import settings -from utils.loguru_logging import logger -from utils.templated_i18n import TemplatedString - - -def fail_safely(function: Callable[..., Coroutine]): - """Fail safely and send the message to the User about the issue""" - - @wraps(function) - async def wrapper(*args, **kwargs): - try: - return await function(*args, **kwargs) - except Exception as exception: - # Log the exception with the traceback - logger.exception(exception) - - # Check if any of the arguments is a FlowConnector - flow_connector: FlowConnector | None = next( - (arg for arg in args if isinstance(arg, FlowConnector)), - None, - ) - - substitute_dict = { - "function_name": function.__name__, - "exception": exception, - "exception_type": type(exception), - "exception_text": str(exception), - "user_id": flow_connector.user.id if flow_connector else "N/A", - "user_name": flow_connector.user.name if flow_connector else "N/A", - "user_discriminator": flow_connector.user.discriminator if flow_connector else "N/A", - } - - if 
flow_connector: - # Send a message to the User about the issue - if settings.DEBUG: - error_text = _("fail_safely.error_text.debug").safe_substitute(**substitute_dict) - else: - error_text = _("fail_safely.error_text").safe_substitute(**substitute_dict) - await flow_connector.message.reply(error_text) - else: - logger.error(f"FlowConnector is not set for the function: {function.__name__} in {args=}") - - if not (global_errors_channel_obj := await Channel.get_or_none(used_for="global_errors")): - logger.error("Global errors channel is not set.") - raise exception - - if settings.DEBUG: - await bot.get_channel(global_errors_channel_obj.id).send( - _("fail_safely.global_error_text.debug").safe_substitute(**substitute_dict) - ) - else: - await bot.get_channel(global_errors_channel_obj.id).send( - _("fail_safely.global_error_text").safe_substitute(**substitute_dict) - ) - - # TODO: [01.04.2024 by Mykola] Should we raise an exception, or just logging it is enough? - raise exception - - return wrapper - - -GLOBAL_USER_LOCKS: defaultdict[int, Lock] = defaultdict(Lock) - - -def forbid_concurrent_execution( - keep_extra_messages=False, - extra_messages_reply: TemplatedString | None = _("forbidden_concurrent_execution.extra_messages_reply"), -) -> Callable: - """Forbid concurrent execution of the function for the same User.""" - - def decorator(function: Callable[..., Coroutine]): - """The actual decorator.""" - - @wraps(function) - async def wrapper(*args, **kwargs): - # Get the `FlowConnector` from the arguments - flow_connector: FlowConnector | None = next( - (arg for arg in args if isinstance(arg, FlowConnector)), - None, - ) - - if not flow_connector: - logger.error(f"FlowConnector is not set for the function: {function.__name__} in {args=}") - - return await function(*args, **kwargs) - - # Get the User's ID - user_id = flow_connector.user.id - - # Get the User's lock - user_lock = GLOBAL_USER_LOCKS[user_id] - - # Check if the User's lock is already acquired - if user_lock.locked() and not keep_extra_messages: - # Send a message to the User saying that the function is already running - return await flow_connector.message.reply(extra_messages_reply) - - # With the lock acquired, execute the function - async with user_lock: - return await function(*args, **kwargs) - - return wrapper - - return decorator - - -def with_constant_typing( - run_only_on_events: list[FlowConnectorEvents] | None = None, -) -> Callable: - """Send a typing indicator while executing the function.""" - - def decorator(function: Callable[..., Coroutine]) -> Callable[..., Coroutine]: - """The actual decorator.""" - - @wraps(function) - async def wrapper(*args, **kwargs): - # Check if any of the arguments is a FlowConnector - flow_connector: FlowConnector | None = next( - (arg for arg in args if isinstance(arg, FlowConnector)), - None, - ) - - if flow_connector and (not run_only_on_events or flow_connector.event in run_only_on_events): - async with flow_connector.channel.typing(): - return await function(*args, **kwargs) - - # If the FlowConnector is not found, just execute the function - return await function(*args, **kwargs) - - return wrapper - - return decorator diff --git a/electro/extra/httpx_client.py b/electro/extra/httpx_client.py deleted file mode 100644 index 070f479..0000000 --- a/electro/extra/httpx_client.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -The main (global) `httpx` client. - -Used to (apparently) be able to async/await multiple web requests at the same time. 
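The deleted `forbid_concurrent_execution` decorator above guards each user with an `asyncio.Lock` kept in a `defaultdict`. The same idea in a framework-free sketch (the `user_id` keyword argument and the `"busy"` return value are assumptions made for the example; the real decorator replies with a localized message instead):

    import asyncio
    from collections import defaultdict
    from functools import wraps

    USER_LOCKS: defaultdict[int, asyncio.Lock] = defaultdict(asyncio.Lock)

    def one_at_a_time_per_user(func):
        """Reject a call if the same user already has one in flight."""

        @wraps(func)
        async def wrapper(*args, user_id: int, **kwargs):
            lock = USER_LOCKS[user_id]
            if lock.locked():
                return "busy"  # stand-in for the real "please wait" reply
            async with lock:
                return await func(*args, user_id=user_id, **kwargs)

        return wrapper

    @one_at_a_time_per_user
    async def slow_handler(*, user_id: int) -> str:
        await asyncio.sleep(0.1)
        return "done"

    async def main() -> None:
        first, second = await asyncio.gather(slow_handler(user_id=1), slow_handler(user_id=1))
        print(first, second)  # one call runs, the concurrent one is rejected

    asyncio.run(main())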
- -Whether this client preserves cookies or not remains a mystery to me. However, I'm not sure if it's relevant -to the bot's use case. - -If you're reading this, you might as well check the `httpx` documentation to find out and let me know. -""" - -import httpx - -from ..settings import settings - -httpx_client = httpx.AsyncClient(timeout=settings.HTTPX_CLIENT_DEFAULT_TIMEOUT) diff --git a/electro/flow.py b/electro/flow.py index 1b417e8..d747a50 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -7,15 +7,14 @@ from stringcase import snakecase -from . import types_ as types from .contrib.storage_buckets import BaseStorageBucket, BaseStorageBucketElement from .flow_connector import FlowConnector, FlowConnectorEvents from .flow_step import BaseFlowStep, FlowStepDone +from .models import BotMessage from .scopes import FlowScopes - -# from decorators import forbid_concurrent_execution, with_constant_typing from .settings import settings from .substitutions import BaseSubstitution +from .toolkit.decorators import with_constant_typing from .toolkit.loguru_logging import logger from .triggers import BaseFlowTrigger @@ -145,14 +144,12 @@ async def get_iterables(self, connector: FlowConnector) -> typing.List: @abstractmethod async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None - ) -> list[types.MessageToSend] | None: + ) -> list[BotMessage] | None: """Process the response in the current step of the `Flow`.""" raise NotImplementedError @abstractmethod - async def run( - self, connector: FlowConnector, upper_level_state: str | None = None - ) -> list[types.MessageToSend] | None: + async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[BotMessage] | None: """Start the `Flow`.""" raise NotImplementedError @@ -219,9 +216,7 @@ async def _update_connector_pre_run(self, connector: FlowConnector, *_, **__kwar """Update the connector before running the `Flow`.""" return connector - async def run( - self, connector: FlowConnector, upper_level_state: str | None = None - ) -> list[types.MessageToSend] | None: + async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[BotMessage] | None: """Start the `Flow`.""" # Make sure there are steps in the `Flow` if not self._steps: @@ -242,10 +237,10 @@ async def run( # TODO: [2024-07-19 by Mykola] Use the decorators # @forbid_concurrent_execution() - # @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) + @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None - ) -> list[types.MessageToSend] | None: + ) -> list[BotMessage] | None: """ Process the response in the current step of the `Flow`. 
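The hunk above switches `Flow.step` to use `with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE])`, i.e. the typing indicator only wraps the call when the connector's event is in the allow-list. A framework-free sketch of that gating pattern, with a dict and a fake channel standing in for the real electro connector and channel objects:

    import asyncio
    import contextlib
    from enum import Enum
    from functools import wraps

    class Event(str, Enum):
        MESSAGE = "message"
        BUTTON_CLICK = "button_click"

    class FakeChannel:
        """Stand-in for a chat channel that can show a typing indicator."""

        @contextlib.asynccontextmanager
        async def typing(self):
            print("typing indicator on")
            try:
                yield
            finally:
                print("typing indicator off")

    def with_typing(run_only_on_events: list[Event] | None = None):
        """Wrap the call in the typing indicator only for the listed events."""

        def decorator(func):
            @wraps(func)
            async def wrapper(connector, *args, **kwargs):
                if run_only_on_events is None or connector["event"] in run_only_on_events:
                    async with connector["channel"].typing():
                        return await func(connector, *args, **kwargs)
                return await func(connector, *args, **kwargs)

            return wrapper

        return decorator

    @with_typing(run_only_on_events=[Event.MESSAGE])
    async def step(connector) -> str:
        await asyncio.sleep(0)
        return "stepped"

    async def main() -> None:
        channel = FakeChannel()
        print(await step({"event": Event.MESSAGE, "channel": channel}))       # indicator shown
        print(await step({"event": Event.BUTTON_CLICK, "channel": channel}))  # indicator skipped

    asyncio.run(main())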
diff --git a/electro/flow_connector.py b/electro/flow_connector.py index 6f0d345..f5ee6e9 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -6,14 +6,10 @@ from enum import Enum from typing import Any, TYPE_CHECKING -import discord -from discord.ext import commands - from ._common import ContextInstanceMixin from .interfaces import BaseInterface -from .models import Interaction, Message +from .models import Button, Channel, Message, User from .storage import ChannelData, UserData -from .types_ import Channel, User if TYPE_CHECKING: from electro import FlowManager @@ -23,11 +19,8 @@ class FlowConnectorEvents(str, Enum): """The events that are used in the `FlowConnector`.""" MESSAGE = "message" - BUTTON_CLICK = "button_click" - MEMBER_JOIN = "member_join" - MEMBER_UPDATE = "member_update" @@ -35,32 +28,19 @@ class FlowConnectorEvents(str, Enum): class FlowConnector(ContextInstanceMixin): """The connector that is passed from one `Flow` to another.""" - # TODO: [05.09.2023 by Mykola] Forbid re-assigning the attributes of this class - flow_manager: FlowManager - - bot: commands.Bot - + interface: BaseInterface event: FlowConnectorEvents - user: User | None - channel: Channel | None - user_state: str | None user_data: UserData - channel_state: str | None channel_data: ChannelData - message: discord.Message | None = None - interaction: discord.Interaction | None = None + user: User + channel: Channel + message: Message | None = None + button: Button | None = None - message_obj: Message | None = None - interaction_obj: Interaction | None = None - - member: discord.Member | None = None substitutions: dict[str, str] | None = None - extra_data: dict[str, Any] | None = None - - interface: BaseInterface | None = None diff --git a/electro/flow_manager.py b/electro/flow_manager.py index d7dbfd0..4867f4f 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -5,17 +5,15 @@ import typing from collections import defaultdict -from . import types_ as types +from . 
import schemas from ._common import ContextInstanceMixin -from .bot import bot as global_bot -from .enums import ChannelType -from .exceptions import EventCannotBeProcessed +from .exceptions import DisabledButtonClick, EventCannotBeProcessed from .flow import Flow, FlowConnector, FlowFinished from .flow_connector import FlowConnectorEvents # from decorators import fail_safely from .interfaces import BaseInterface -from .models import Channel, Interaction, Message, User, UserStateChanged +from .models import Button, Channel, Guild, Message, PlatformId, User, UserStateChanged from .scopes import FlowScopes from .settings import settings from .storage import BaseFlowStorage, ChannelData, FlowMemoryStorage, UserData @@ -32,131 +30,90 @@ def __init__(self, flow_manager: FlowManager): # Set the current analytics manager self.set_current(self) - @staticmethod - async def save_user(user: types.User, guild: types.Guild | None = None) -> User: + @classmethod + async def get_or_create_guild(cls, platform: str, guild_data: schemas.Guild) -> Guild: + """Save the guild to the database.""" + platform_id, created = await PlatformId.get_or_create( + platform_id=guild_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.GUILD + ) + if created: + guild = await Guild.create(name=guild_data.name) + platform_id.guild = guild + logger.info(f"Created the Guild record for {guild.id=}, {guild.name=}") + await platform_id.save() + return await platform_id.guild + + @classmethod + async def get_or_create_user(cls, platform: str, user_data: schemas.User) -> User: """Save the user to the database.""" - user, created = await User.get_or_create( - id=user.id, - defaults={ - "username": user.username, - "discriminator": user.discriminator, - "avatar": user.avatar.get("url") if user.avatar else None, - "guild_id": guild.id if guild else None, - }, + platform_id, created = await PlatformId.get_or_create( + platform_id=user_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.USER ) - if created: - logger.info(f"Created the User record for {user.id=}, {user.username=}, and {user.discriminator}") - - return user - - @staticmethod - async def save_channel(channel: types.Channel) -> Channel: + user = await User.create(username=user_data.username) + platform_id.user = user + logger.info(f"Created the User record for {user.id=}, {user.username=}") + await platform_id.save() + if user_data.guild: + guild = await cls.get_or_create_guild(platform, user_data.guild) + user.guild = guild + await user.save() + return await platform_id.user + + @classmethod + async def get_or_create_channel(cls, platform: str, channel_data: schemas.Channel) -> Channel: """Save the channel to the database.""" - return await Channel.create( - id=channel.id, - name=getattr(channel, "name", None), - guild_id=channel.guild.id if getattr(channel, "guild", None) else None, - type=channel.type, + platform_id, created = await PlatformId.get_or_create( + platform_id=channel_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.CHANNEL ) - - async def save_new_member(self, member: types.Member) -> User: - """Save the new member to the database.""" - # noinspection PyProtectedMember - user = member._user - user_obj = await self.save_user(user, member.guild) - - # TODO: [05.06.2024 by Mykola] Save the new member to the database - - return user_obj - - async def save_updated_member(self, before: types.Member, after: types.Member) -> User: - """Save the updated member to the database.""" - # noinspection 
PyProtectedMember - user = after._user - user_obj = await self.save_user(user, after.guild) - - # TODO: [05.06.2024 by Mykola] Save the updated member to the database - - return user_obj - - async def _get_user_obj(self, user: types.User, guild: types.Guild | None = None) -> User: - if not (user_obj := await User.get_or_none(id=user.id)): - logger.warning(f"User {user.id} not found in the database. Creating the user record.") - user_obj: User = await self.save_user(user, guild) - - return user_obj - - async def _get_channel_obj(self, channel: types.Channel) -> Channel: - if not (channel_obj := await Channel.get_or_none(id=channel.id)): - logger.warning(f"Channel {channel.id} not found in the database. Creating the channel record.") - channel_obj: Channel = await self.save_channel(channel) - - return channel_obj - - async def get_or_save_message(self, message: types.Message) -> Message: + if created: + channel_type = channel_data.type + if channel_type not in Channel.ChannelTypes: + raise ValueError(f"Invalid channel type: {channel_type}") + channel = await Channel.create(name=channel_data.name, type=channel_type) + platform_id.channel = channel + logger.info(f"Created the Channel record for {channel.id=}, {channel.name=}") + await platform_id.save() + if channel_data.guild: + guild = await cls.get_or_create_guild(platform, channel_data.guild) + channel.guild = guild + await channel.save() + return await platform_id.channel + + @classmethod + async def save_message(cls, platform: str, message_data: schemas.ReceivedMessage) -> Message: """Save the message to the database.""" - # Get the user and channel objects (make sure they exist in the database) - # TODO: [2024-12-23 by Mykola] Add the `.guild` to the `message` object - # user_obj = await self._get_user_obj(message.author, message.guild) - user_obj = await self._get_user_obj(message.author, None) - channel_obj = await self._get_channel_obj(message.channel) - - if message_obj := await Message.get_or_none(id=message.id): - return message_obj - + author = await cls.get_or_create_user(platform, message_data.author) + if message_data.channel: + channel = await cls.get_or_create_channel(platform, message_data.channel) + else: + channel = None return await Message.create( - id=message.id, - content=message.content, - author=user_obj, - channel=channel_obj, - created_at=message.created_at, - edited_at=message.edited_at, - # TODO: [2024-12-23 by Mykola] Expand the message model - # is_pinned=message.pinned, - # is_tts=message.tts, - # is_bot_message=message.author.bot, - is_command=message.content.startswith(settings.BOT_COMMAND_PREFIX), - ) - - async def save_interaction( - self, interaction: types.Interaction, return_message_obj=False - ) -> Interaction | tuple[Interaction, Message]: - """Save the interaction to the database.""" - # Get the user and channel objects (make sure they exist in the database) - user_obj = await self._get_user_obj(interaction.user, interaction.guild) - channel_obj = await self._get_channel_obj(interaction.channel) - - message_obj = await self.get_or_save_message(interaction.message) - - interaction_obj: Interaction = await Interaction.create( - id=interaction.id, - user=user_obj, - channel=channel_obj, - message=message_obj, - custom_id=interaction.data.get("custom_id"), + content=message_data.content, + author=author, + channel=channel, + is_command=message_data.content.startswith(settings.BOT_COMMAND_PREFIX), ) - if return_message_obj: - return interaction_obj, message_obj - - return interaction_obj - + @classmethod 
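The `get_or_create_*` helpers above all go through a single `PlatformId.get_or_create` keyed by the external identifier, the platform name, and the record type, so the same Discord or WhatsApp ID always resolves to one internal record. A database-free sketch of that idempotent mapping (a plain dict stands in for the Tortoise models):

    from dataclasses import dataclass, field
    from enum import Enum

    class IdType(str, Enum):
        USER = "user"
        CHANNEL = "channel"
        GUILD = "guild"

    @dataclass
    class InternalUser:
        username: str

    @dataclass
    class PlatformRegistry:
        """Maps (platform, external id, type) -> internal record, creating it only once."""

        _records: dict[tuple[str, str, IdType], InternalUser] = field(default_factory=dict)

        def get_or_create_user(self, platform: str, external_id: str, username: str) -> InternalUser:
            key = (platform, external_id, IdType.USER)
            if key not in self._records:
                self._records[key] = InternalUser(username=username)
            return self._records[key]

    registry = PlatformRegistry()
    first = registry.get_or_create_user("discord", "1234", "mykola")
    second = registry.get_or_create_user("discord", "1234", "mykola-renamed")
    assert first is second  # the same external identity resolves to the same internal user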
+ async def save_button_click(cls, button_id: int) -> Button: + """Save the button to the database.""" + # Get the user and channel objects (make sure they exist in the database + button = await Button.get(id=button_id) + if button.clicked and button.remove_after_click: + raise DisabledButtonClick + button.clicked = True + await button.save() + return button + + @classmethod async def save_user_state_changed( - self, user: types.User, previous_state: str | None, new_state: str | None + cls, user: User, previous_state: str | None, new_state: str | None ) -> UserStateChanged | None: """Save the user state changed record to the database.""" if previous_state == new_state: - return - - # Get the user object (make sure it exists in the database) - user_obj = await self._get_user_obj(user) # TODO: [2024-10-16 by Mykola] Should be pass `guild` here? - - return await UserStateChanged.create( - user=user_obj, - previous_state=previous_state, - new_state=new_state, - ) + return None + return await UserStateChanged.create(user=user, previous_state=previous_state, new_state=new_state) class FlowManager(ContextInstanceMixin): @@ -167,12 +124,10 @@ class FlowManager(ContextInstanceMixin): def __init__( self, - bot: types.Bot, flows: typing.Optional[list[Flow]] = None, storage: typing.Optional[BaseFlowStorage] = None, on_finish_callbacks: typing.Optional[list[typing.Callable[[FlowConnector], typing.Awaitable[None]]]] = None, ): - self.bot = bot self.flows: list[Flow] = flows or [] self.storage = storage or FlowMemoryStorage() @@ -186,11 +141,11 @@ def __init__( self.set_current(self) # region User State and Data management - async def _get_user_state(self, user: types.User) -> str | None: + async def _get_user_state(self, user: User) -> str | None: """Get the state of the user.""" return await self.storage.get_user_state(user.id) - async def _set_user_state(self, user: types.User, state: str | None): + async def _set_user_state(self, user: User, state: str | None): """Set the state of the user.""" # Save the state to the database old_state = await self._get_user_state(user) @@ -198,49 +153,49 @@ async def _set_user_state(self, user: types.User, state: str | None): await self.analytics_manager.save_user_state_changed(user, old_state, state) await self.storage.set_user_state(user.id, state) - async def _delete_user_state(self, user: types.User): + async def _delete_user_state(self, user: User): """Delete the state of the user.""" old_state = await self._get_user_state(user) if old_state: await self.analytics_manager.save_user_state_changed(user, old_state, None) await self.storage.delete_user_state(user.id) - async def _get_user_data(self, user: types.User) -> UserData: + async def _get_user_data(self, user: User) -> UserData: """Get the data of the user.""" return await self.storage.get_user_data(user.id) - async def _set_user_data(self, user: types.User, data: UserData | dict[str, typing.Any] | None): + async def _set_user_data(self, user: User, data: UserData | dict[str, typing.Any] | None): """Set the data of the user.""" await self.storage.set_user_data(user.id, data) - async def _delete_user_data(self, user: types.User): + async def _delete_user_data(self, user: User): """Delete the data of the user.""" await self.storage.delete_user_data(user.id) # endregion # region Channel State and Data management - async def _get_channel_state(self, channel: types.Channel) -> str | None: + async def _get_channel_state(self, channel: Channel) -> str | None: """Get the state of the channel.""" return await 
self.storage.get_channel_state(channel.id) - async def _set_channel_state(self, channel: types.Channel, state: str | None): + async def _set_channel_state(self, channel: Channel, state: str | None): """Set the state of the channel.""" await self.storage.set_channel_state(channel.id, state) - async def _delete_channel_state(self, channel: types.Channel): + async def _delete_channel_state(self, channel: Channel): """Delete the state of the channel.""" await self.storage.delete_channel_state(channel.id) - async def _get_channel_data(self, channel: types.Channel) -> ChannelData: + async def _get_channel_data(self, channel: Channel) -> ChannelData: """Get the data of the channel.""" return await self.storage.get_channel_data(channel.id) - async def _set_channel_data(self, channel: types.Channel, data: ChannelData | dict[str, typing.Any] | None): + async def _set_channel_data(self, channel: Channel, data: ChannelData | dict[str, typing.Any] | None): """Set the data of the channel.""" await self.storage.set_channel_data(channel.id, data) - async def _delete_channel_data(self, channel: types.Channel): + async def _delete_channel_data(self, channel: Channel): """Delete the data of the channel.""" await self.storage.delete_channel_data(channel.id) @@ -274,65 +229,18 @@ async def _finish_flow(self, flow_connector: FlowConnector): # Run the callbacks for callback in self._on_finish_callbacks: await callback(flow_connector) - return await flow_connector.interface.stop_process(reason="end_of_flow") - - async def _create_user_and_channel( - self, user: types.User | None = None, channel: types.Channel | types.DMChannel | None = None - ): - """Create the `User` and `Channel` records if they don't exist.""" - logger.info(f"Creating the User and Channel records for {user=}, {channel=}") - - user_id = getattr(user, "id", None) if user else None - channel_id = getattr(channel, "id", None) if channel else None - - logger.debug(f"Creating the User and Channel records for {user_id=} and {channel_id=}") - - if user and not user_id: - logger.warning(f"Failed to get the user ID: {user=}, {channel=}, {channel_id=}") - - if channel and not channel_id: - logger.warning(f"Failed to get the channel ID: {channel=}, {user=}, {user_id=}") - - # Create the User record - if user_id and not await self._storage__user_model.get_or_none(id=user_id): - await self._storage__user_model.create( - id=user_id, - username=user.name, - discriminator=user.discriminator, - avatar=user.avatar.url if user.avatar else None, - ) - logger.info( - f"Created the User record for {user_id=}, " - f"{getattr(user, 'name')=}, and {getattr(user, 'display_name')=}" - ) - - # Create the Channel record - if channel_id and not await self._storage__channel_model.get_or_none(id=channel_id): - await self._storage__channel_model.create( - id=channel_id, - name=getattr(channel, "name", None), - guild_id=getattr(getattr(channel, "guild", None), "id", None), - type=channel.type, - ) - - logger.info( - f"Created the Channel record for {channel_id=}, " - f"{getattr(channel, 'name', None)=}, {getattr(channel, 'type')=}" - ) + return # TODO: [2024-07-19 by Mykola] Use the decorators # @fail_safely - async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageToSend] | None: + async def _dispatch(self, flow_connector: FlowConnector): """Dispatch the flow.""" - # Create the User and Channel records if they don't exist - await self._create_user_and_channel(flow_connector.user, flow_connector.channel) - is_dm_channel = flow_connector.channel and 
flow_connector.channel.type == ChannelType.private - - if is_dm_channel: - scope = FlowScopes.USER - else: + # Create the User and Channel records if they don't exist + if flow_connector.channel and flow_connector.channel.type == Channel.ChannelTypes.CHANNEL: scope = FlowScopes.CHANNEL + else: + scope = FlowScopes.USER # TODO: [17.05.2024 by Mykola] Allow for `FlowScopes.GUILD` flows # Check whether this event has triggered any of the flows @@ -346,7 +254,7 @@ async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageTo if ( flow_connector.event == FlowConnectorEvents.MESSAGE and flow_connector.message.content - and flow_connector.message.content.startswith(flow_connector.bot.command_prefix) + and flow_connector.message.content.startswith(settings.BOT_COMMAND_PREFIX) ): if scope == FlowScopes.USER: # Remove user's state, so that the user wouldn't resume any flow @@ -358,9 +266,7 @@ async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageTo ) else: logger.warning( - "Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows", - scope=scope, - flow_connector=flow_connector, + f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" ) raise EventCannotBeProcessed( f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" @@ -381,7 +287,7 @@ async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageTo flows_by_scope[flow._scope].append(flow) # If it's not a private channel, Channel-scoped flows get the priority - if flow_connector.channel.type != types.ChannelType.private and ( + if flow_connector.channel.type == Channel.ChannelTypes.CHANNEL and ( channel_scope_flows := flows_by_scope.get(FlowScopes.CHANNEL) ): flows_that_can_be_run = channel_scope_flows @@ -412,7 +318,7 @@ async def _dispatch(self, flow_connector: FlowConnector) -> list[types.MessageTo ) return # Do not raise an exception, as it's not an error - async def dispatch(self, flow_connector: FlowConnector) -> list[types.MessageToSend] | None: + async def dispatch(self, flow_connector: FlowConnector): """Dispatch the flow.""" # Set the current flow connector FlowConnector.set_current(flow_connector) @@ -420,34 +326,33 @@ async def dispatch(self, flow_connector: FlowConnector) -> list[types.MessageToS async with self: return await self._dispatch(flow_connector) - async def on_message(self, message: types.Message, interface: BaseInterface) -> list[Message] | None: + async def on_message(self, platform: str, message_data: schemas.ReceivedMessage, interface: BaseInterface): """Handle the messages sent by the users.""" # Save the message to the database - message_obj: Message = await self.analytics_manager.get_or_save_message(message) - - # Ignore the messages sent by the bots - if message.author.bot: - return None + message = await self.analytics_manager.save_message(platform, message_data) + user = await message.author + channel = await message.channel # Get the user state and data # TODO: [20.08.2023 by Mykola] Use context manager for this - logger.info(f"Getting the user state and data for {message.author.id}") - user_state = await self._get_user_state(message.author) - user_data = await self._get_user_data(message.author) + user_state = await self._get_user_state(user) + user_data = await self._get_user_data(user) # Get the channel state and data - channel_state = await self._get_channel_state(message.channel) - channel_data = await 
self._get_channel_data(message.channel) + if channel: + channel_state = await self._get_channel_state(message.channel) + channel_data = await self._get_channel_data(message.channel) + else: + channel_state = None + channel_data = ChannelData() flow_connector = FlowConnector( flow_manager=self, - bot=self.bot, event=FlowConnectorEvents.MESSAGE, - user=message.author, - channel=message.channel, + user=user, + channel=channel, message=message, - message_obj=message_obj, user_state=user_state, user_data=user_data, channel_state=channel_state, @@ -457,97 +362,94 @@ async def on_message(self, message: types.Message, interface: BaseInterface) -> return await self.dispatch(flow_connector) - async def on_interaction(self, interaction: types.Interaction): - """Handle the interactions sent by the users.""" - # Save the interaction to the database - interaction_obj, message_obj = await self.analytics_manager.save_interaction( - interaction, return_message_obj=True - ) + async def on_button_click(self, platform: str, button_data: schemas.ButtonClick, interface: BaseInterface): + """Handle the buttons clicked by the users.""" + # Save the button click to the database + user = await self.analytics_manager.get_or_create_user(platform, button_data.user) + channel = await self.analytics_manager.get_or_create_channel(platform, button_data.channel) + try: + button = await self.analytics_manager.save_button_click(button_data.id) + except DisabledButtonClick: + return await interface.send_message("button already clicked", user, channel) # Get the user state and data - logger.info(f"Getting the user state and data for {interaction.user.id}") - user_state = await self._get_user_state(interaction.user) - user_data = await self._get_user_data(interaction.user) + user_state = await self._get_user_state(user) + user_data = await self._get_user_data(user) # Get the channel state and data - channel_state = await self._get_channel_state(interaction.message.channel) - channel_data = await self._get_channel_data(interaction.message.channel) + channel_state = await self._get_channel_state(channel) + channel_data = await self._get_channel_data(channel) # noinspection PyTypeChecker flow_connector = FlowConnector( flow_manager=self, - bot=self.bot, event=FlowConnectorEvents.BUTTON_CLICK, - user=interaction.user, - channel=interaction.channel, + user=user, + channel=channel, + button=button, user_state=user_state, user_data=user_data, - message=interaction.message, - interaction=interaction, - message_obj=message_obj, - interaction_obj=interaction_obj, channel_state=channel_state, channel_data=channel_data, + interface=interface, ) return await self.dispatch(flow_connector) - async def on_member_join(self, member: types.Member): - """Handle the `member_join` event.""" - # Save the user to the database - await self.analytics_manager.save_new_member(member) - - # Get the user state and data - logger.info(f"Getting the user state and data for {member.id}") - # TODO: [22.08.2023 by Mykola] Use correct types here - user_state = await self._get_user_state(member) - user_data = await self._get_user_data(member) - - # noinspection PyProtectedMember - flow_connector = FlowConnector( - flow_manager=self, - bot=self.bot, - event=FlowConnectorEvents.MEMBER_JOIN, - user=member._user, - member=member, - # TODO: [28.08.2023 by Mykola] Use the correct channel here - channel=member.guild.system_channel, - message=None, - user_state=user_state, - user_data=user_data, - channel_state=None, - channel_data=ChannelData(), - ) - - return await 
self.dispatch(flow_connector) - - async def on_member_update(self, before: types.Member, after: types.Member): - """Handle the `member_update` event.""" - # Save the member update record to the database - await self.analytics_manager.save_updated_member(before, after) - - # Get the user state and data - logger.info(f"Getting the user state and data for {after.id}") - user_state = await self._get_user_state(after) - user_data = await self._get_user_data(after) - - # noinspection PyProtectedMember - flow_connector = FlowConnector( - flow_manager=self, - bot=self.bot, - event=FlowConnectorEvents.MEMBER_UPDATE, - user=after._user, - member=after, - channel=after.guild.system_channel, - message=None, - user_state=user_state, - user_data=user_data, - extra_data={"old_member": before}, - channel_state=None, - channel_data=ChannelData(), - ) - - return await self.dispatch(flow_connector) + # async def on_member_join(self, member: types.Member): + # """Handle the `member_join` event.""" + # # Save the user to the database + # await self.analytics_manager.save_new_member(member) + + # # Get the user state and data + # logger.info(f"Getting the user state and data for {member.id}") + # # TODO: [22.08.2023 by Mykola] Use correct types here + # user_state = await self._get_user_state(member) + # user_data = await self._get_user_data(member) + + # # noinspection PyProtectedMember + # flow_connector = FlowConnector( + # flow_manager=self, + # event=FlowConnectorEvents.MEMBER_JOIN, + # user=member._user, + # member=member, + # # TODO: [28.08.2023 by Mykola] Use the correct channel here + # channel=member.guild.system_channel, + # message=None, + # user_state=user_state, + # user_data=user_data, + # channel_state=None, + # channel_data=ChannelData(), + # ) + + # return await self.dispatch(flow_connector) + + # async def on_member_update(self, before: types.Member, after: types.Member): + # """Handle the `member_update` event.""" + # # Save the member update record to the database + # await self.analytics_manager.save_updated_member(before, after) + + # # Get the user state and data + # logger.info(f"Getting the user state and data for {after.id}") + # user_state = await self._get_user_state(after) + # user_data = await self._get_user_data(after) + + # # noinspection PyProtectedMember + # flow_connector = FlowConnector( + # flow_manager=self, + # event=FlowConnectorEvents.MEMBER_UPDATE, + # user=after._user, + # member=after, + # channel=after.guild.system_channel, + # message=None, + # user_state=user_state, + # user_data=user_data, + # extra_data={"old_member": before}, + # channel_state=None, + # channel_data=ChannelData(), + # ) + + # return await self.dispatch(flow_connector) # region Context Manager async def __aenter__(self): @@ -571,6 +473,4 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): # endregion -global_flow_manager = FlowManager( - bot=global_bot, -) +global_flow_manager = FlowManager() diff --git a/electro/flow_step.py b/electro/flow_step.py index e091edd..01363a9 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -8,24 +8,24 @@ from abc import ABC, abstractmethod from dataclasses import dataclass from enum import Enum -from typing import Any +from io import BytesIO +from urllib.parse import urlparse -import discord -from openai import NOT_GIVEN +from openai import AsyncOpenAI, NOT_GIVEN from .contrib.storage_buckets import BaseStorageBucketElement, StorageBucketElement -from .contrib.views import BaseView, ViewStepFinished +from .enums import ResponseTypes from 
.flow_connector import FlowConnectorEvents - -# from decorators import with_constant_typing +from .models import Channel, File from .settings import settings from .substitutions import BaseSubstitution, GlobalAbstractChannel, resolve_channel +from .toolkit.decorators import with_constant_typing +from .toolkit.images_storage.universal_image_storage import universal_image_storage from .toolkit.loguru_logging import logger -from .toolkit.openai_client import async_openai_client from .toolkit.templated_i18n import TemplatedString -from .types_ import Channel, MessageToSend if typing.TYPE_CHECKING: + from .contrib.buttons import ActionButton from .flow import FlowConnector @@ -44,7 +44,7 @@ class BaseFlowStep(ABC): _testing: bool = False @abstractmethod - async def run(self, connector: FlowConnector) -> list[MessageToSend] | None: + async def run(self, connector: FlowConnector): """Run the `BaseFlowStep`. Called when the `BaseFlowStep` is started.""" raise NotImplementedError @@ -87,28 +87,36 @@ async def _get_formatted_message(self, message: TemplatedString, connector: Flow @dataclass(kw_only=True) class FilesMixin: - file: discord.File | typing.BinaryIO | pathlib.Path | BaseSubstitution | None = None - files: list[discord.File | typing.BinaryIO | pathlib.Path | BaseSubstitution] | None = None + files: list[File | typing.BinaryIO | BaseSubstitution] | None = None - async def _get_files_to_send(self, connector: FlowConnector) -> list[discord.File]: + async def _get_files_to_send(self, connector: FlowConnector) -> list[File]: """Get the files to send.""" - if self.file and self.files: - # TODO: [18.11.2023 by Mykola] Use `overload` to type-hint prohibit both `file` and `files` at the same time - raise ValueError("You can't specify both `file` and `files`.") - # Resolve the files if they are `BaseSubstitution`s - files: list[discord.File | typing.BinaryIO | pathlib.Path | None] = [ + return [ await file.resolve(connector) if file and isinstance(file, BaseSubstitution) else file - for file in (self.files or ([self.file] if self.file else [])) + for file in self.files or [] ] - # Convert the files to `discord.File`s if they are not - files = [file if isinstance(file, discord.File) else (discord.File(file) if file else None) for file in files] - # Remove the `None`s - files = [file for file in files if file] +class StorageMixin(ABC): + answers_storage: StorageBucketElement | None = None + + async def _get_user_answer(self) -> typing.Any: + """Get the user answer.""" + if self.answers_storage: + async with self.answers_storage as answers_storage: + return answers_storage.get() + + async def _set_user_answer(self, user_answer: typing.Any): + """Set the user answer.""" + if self.answers_storage: + async with self.answers_storage as answers_storage: + answers_storage.set(user_answer) - return files + async def clear_storage(self) -> None: + """Clear the storage.""" + if self.answers_storage: + await self.answers_storage.delete_data() class CallbackHandlerStep(BaseFlowStep): @@ -133,8 +141,7 @@ def __init__( self._step = None - # TODO: [2024-07-19 by Mykola] Use the decorators - # @with_constant_typing() + @with_constant_typing() async def run(self, connector: FlowConnector): try: result = await self.callback(connector) @@ -200,11 +207,11 @@ class MessageFlowStep(BaseFlowStep, FilesMixin, MessageFormatterMixin): message: TemplatedString | None = None response_message: TemplatedString | None = None - channel_to_send_to: discord.abc.Messageable | BaseSubstitution | GlobalAbstractChannel | None = None + 
channel_to_send_to: Channel | BaseSubstitution | GlobalAbstractChannel | None = None substitutions: dict[str, str] | None = None - view: BaseView | None = None + buttons: typing.List[ActionButton] | None = None validator: typing.Callable[[str], bool] | None = None validator_error_message: TemplatedString | None = None @@ -213,6 +220,7 @@ class MessageFlowStep(BaseFlowStep, FilesMixin, MessageFormatterMixin): save_response_to_storage: StorageBucketElement | None = None non_blocking: bool = False + delete_after: int | None = None _testing: bool = False @staticmethod @@ -222,10 +230,8 @@ async def _resolve_channel_to_send_to( ) -> Channel: if not channel_to_send_to: return connector.channel - if isinstance(channel_to_send_to, BaseSubstitution): return await channel_to_send_to.resolve(connector) - if isinstance(channel_to_send_to, GlobalAbstractChannel): return await resolve_channel(channel_to_send_to, connector.user) @@ -235,75 +241,64 @@ async def send_message( self, connector: FlowConnector, message: TemplatedString | str, - channel: discord.abc.Messageable | BaseSubstitution[discord.abc.Messageable] | None = None, - view: BaseView | None = None, - ) -> MessageToSend: + channel: Channel | BaseSubstitution[Channel] | None = None, + buttons: typing.Optional[typing.List[ActionButton]] = None, + ): """Send the message.""" message: str | None = ( await self._get_formatted_message(message, connector) if isinstance(message, TemplatedString) else message ) + channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) + files = await self._get_files_to_send(connector) + for file in files: + await connector.interface.send_image(file, connector.user, channel_to_send_to) + await connector.interface.send_message(message, connector.user, channel_to_send_to, buttons) - # files = await self._get_files_to_send(connector) - - channel_to_send_to: Channel = await self._resolve_channel_to_send_to( - channel or self.channel_to_send_to, connector - ) - - # view_to_sent = await view.get_or_create_for_connector(connector, from_step_run=True) if view else None - await connector.interface.send_json( - { - "message": message, - "to": channel_to_send_to.id, - } - ) - - # TODO: [2024-07-19 by Mykola] Use the decorators - # @with_constant_typing() + @with_constant_typing() async def run( self, connector: FlowConnector, channel_to_send_to: Channel | BaseSubstitution | None = None, - ) -> MessageToSend | None: + ): """Run the `BaseFlowStep`.""" - - message: MessageToSend = await self.send_message( - connector, self.message, channel=channel_to_send_to or connector.channel, view=self.view + await self.send_message( + connector, + self.message, + buttons=self.buttons, + channel=channel_to_send_to or connector.channel, ) - if self.non_blocking: await self.respond(connector) - raise FlowStepDone() - # TODO: [2025-03-03 by Mykola] Allow sending multiple messages - return message - - async def respond(self, connector: FlowConnector) -> discord.Message: + async def respond(self, connector: FlowConnector): """Respond to the user.""" if self.response_message: return await self.send_message(connector, self.response_message, channel=connector.channel) async def process_response(self, connector: FlowConnector): """Process the response. 
If the `.response_message` is set, send it.""" - if self.view and connector.event == FlowConnectorEvents.BUTTON_CLICK and connector.interaction: - try: - view_for_connector = await self.view.get_or_create_for_connector(connector) - return await view_for_connector.process_interaction(connector) - except ViewStepFinished: - pass + if self.buttons and connector.event == FlowConnectorEvents.BUTTON_CLICK: + button = [b for b in self.buttons if b.custom_id == connector.button.custom_id] + if len(button) > 1: + logger.error( + f"Multiple buttons with the same custom id {connector.button.custom_id} in {self.buttons=}" + ) + return + if not button: + logger.error(f"Cannot find the button with custom id {connector.button.custom_id} in {self.buttons=}") + return + return await button[0].trigger_action(connector) # TODO: [23.11.2023 by Mykola] Use Whisper to transcribe the audio message into text - if self.validator: if not self.validator(connector.message.content): - error_message = ( + message = ( await self._get_formatted_message(self.validator_error_message, connector) if self.validator_error_message else "Invalid input." ) - - await connector.channel.send(error_message) - return + return await connector.interface.send_message(message, connector.user, connector.channel) if self.save_response_to_storage: await self.save_response_to_storage.set_data(connector.message.content) @@ -316,8 +311,8 @@ async def process_response(self, connector: FlowConnector): class DirectMessageFlowStep(MessageFlowStep): """The same as `MessageFlowStep`, but sends the message to the user's DMs.""" - async def run(self, connector: FlowConnector, channel_to_send_to: discord.abc.Messageable | None = None): - if not channel_to_send_to and not isinstance(connector.channel, discord.DMChannel): + async def run(self, connector: FlowConnector, channel_to_send_to: Channel | None = None): + if not channel_to_send_to: channel_to_send_to = GlobalAbstractChannel.DM_CHANNEL return await super().run(connector, channel_to_send_to=channel_to_send_to) @@ -327,6 +322,9 @@ async def run(self, connector: FlowConnector, channel_to_send_to: discord.abc.Me class SendImageFlowStep(MessageFlowStep): """The Step that sends an image.""" + file: File | pathlib.Path | BytesIO | str | None = None + caption: str | None = None + language: str | None = None force_blocking_step: bool = False @@ -338,18 +336,35 @@ def __post_init__(self): self.non_blocking = True # If the language is set, try to use the language-specific file - if self.language: + if self.language and isinstance(self.file, pathlib.Path): language = self.language.lower() - language_specific_file = self.file.with_stem(f"{self.file.stem}__{language}") - - if language_specific_file.exists(): - self.file = language_specific_file - else: + file, extention = str(self.file).rsplit(".", 1) + language_specific_file = f"{file}__{language}.{extention}" + try: + with open(language_specific_file, "rb"): + self.file = language_specific_file + except FileNotFoundError: logger.warning( f"In step {self.__class__.__name__}: " f"Language-specific file {language_specific_file} does not exist. Using the default." 
) + async def send_message( + self, + connector: FlowConnector, + message: TemplatedString | str, + channel: Channel | BaseSubstitution[Channel] | None = None, + buttons: typing.Optional[typing.List[ActionButton]] = None, + ): + """Send the message.""" + message: str | None = ( + await self._get_formatted_message(message, connector) if isinstance(message, TemplatedString) else message + ) + channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) + await connector.interface.send_image( + self.file, connector.user, channel_to_send_to, caption=self.caption, buttons=buttons + ) + # TODO: [26.09.2023 by Mykola] Move to a separate file class ChatGPTResponseFormat(str, Enum): @@ -366,6 +381,8 @@ class ChatGPTMixin: @staticmethod async def get_response_from_chat_gpt( prompt: str, + openai_client: AsyncOpenAI, + chat_completion_model: str, system_message: str | None = None, response_format: ChatGPTResponseFormat = ChatGPTResponseFormat.AUTO, ) -> str: @@ -401,8 +418,8 @@ async def get_response_from_chat_gpt( ) ) - completion = await async_openai_client.chat.completions.create( - model=settings.OPENAI_CHAT_COMPLETION_MODEL, messages=completion_messages, response_format=response_format + completion = await openai_client.chat.completions.create( + model=chat_completion_model, messages=completion_messages, response_format=response_format ) message = completion.choices[0].message @@ -415,6 +432,9 @@ async def get_response_from_chat_gpt( class ChatGPTRequestMessageFlowStep(MessageFlowStep, ChatGPTMixin): """The Step that gets the response from the ChatGPT API for sending a message.""" + openai_client: AsyncOpenAI | None = None + chat_completion_model: str | None = None + message_prompt: TemplatedString | None = None response_message_prompt: TemplatedString | None = None @@ -429,10 +449,12 @@ async def _get_formatted_message(self, message: TemplatedString, connector: Flow return await super()._get_formatted_message(message, connector, **kwargs) # Send the typing indicator - await connector.channel.trigger_typing() + # await connector.interface.set_typing(connector.user, connector.channel, action=ResponseTypes.START_TYPING) prompt_response = await self.get_response_from_chat_gpt( await super()._get_formatted_message(self.message_prompt, connector, **kwargs), + self.openai_client, + self.chat_completion_model, response_format=self.response_format, ) @@ -446,7 +468,7 @@ async def _get_formatted_message(self, message: TemplatedString, connector: Flow and self.response_format == ChatGPTResponseFormat.JSON_OBJECT ): try: - response_to_save: Any = json.loads(prompt_response) + response_to_save: typing.Any = json.loads(prompt_response) logger.debug(f"Parsed the `{self.__class__.__name__}` JSON response: {response_to_save=}") except json.JSONDecodeError: logger.exception( @@ -457,3 +479,71 @@ async def _get_formatted_message(self, message: TemplatedString, connector: Flow await self.save_prompt_response_to_storage.set_data(response_to_save) return await super()._get_formatted_message(message, connector, prompt_response=prompt_response, **kwargs) + + +@dataclass +class AcceptFileStep(MessageFlowStep): + """Accept a file from the user.""" + + storage_to_save_file_url_to: BaseStorageBucketElement | None = None + storage_to_save_file_object_id_to: BaseStorageBucketElement | None = None + + storage_to_save_saved_file_id_to: BaseStorageBucketElement | None = None + + file_is_required_message: TemplatedString | str = "You need to upload a file." 
+    file_saved_confirmation_message: TemplatedString | str | None = None
+
+    allow_skip: bool = False
+
+    def __post_init__(self):
+        if self.storage_to_save_file_url_to is None:
+            raise ValueError("`storage_to_save_file_url_to` is required!")
+
+    async def process_response(self, connector: FlowConnector):
+        """Process the response."""
+        if not connector.message.attachments:
+            if self.allow_skip:
+                return await super().process_response(connector)
+
+            return await self.send_message(connector, self.file_is_required_message)
+
+        # Get the first attachment
+        attachment = connector.message.attachments[0]
+
+        # Save the file URL
+        if self.storage_to_save_file_url_to:
+            await self.storage_to_save_file_url_to.set_data(attachment.url)
+            logger.info(f"Saved the file URL: {attachment.url=}")
+
+        # Save the File
+        if self.storage_to_save_file_object_id_to or self.storage_to_save_saved_file_id_to:
+            file_io = BytesIO(await attachment.read())
+            file_object_key = await universal_image_storage.upload_image(file_io)
+
+            if self.storage_to_save_file_object_id_to:
+                # Save the file object key
+                await self.storage_to_save_file_object_id_to.set_data(file_object_key)
+
+                logger.info(f"Saved the file object key: {file_object_key=}")
+
+            if self.storage_to_save_saved_file_id_to:
+                # Create the `File` object
+                try:
+                    file = await File.create(
+                        owner_id=connector.user.id,
+                        storage_service=settings.STORAGE_SERVICE_ID,
+                        storage_file_object_key=file_object_key,
+                        file_name=attachment.filename,
+                    )
+
+                except Exception as exception:
+                    logger.error(f"Failed to save the file: {exception}")
+                    return await self.send_message(connector, "Failed to save the file.")
+
+                # Save the file ID
+                await self.storage_to_save_saved_file_id_to.set_data(file.pk)
+
+        if self.file_saved_confirmation_message:
+            await self.send_message(connector, self.file_saved_confirmation_message)
+
+        return await super().process_response(connector)
diff --git a/electro/interfaces.py b/electro/interfaces.py
index e9ab3e1..9147e71 100644
--- a/electro/interfaces.py
+++ b/electro/interfaces.py
@@ -1,18 +1,217 @@
 import contextvars
+import pathlib
 from abc import ABC, abstractmethod
-from typing import Dict, Optional
+from contextlib import asynccontextmanager
+from io import BytesIO
+from typing import Any, Dict, List, Optional, TYPE_CHECKING
 
 from fastapi import WebSocket
 
-from .toolkit.loguru_logging import logger
+from .enums import ResponseTypes
+from .models import BotMessage, Button, Channel, File, Guild, Role, User
+from .settings import settings
+from .toolkit.images_storage.universal_image_storage import universal_image_storage
+
+if TYPE_CHECKING:
+    from .contrib.buttons import BaseButton
 
 
 class BaseInterface(ABC):
     """
-    Interface class for the Electro framework."""
+    Interface class for the Electro framework.
+    """
+
+    async def _create_and_format_buttons(
+        self, buttons: Optional[List["BaseButton"]] = None, bot_message: Optional[BotMessage] = None
+    ) -> List[Dict[str, Any]]:
+        """
+        Format the buttons to be sent to the client.
+ """ + response = [] + for button in buttons or []: + button_object = await Button.create( + bot_message=bot_message, + custom_id=button.custom_id, + style=button.style, + label=button.label, + remove_after_click=button.remove_after_click, + extra_data=getattr(button, "extra_data", {}), + ) + response.append( + { + "id": button_object.id, + "custom_id": button_object.custom_id, + "style": button_object.style, + "label": button_object.label, + "clicked": button_object.clicked, + "remove_after_click": button_object.remove_after_click, + } + ) + return response + + async def _format_user(self, user: Optional[User]) -> Dict[str, Any]: + """ + Format the user to be sent to the client. + """ + if not user: + return None + return { + "id": user.id, + "username": user.username, + "platform_ids": { + identifier.platform: identifier.platform_id for identifier in await user.platform_ids.all() + }, + } + + async def _format_channel(self, channel: Optional[Channel]) -> Dict[str, Any]: + """ + Format the channel to be sent to the client. + """ + if not channel: + return None + return { + "id": channel.id, + "name": channel.name, + "platform_ids": { + identifier.platform: identifier.platform_id for identifier in await channel.platform_ids.all() + }, + } + + async def _format_guild(self, guild: Optional[Guild]) -> Dict[str, Any]: + """ + Format the guild to be sent to the client. + """ + if not guild: + return None + return { + "id": guild.id, + "name": guild.name, + "platform_ids": { + identifier.platform: identifier.platform_id for identifier in await guild.platform_ids.all() + }, + } + + async def send_message( + self, + message: str, + user: Optional[User], + channel: Optional[Channel], + buttons: Optional[List["BaseButton"]] = None, + delete_after: Optional[int] = None, + ): + """ + Send a formatted message to the client by using `format_message`. + """ + bot_message = await BotMessage.create(receiver=user, channel=channel, content=message) + data = { + "user": await self._format_user(user), + "channel": await self._format_channel(channel), + "message": bot_message.content, + "buttons": await self._create_and_format_buttons(buttons, bot_message), + "delete_after": delete_after, + } + await self.send_json( + { + "action": ResponseTypes.MESSAGE, + "content": data, + } + ) + + async def send_image( + self, + image: File | BytesIO | str | pathlib.Path, + user: Optional[User], + channel: Optional[Channel], + caption: Optional[str] = None, + buttons: Optional[List["BaseButton"]] = None, + delete_after: Optional[int] = None, + ): + """ + Send images to the client. 
+ """ + if buttons and not caption: + raise ValueError("A caption must be provided when sending an image with buttons.") + if isinstance(image, File): + image_url = await universal_image_storage.get_image_url(image.storage_file_object_key) + elif isinstance(image, BytesIO): + object_key = await universal_image_storage.upload_image(image) + await File.create( + owner=user, + storage_service=settings.STORAGE_SERVICE_ID, + storage_file_object_key=object_key, + ) + image_url = await universal_image_storage.get_image_url(object_key) + else: + image_url = str(image) + if image_url.startswith(settings.APP_ROOT): + image_url = settings.SERVER_URL + image_url[len(settings.APP_ROOT) :] + if str(image_url).endswith(".gif") and (buttons or caption): + raise ValueError("GIFs do not support buttons or captions.") + + data = { + "user": await self._format_user(user), + "channel": await self._format_channel(channel), + "image": image_url, + "caption": caption, + "buttons": await self._create_and_format_buttons(buttons), + "delete_after": delete_after, + } + await self.send_json( + { + "action": ResponseTypes.IMAGE, + "content": data, + } + ) + + async def add_role(self, user: User, role: Role): + await self.send_json( + { + "action": ResponseTypes.ADD_ROLE, + "content": { + "role": role.name, + "guild": await self._format_guild(role.guild), + "user": await self._format_user(user), + }, + } + ) + + async def remove_role(self, user: User, role: Role): + await self.send_json( + { + "action": ResponseTypes.REMOVE_ROLE, + "content": { + "role": role.name, + "guild": await self._format_guild(role.guild), + "user": await self._format_user(user), + }, + } + ) + + async def set_typing(self, user: User, channel: Channel, action: ResponseTypes): + if action not in [ResponseTypes.START_TYPING, ResponseTypes.STOP_TYPING]: + raise ValueError("Action must be either `START_TYPING` or `STOP_TYPING`.") + await self.send_json( + { + "action": action.value, + "content": { + "user": await self._format_user(user), + "channel": await self._format_channel(channel), + }, + } + ) + + @asynccontextmanager + async def with_constant_typing(self, user: User, channel: Channel): + """An asynchronous context manager for typing indicators or other tasks.""" + await self.set_typing(user, channel, ResponseTypes.START_TYPING) + yield + await self.set_typing(user, channel, ResponseTypes.STOP_TYPING) @abstractmethod - async def send_json(self, *args, **kwargs): + async def send_json(self, data: Dict[str, Any]): + """ + Send an action for the client. + """ raise NotImplementedError @abstractmethod @@ -25,7 +224,7 @@ class WebSocketInterface(BaseInterface): WebSocket Interface for the Electro framework. On the server side, the WebSocketInterface is used to send messages to the client, - If you want to send a message to the client in a Flow, you can use the `send_json` method. + If you want to send a message to the client in a Flow, you can use the `send_message` method. 
""" def __init__(self): @@ -39,17 +238,16 @@ async def disconnect(self): await self.interface.close() self.interface = None - async def send_json(self, data: Dict[str, str]): - await self.interface.send_json(data) - async def stop_process(self, code: int = 1000, reason: Optional[str] = None): await self.interface.close(code, reason) + async def send_json(self, data: Dict[str, Any]): + await self.interface.send_json(data) + class APIInterface(BaseInterface): def __init__(self): self.messages = contextvars.ContextVar("messages") - self.messages.set([]) async def send_json(self, data: Dict[str, str]): diff --git a/electro/models.py b/electro/models.py index e609055..bd5d740 100644 --- a/electro/models.py +++ b/electro/models.py @@ -2,8 +2,10 @@ from __future__ import annotations +from enum import Enum + from tortoise import fields -from tortoise.fields import ForeignKeyRelation +from tortoise.fields import ForeignKeyRelation, ManyToManyField from .toolkit.images_storage.storages_enums import StoragesIDs from .toolkit.tortoise_orm import Model @@ -19,7 +21,6 @@ class BaseModel(Model): is_active = fields.BooleanField(default=True) is_deleted = fields.BooleanField(default=False) - date_deleted = fields.DatetimeField(null=True) class Meta: # pylint: disable=too-few-public-methods @@ -28,154 +29,142 @@ class Meta: # pylint: disable=too-few-public-methods abstract = True -# region Discord Models -class User(BaseModel): - """The model for Discord User.""" +# region Core Models - id = fields.BigIntField(pk=True) - username = fields.CharField(max_length=255) - discriminator = fields.IntField() - avatar = fields.CharField(max_length=255, null=True) +class PlatformId(Model): + """ + This model is used to store the IDs of the users, channels, and guilds on different platforms. - locale = fields.CharField(max_length=255, null=True) + It is used to link the users, channels, and guilds on different platforms to the same user, channel, or guild in the database. + + Attributes: + id (int): The ID of the platform ID. + type (str): The type of the platform ID. Can be "user", "channel", or "guild". + platform_id (str): The ID of the user, channel, or guild on the platform. + platform (str): The name of the platform. + user (User): The user associated with the platform ID. + channel (Channel): The channel associated with the platform ID. + guild (Guild): The guild associated with the platform ID. + """ + + class PlatformIdTypes(str, Enum): + """The types of platform IDs.""" + + USER = "user" + CHANNEL = "channel" + GUILD = "guild" + + id = fields.IntField(pk=True) + type = fields.CharField(max_length=255) + platform_id = fields.CharField(max_length=255) + platform = fields.CharField(max_length=255) + user = fields.ForeignKeyField("electro.User", related_name="platform_ids", null=True) + channel = fields.ForeignKeyField("electro.Channel", related_name="platform_ids", null=True) + guild = fields.ForeignKeyField("electro.Guild", related_name="platform_ids", null=True) + + class Meta: + unique_together = (("type", "platform_id", "platform"),) - is_bot = fields.BooleanField(default=False) +class User(BaseModel): + """The model for User.""" + + id = fields.BigIntField(pk=True) + username = fields.CharField(max_length=255) + locale = fields.CharField(max_length=255, null=True) is_admin = fields.BooleanField(default=False) # guilds: fields.ManyToManyRelation["Guild"] # TODO: [2024-08-30 by Mykola] Allow multiple guilds for the user. 
guild: fields.ForeignKeyRelation[Guild] | Guild = fields.ForeignKeyField( "electro.Guild", related_name="users", null=True ) + roles: fields.ManyToManyRelation[Role] = fields.ManyToManyField("electro.Role", related_name="users") + platform_ids: fields.ReverseRelation[PlatformId] messages: fields.ReverseRelation[Message] state_changed: fields.ReverseRelation[UserStateChanged] - files: fields.ReverseRelation[File] def __str__(self) -> str: """Return the string representation of the model.""" - return f"{self.username}#{self.discriminator}" - - async def create_dm(self) -> Channel: - """ - Create a DM channel with the user. - """ - channel, _ = await Channel.get_or_create(id=self.id, defaults={"type": 1}) - return channel + return self.username class File(BaseModel): """The model for the file.""" - added_by_user: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="files", null=True) - + owner: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", null=True) storage_service: StoragesIDs = fields.CharEnumField(StoragesIDs, max_length=32) - storage_file_object_key = fields.TextField() - file_name = fields.TextField(null=True) - discord_attachment_id = fields.TextField(null=True) - discord_cdn_url = fields.TextField(null=True) - class Guild(BaseModel): - """The model for Discord Guild.""" + """The model for Guild.""" id = fields.BigIntField(pk=True) name = fields.CharField(max_length=255) - icon = fields.CharField(max_length=255, null=True) - banner = fields.CharField(max_length=255, null=True) - description = fields.TextField(null=True) - preferred_locale = fields.CharField(max_length=255, null=True) - afk_channel_id = fields.BigIntField(null=True) - afk_timeout = fields.IntField(null=True) - owner_id = fields.BigIntField(null=True) + platform_ids: fields.ReverseRelation[PlatformId] def __str__(self) -> str: """Return the string representation of the model.""" return self.name -class GuildMember(BaseModel): - """The model for Discord Guild Member.""" - - user = fields.ForeignKeyField("electro.User", related_name="guild_members") - guild = fields.ForeignKeyField("electro.Guild", related_name="guild_members") +class Channel(BaseModel): + """The model for Channel.""" - nickname = fields.CharField(max_length=255, null=True) - joined_at = fields.DatetimeField(null=True) - premium_since = fields.DatetimeField(null=True) - deaf = fields.BooleanField(default=False) - mute = fields.BooleanField(default=False) + class ChannelTypes(str, Enum): + """The types of channels.""" - def __str__(self) -> str: - """Return the string representation of the model.""" - return f"{self.user} in {self.guild}" + DM = "dm" + CHANNEL = "channel" - -class Channel(BaseModel): - """The model for Discord Channel.""" + class ChannelUsedFor(str, Enum): + GLOBAL_ERRORS = "global_errors" + MEANING_CARDS = "meaning_cards" + CAUSE_CARDS = "cause_cards" + IKIGAI_CARDS = "ikigai_cards" + PROFESSION_CARDS = "profession_cards" id = fields.BigIntField(pk=True) guild: Guild = fields.ForeignKeyField("electro.Guild", related_name="channels", null=True) - name = fields.CharField(max_length=255, null=True) type = fields.CharField(max_length=255) + used_for = fields.CharEnumField(ChannelUsedFor, max_length=255, null=True) - used_for: str = fields.CharField(max_length=255, null=True) - + platform_ids: fields.ReverseRelation[PlatformId] messages: fields.ReverseRelation[Message] + bot_messages: fields.ReverseRelation[BotMessage] def __str__(self) -> str: """Return the string representation of the model.""" - 
return f"{self.name} in {self.guild or 'DM'} (used for {self.used_for})" + return f"Channel `{self.name}` in {self.guild}" class Role(BaseModel): - """The model for Discord Role.""" + """The model for Role.""" id = fields.BigIntField(pk=True) - guild: Guild = fields.ForeignKeyField("electro.Guild", related_name="roles") - name = fields.CharField(max_length=255) - color = fields.IntField(null=True) - position = fields.IntField(null=True) - permissions = fields.IntField(null=True) - is_hoisted = fields.BooleanField(default=False) - is_mentionable = fields.BooleanField(default=False) + + users: fields.ManyToManyRelation[User] def __str__(self) -> str: """Return the string representation of the model.""" return f"{self.name} in {self.guild}" -# endregion Discord Models - - -# region Analytics models class Message(BaseModel): """The model for Message.""" id = fields.BigIntField(pk=True) - author: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="messages") - channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="messages") - + channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="messages", null=True) content = fields.TextField() - - created_at = fields.DatetimeField() - edited_at = fields.DatetimeField(null=True) - - is_pinned = fields.BooleanField(null=True) - is_tts = fields.BooleanField(null=True) - - # Dynamically added fields - is_bot_message = fields.BooleanField(null=True) is_command = fields.BooleanField(null=True) def __str__(self) -> str: @@ -183,20 +172,41 @@ def __str__(self) -> str: return f"`{self.author}` Message: `{self.content}`." -class Interaction(BaseModel): - """The model for Interaction.""" +class BotMessage(BaseModel): + """The model for Bot Message.""" id = fields.BigIntField(pk=True) + receiver: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="bot_messages", null=True) + channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField( + "electro.Channel", related_name="bot_messages", null=True + ) + content = fields.TextField(null=True) + + files: fields.ManyToManyRelation[File] = ManyToManyField("electro.File", related_name="bot_messages") + buttons: fields.ReverseRelation[Button] + + def __str__(self) -> str: + """Return the string representation of the model.""" + return f"`{self.receiver}` Bot Message: `{self.content}`." - user: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="interactions") - channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="interactions") - message: fields.ForeignKeyRelation[Message] = fields.ForeignKeyField("electro.Message", related_name="interactions") +class Button(BaseModel): + """The model for Button.""" + + id = fields.BigIntField(pk=True) custom_id = fields.CharField(max_length=255) + style = fields.IntField() + label = fields.CharField(max_length=255) + clicked = fields.BooleanField(default=False) + remove_after_click = fields.BooleanField(default=False) + extra_data = fields.JSONField(null=True) + bot_message: ForeignKeyRelation[BotMessage] = fields.ForeignKeyField( + "electro.BotMessage", related_name="buttons", null=True + ) def __str__(self) -> str: """Return the string representation of the model.""" - return f"`{self.user}` Interaction `{self.custom_id}`." + return f"Button `{self.id}`." 
class UserStateChanged(BaseModel): @@ -212,7 +222,7 @@ def __str__(self) -> str: return f"`{self.user}` State Changed: `{self.previous_state}` -> `{self.new_state}`." -# endregion Analytics models +# endregion Core Models # region Base storage models diff --git a/electro/schemas.py b/electro/schemas.py new file mode 100644 index 0000000..d1fb1df --- /dev/null +++ b/electro/schemas.py @@ -0,0 +1,38 @@ +from pydantic import BaseModel + +from .enums import SupportedPlatforms + + +class PlatformId(BaseModel): + id: str + + +class Guild(BaseModel): + platform_id: PlatformId + name: str + + +class User(BaseModel): + platform_id: PlatformId + username: str + guild: Guild | None + + +class Channel(BaseModel): + platform_id: PlatformId + name: str + guild: Guild | None + type: str + + +class ReceivedMessage(BaseModel): + content: str + author: User + channel: Channel | None + + +class ButtonClick(BaseModel): + id: int + custom_id: str + user: User + channel: Channel | None diff --git a/electro/settings.py b/electro/settings.py index 39eac1e..74ef5fa 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -16,20 +16,44 @@ class Settings(BaseSettings): extra="ignore", ) + # General settings DEBUG: bool = False LOG_LEVEL: str = "INFO" - - LOCALES_PATH: str = "locales" # Relative to the current working directory - DEFAULT_LOCALE: str = "en" # Should mirror the `BOT_LANGUAGE` setting. User in the `make upload-locales` target - DO_USE_FILE_LOGS: bool = True DO_USE_COMMAND_ALIASES: bool = False + DEFAULT_SLEEP_TIME: int = 3 # seconds + SLEEP_TIME_PER_CHARACTER: float = 0.05 + HTTPX_CLIENT_DEFAULT_TIMEOUT: int = 60 + # Build urls for static files by removing root path and adding the server url + SERVER_URL: str = "http://localhost:8000" + APP_ROOT: str = "/app" # Bot settings + MESSAGE_BREAK: str = "--- message break ---" + MESSAGE_SLEEP_INSTRUCTION_PATTERN: str = r"--- sleep (\d+.?\d*) seconds ---" + MESSAGE_MAX_LENGTH: int = 1900 # 2000 - 100 (safe margin) BOT_COMMAND_PREFIX: str = "!" BOT_LANGUAGE: str = "en" # Should mirror the `DEFAULT_LOCALE` setting. User in the Python code + GO_BACK_COMMAND: str = "_go_back" + RELOAD_COMMAND: str = "_reload" + # Validate GO_BACK_COMMAND + if GO_BACK_COMMAND.startswith(BOT_COMMAND_PREFIX): + raise ValueError( + f"The GO_BACK_COMMAND ({GO_BACK_COMMAND}) " + f"should not start with the BOT_COMMAND_PREFIX ({BOT_COMMAND_PREFIX})" + ) + # Validate RELOAD_COMMAND + if RELOAD_COMMAND.startswith(BOT_COMMAND_PREFIX): + raise ValueError( + f"The RELOAD_COMMAND ({RELOAD_COMMAND}) " + f"should not start with the BOT_COMMAND_PREFIX ({BOT_COMMAND_PREFIX})" + ) - # Postgres database credentials + # Locales settings + LOCALES_PATH: str = "locales" # Relative to the current working directory + DEFAULT_LOCALE: str = "en" # Should mirror the `BOT_LANGUAGE` setting. 
User in the `make upload-locales` target + + # Database settings DATABASE_URL: PostgresDsn | None # if the `DATABASE_URL` is not set, then use the following credentials: POSTGRES_HOST: str | None = None @@ -37,71 +61,32 @@ class Settings(BaseSettings): POSTGRES_PASSWORD: str | None = None POSTGRES_PORT: int | None = 5432 POSTGRES_DB: str | None = None - ENABLE_DATABASE_SSL: bool = True - # Redis credentials + # Redis settings REDIS_URL: RedisDsn | None # if the `REDIS_URL` is not set, then use the following credentials: REDIS_HOST: str | None = None REDIS_PORT: int | None = 6379 REDIS_DB: int | None = 0 - # Images storage + # Storage settings STORAGE_SERVICE_ID: StoragesIDs = "S3" - # S3 storage # Allow this to be optional S3_ENDPOINT_URL: str | None = None S3_ACCESS_KEY_ID: str | None = None S3_SECRET_ACCESS_KEY: str | None = None S3_REGION_NAME: str | None = None - S3_IMAGES_BUCKET_NAME: str = "files" - # Azure Blob Storage # NB: It appears to be never used directly, rather the env vars are used by `DefaultAzureCredential` AZURE_CLIENT_ID: str | None = None AZURE_TENANT_ID: str | None = None AZURE_CLIENT_SECRET: str | None = None - AZURE_STORAGE_ACCOUNT_NAME: str | None = None - + AZURE_ACCOUNT_KEY: str | None = None AZURE_CONTAINER_NAME: str = "files" - HTTPX_CLIENT_DEFAULT_TIMEOUT: int = 60 - - # TODO: [06.03.2024 by Mykola] Do not let it be `None`. It's `None` only because we want to let `spinx` import it - # while building the documentation. - OPENAI_API_KEY: str | None = "sk_test_1234567890" - - OPENAI_CHAT_COMPLETION_MODEL: str = "gpt-4o" - OPENAI_DALLE_MODEL: str = "dall-e-3" - - DEFAULT_SLEEP_TIME: int = 3 # seconds - SLEEP_TIME_PER_CHARACTER: float = 0.05 - - MESSAGE_BREAK: str = "--- message break ---" - MESSAGE_SLEEP_INSTRUCTION_PATTERN: str = r"--- sleep (\d+.?\d*) seconds ---" - - MESSAGE_MAX_LENGTH: int = 1900 # 2000 - 100 (safe margin) - - GO_BACK_COMMAND: str = "_go_back" - RELOAD_COMMAND: str = "_reload" - - # Validate GO_BACK_COMMAND - if GO_BACK_COMMAND.startswith(BOT_COMMAND_PREFIX): - raise ValueError( - f"The GO_BACK_COMMAND ({GO_BACK_COMMAND}) " - f"should not start with the BOT_COMMAND_PREFIX ({BOT_COMMAND_PREFIX})" - ) - - # Validate RELOAD_COMMAND - if RELOAD_COMMAND.startswith(BOT_COMMAND_PREFIX): - raise ValueError( - f"The RELOAD_COMMAND ({RELOAD_COMMAND}) " - f"should not start with the BOT_COMMAND_PREFIX ({BOT_COMMAND_PREFIX})" - ) - settings = Settings() diff --git a/electro/storage.py b/electro/storage.py index ba0460a..7a88d46 100644 --- a/electro/storage.py +++ b/electro/storage.py @@ -12,8 +12,6 @@ import typing from abc import ABC, abstractmethod -from .toolkit import redis_storage - DEFAULT_FLOW_STORAGE_PREFIX = "flow::" DEFAULT_MISSING_ADDRESS_PART = "missing" @@ -182,75 +180,3 @@ async def clear(self): self._user_data.clear() self._channel_states.clear() self._channel_data.clear() - - -class FlowRedisStorage(BaseFlowStorage): - """The storage used for `Flow`. 
Stores data for all the users in Redis.""" - - _redis_storage: redis_storage.RedisStorage - _flow_storage_prefix: str - - _missing_address_part: str - - def __init__( - self, - storage: redis_storage.RedisStorage, - flow_storage_prefix: str = DEFAULT_FLOW_STORAGE_PREFIX, - missing_address_part: str = DEFAULT_MISSING_ADDRESS_PART, - ): - self._redis_storage = storage - - self._flow_storage_prefix = flow_storage_prefix - self._missing_address_part = missing_address_part - - async def get_user_state(self, user_id: int) -> str | None: - """Get the state for a user.""" - return await self._redis_storage.get_state(chat=self._missing_address_part, user=user_id) - - async def get_channel_state(self, channel_id: int) -> str | None: - """Get the state for a channel.""" - return await self._redis_storage.get_state(chat=channel_id, user=self._missing_address_part) - - async def set_user_state(self, user_id: int, state: str | None): - """Set the state for a user.""" - await self._redis_storage.set_state(chat=self._missing_address_part, user=user_id, state=state) - - async def set_channel_state(self, channel_id: int, state: str | None): - """Set the state for a channel.""" - await self._redis_storage.set_state(chat=channel_id, user=self._missing_address_part, state=state) - - async def delete_user_state(self, user_id: int): - """Delete the state for a user.""" - await self._redis_storage.set_state(chat=self._missing_address_part, user=user_id, state=None) - - async def delete_channel_state(self, channel_id: int): - """Delete the state for a channel.""" - await self._redis_storage.set_state(chat=channel_id, user=self._missing_address_part, state=None) - - async def get_user_data(self, user_id: int) -> UserData: - """Get the data for a user.""" - return UserData(**await self._redis_storage.get_data(chat=self._missing_address_part, user=user_id)) - - async def get_channel_data(self, channel_id: int) -> ChannelData: - """Get the data for a channel.""" - return ChannelData(**await self._redis_storage.get_data(chat=channel_id, user=self._missing_address_part)) - - async def set_user_data(self, user_id: int, data: UserData | dict[str, typing.Any] | None): - """Set the data for a user.""" - await self._redis_storage.set_data(chat=self._missing_address_part, user=user_id, data=dict(**data)) - - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, typing.Any] | None): - """Set the data for a channel.""" - await self._redis_storage.set_data(chat=channel_id, user=self._missing_address_part, data=dict(**data)) - - async def delete_user_data(self, user_id: int): - """Delete the data for a user.""" - await self._redis_storage.set_data(chat=self._missing_address_part, user=user_id, data=None) - - async def delete_channel_data(self, channel_id: int): - """Delete the data for a channel.""" - await self._redis_storage.set_data(chat=channel_id, user=self._missing_address_part, data=None) - - async def clear(self): - """Clear the storage.""" - await self._redis_storage.reset_all() diff --git a/electro/substitutions.py b/electro/substitutions.py index 62fd956..46520a4 100644 --- a/electro/substitutions.py +++ b/electro/substitutions.py @@ -6,9 +6,8 @@ from abc import ABC, abstractmethod from enum import Enum -import discord - from .flow_connector import FlowConnector +from .models import Channel, User from .toolkit.redis_storage import RedisStorage VALUE = typing.TypeVar("VALUE") @@ -60,8 +59,8 @@ def __init__( self.is_chat_specific = is_chat_specific async def _resolve(self, connector: 
FlowConnector) -> str: - if not self.is_chat_specific and not isinstance(connector.channel, discord.DMChannel): - channel = await connector.bot.create_dm(connector.user) + if not self.is_chat_specific and not isinstance(connector.channel, Channel): + channel = None else: channel = connector.channel @@ -133,7 +132,7 @@ def __getattribute__(self, item) -> AttributeSubstitution: class UserSubstitutionObject(BaseFlowSubstitutionObject): - object: discord.User + object: User flow_connector_attribute = "user" @@ -148,12 +147,10 @@ class GlobalAbstractChannel(str, Enum): DM_CHANNEL = "dm_channel" -async def resolve_channel( - abstract_channel: GlobalAbstractChannel, user: discord.User -) -> discord.TextChannel | discord.DMChannel: +async def resolve_channel(abstract_channel: GlobalAbstractChannel, user: User) -> Channel: """Resolve the channel by the name.""" if abstract_channel == GlobalAbstractChannel.DM_CHANNEL: - return await user.create_dm() + return None raise ValueError(f"Unknown channel: {abstract_channel}") diff --git a/electro/toolkit/buttons.py b/electro/toolkit/buttons.py deleted file mode 100644 index 3ca6606..0000000 --- a/electro/toolkit/buttons.py +++ /dev/null @@ -1,22 +0,0 @@ -""" -This module contains the button styles and a function to create [Discord] buttons. - -The idea being, we abstract the button styles and the button creation so that in the future, when -we want to support multiple front-ends, we can do so without needing to change the public interface of the Framework. -""" - -import discord - - -class FrameworkButtonStyle(discord.ButtonStyle): - """A class to store the button styles.""" - - -def create_button( - label: str, *, style: FrameworkButtonStyle = FrameworkButtonStyle.primary, custom_id: str = None -) -> discord.ui.Button: - """Get a button with the given label and style.""" - # TODO: [04.03.2024 by Mykola] Make sure the `FrameworkButtonStyle` is compatible with `discord.ui.Button` - # Right now we hook it up directly, but since we want to add a new front-end in the future, the way we connect the - # both might change. 
diff --git a/electro/toolkit/decorators.py b/electro/toolkit/decorators.py
new file mode 100644
index 0000000..c59a3eb
--- /dev/null
+++ b/electro/toolkit/decorators.py
@@ -0,0 +1,35 @@
+"""Decorators used in the Application."""
+
+from functools import wraps
+from typing import Callable, Coroutine
+
+from electro.flow_connector import FlowConnector, FlowConnectorEvents
+
+
+def with_constant_typing(
+    run_only_on_events: list[FlowConnectorEvents] | None = None,
+) -> Callable:
+    """Send a typing indicator while executing the function."""
+
+    def decorator(function: Callable[..., Coroutine]) -> Callable[..., Coroutine]:
+        """The actual decorator."""
+
+        @wraps(function)
+        async def wrapper(*args, **kwargs):
+            # Check if any of the arguments is a FlowConnector
+            flow_connector: FlowConnector | None = next(
+                (arg for arg in args if isinstance(arg, FlowConnector)),
+                None,
+            )
+
+            if flow_connector and (not run_only_on_events or flow_connector.event in run_only_on_events):
+                # Send a typing indicator while executing the function
+                async with flow_connector.interface.with_constant_typing(flow_connector.user, flow_connector.channel):
+                    return await function(*args, **kwargs)
+
+            # If the FlowConnector is not found, just execute the function
+            return await function(*args, **kwargs)
+
+        return wrapper
+
+    return decorator
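The decorator above only relies on finding a `FlowConnector` among the positional arguments, so any coroutine handler that receives one can opt in. A minimal usage sketch, assuming a hypothetical `AnswerStep.process_response` handler and a placeholder `summarize` coroutine standing in for slow work (neither name is part of this patch):

# Sketch only: `AnswerStep` and `summarize` are illustrative names, not part of the patch.
from electro.flow_connector import FlowConnector
from electro.toolkit.decorators import with_constant_typing


async def summarize(text: str) -> str:
    return text[:100]  # placeholder for slow work (an LLM call, a long query, ...)


class AnswerStep:
    @with_constant_typing()
    async def process_response(self, flow_connector: FlowConnector) -> None:
        # The wrapper locates `flow_connector` in the positional arguments and keeps
        # flow_connector.interface.with_constant_typing(user, channel) open while this runs.
        await summarize(flow_connector.message.content)

Passing `run_only_on_events=[...]` would narrow the typing indicator to specific `FlowConnectorEvents`; with no filter it applies whenever a connector is found.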
diff --git a/electro/toolkit/images_storage/storage_services/_base_storage_service.py b/electro/toolkit/images_storage/storage_services/_base_storage_service.py
index 58e7c00..a64a675 100644
--- a/electro/toolkit/images_storage/storage_services/_base_storage_service.py
+++ b/electro/toolkit/images_storage/storage_services/_base_storage_service.py
@@ -26,3 +26,13 @@ async def download_image(self, object_key: str) -> BytesIO:
 
         """
         raise NotImplementedError
+
+    @abstractmethod
+    async def get_image_url(self, object_key: str) -> str:
+        """Returns the URL of the image.
+
+        :param object_key: object key of the image
+        :return: URL of the image
+
+        """
+        raise NotImplementedError
diff --git a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py
index 3bdde13..911544f 100644
--- a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py
+++ b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py
@@ -1,11 +1,13 @@
 """Azure Blob Storage Service Module."""
 
+import datetime
 import os
 from io import BytesIO
 
 from azure.core.exceptions import ResourceNotFoundError
 from azure.identity.aio import DefaultAzureCredential
-from azure.storage.blob.aio import BlobServiceClient
+from azure.storage.blob import BlobSasPermissions, ContentSettings, generate_blob_sas
+from azure.storage.blob.aio import BlobClient, BlobServiceClient
 
 from ....settings import settings
 from ...images_storage.storage_services._base_storage_service import BaseStorageService
@@ -49,7 +51,9 @@ async def upload_image(self, image_io: BytesIO) -> str:
             await self._ensure_container_exists()
             container_client = client.get_container_client(self.container_name)
             blob_client = container_client.get_blob_client(blob_name)
-            await blob_client.upload_blob(image_io, blob_type="BlockBlob")
+            await blob_client.upload_blob(
+                image_io, blob_type="BlockBlob", content_settings=ContentSettings(content_type="image/png")
+            )
             return blob_name
 
     async def download_image(self, object_key: str) -> BytesIO:
@@ -62,3 +66,24 @@
             except ResourceNotFoundError:
                 raise FileNotFoundError(f"Image with key '{object_key}' not found in the Azure Blob Storage.")
         return BytesIO(await image_data.readall())
+
+    async def _create_image_access_token(self, blob_client: BlobClient, account_key: str) -> str:
+        start_time = datetime.datetime.now(datetime.timezone.utc)
+        expiry_time = start_time + datetime.timedelta(days=1)
+        return generate_blob_sas(
+            account_name=blob_client.account_name,
+            container_name=blob_client.container_name,
+            blob_name=blob_client.blob_name,
+            account_key=account_key,
+            permission=BlobSasPermissions(read=True),
+            expiry=expiry_time,
+            start=start_time,
+        )
+
+    async def get_image_url(self, object_key: str) -> str:
+        """Get the URL of an image in the Azure Blob Storage."""
+        async with await self.blob_service_client as client:
+            container_client = client.get_container_client(self.container_name)
+            blob_client = container_client.get_blob_client(object_key)
+            token = await self._create_image_access_token(blob_client, settings.AZURE_ACCOUNT_KEY)
+            return f"{blob_client.url}?{token}"
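One consequence of the SAS-based approach above is that the returned Azure URL is readable for only 24 hours, so callers should not persist it indefinitely. A rough caller-side sketch of a refresh-before-expiry cache; `cached_image_url` is a hypothetical helper, and `storage` stands for any object exposing the `get_image_url` coroutine added in this patch:

# Sketch only: a hypothetical cache that re-requests URLs before the 1-day SAS expiry.
import datetime

_url_cache: dict[str, tuple[str, datetime.datetime]] = {}


async def cached_image_url(storage, object_key: str) -> str:
    now = datetime.datetime.now(datetime.timezone.utc)
    cached = _url_cache.get(object_key)
    if cached and cached[1] > now:
        return cached[0]
    url = await storage.get_image_url(object_key)
    # Refresh comfortably inside the 24-hour window the SAS token is valid for.
    _url_cache[object_key] = (url, now + datetime.timedelta(hours=23))
    return url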
diff --git a/electro/toolkit/images_storage/storage_services/s3_service.py b/electro/toolkit/images_storage/storage_services/s3_service.py
index c0999a0..42bf96b 100644
--- a/electro/toolkit/images_storage/storage_services/s3_service.py
+++ b/electro/toolkit/images_storage/storage_services/s3_service.py
@@ -95,3 +95,19 @@ async def download_image(self, object_key: str) -> BytesIO:
         except Exception as e:
             logger.error(f"Failed to download image: {e}")
             raise
+
+    async def get_image_url(self, object_key: str) -> str:
+        """Returns the URL of the image.
+
+        :param object_key: object key of the image
+        :return: URL of the image
+
+        """
+        async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL) as s3:
+            try:
+                url = f"{settings.S3_ENDPOINT_URL}/{self.bucket_name}/{object_key}"
+                logger.info(f"Image URL: {url}")
+                return url
+            except Exception as e:
+                logger.error(f"Failed to get image URL: {e}")
+                raise
diff --git a/electro/toolkit/images_storage/universal_image_storage.py b/electro/toolkit/images_storage/universal_image_storage.py
index 332084e..0dcb653 100644
--- a/electro/toolkit/images_storage/universal_image_storage.py
+++ b/electro/toolkit/images_storage/universal_image_storage.py
@@ -25,6 +25,10 @@ async def download_image(self, object_key: str) -> BytesIO:
         """Download an image from the storage service."""
         return await self.storage_service.download_image(object_key)
 
+    async def get_image_url(self, object_key: str) -> str:
+        """Get the URL of the image from the storage service."""
+        return await self.storage_service.get_image_url(object_key)
+
 
 STORAGES_IDS_TO_SERVICES = {
     StoragesIDs.S3: S3Service,
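With `get_image_url` declared on the base class, implemented by both back ends, and exposed through `UniversalImageStorage`, calling code can stay storage-agnostic. A minimal sketch, assuming a hypothetical `attach_image` helper that only depends on the `BaseStorageService` interface extended above:

# Sketch only: `attach_image` is illustrative; it relies solely on the interface above,
# so it behaves the same whether the configured service is S3 or Azure Blob Storage.
from io import BytesIO

from electro.toolkit.images_storage.storage_services._base_storage_service import BaseStorageService


async def attach_image(storage: BaseStorageService, image_io: BytesIO) -> str:
    object_key = await storage.upload_image(image_io)
    # Hand the short-lived URL (not the raw bytes) to the interface layer.
    return await storage.get_image_url(object_key)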
diff --git a/electro/toolkit/memory_storage.py b/electro/toolkit/memory_storage.py
deleted file mode 100644
index 4897cac..0000000
--- a/electro/toolkit/memory_storage.py
+++ /dev/null
@@ -1,169 +0,0 @@
-import copy
-import typing
-
-
-class MemoryStorage:
-    """
-    In-memory based states storage.
-
-    This type of storage is not recommended for usage in bots,
-    because you will lose all states after restarting.
-    """
-
-    async def wait_closed(self):
-        pass
-
-    async def close(self):
-        self.data.clear()
-
-    def __init__(self):
-        self.data = {}
-
-    @classmethod
-    def check_address(
-        cls,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-    ) -> typing.Tuple[typing.Union[str, int]]:
-        """
-        In all storage's methods chat or user is always required.
-        If one of them is not provided, you have to set missing value based on the provided one.
-
-        This method performs the check described above.
-
-        :param chat: chat_id
-        :param user: user_id
-        :return:
-        """
-        if chat is None and user is None:
-            raise ValueError("`user` or `chat` parameter is required but no one is provided!")
-
-        if user is None:
-            user = chat
-
-        elif chat is None:
-            chat = user
-
-        return chat, user
-
-    def resolve_address(self, chat, user):
-        chat_id, user_id = map(str, self.check_address(chat=chat, user=user))
-
-        if chat_id not in self.data:
-            self.data[chat_id] = {}
-        if user_id not in self.data[chat_id]:
-            self.data[chat_id][user_id] = {"state": None, "data": {}, "bucket": {}}
-
-        return chat_id, user_id
-
-    async def get_state(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        default: typing.Optional[str] = None,
-    ) -> typing.Optional[str]:
-        chat, user = self.resolve_address(chat=chat, user=user)
-        return self.data[chat][user].get("state", default)
-
-    async def get_data(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        default: typing.Optional[str] = None,
-    ) -> typing.Dict:
-        chat, user = self.resolve_address(chat=chat, user=user)
-        return copy.deepcopy(self.data[chat][user]["data"])
-
-    async def update_data(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        data: typing.Dict = None,
-        **kwargs,
-    ):
-        if data is None:
-            data = {}
-        chat, user = self.resolve_address(chat=chat, user=user)
-        self.data[chat][user]["data"].update(data, **kwargs)
-
-    async def set_state(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        state: typing.AnyStr = None,
-    ):
-        chat, user = self.resolve_address(chat=chat, user=user)
-        self.data[chat][user]["state"] = state
-
-    async def set_data(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        data: typing.Dict = None,
-    ):
-        chat, user = self.resolve_address(chat=chat, user=user)
-        self.data[chat][user]["data"] = copy.deepcopy(data)
-        self._cleanup(chat, user)
-
-    async def reset_state(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        with_data: typing.Optional[bool] = True,
-    ):
-        await self.set_state(chat=chat, user=user, state=None)
-        if with_data:
-            await self.set_data(chat=chat, user=user, data={})
-        self._cleanup(chat, user)
-
-    @staticmethod
-    def has_bucket():
-        return True
-
-    async def get_bucket(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        default: typing.Optional[dict] = None,
-    ) -> typing.Dict:
-        chat, user = self.resolve_address(chat=chat, user=user)
-        return copy.deepcopy(self.data[chat][user]["bucket"])
-
-    async def set_bucket(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        bucket: typing.Dict = None,
-    ):
-        chat, user = self.resolve_address(chat=chat, user=user)
-        self.data[chat][user]["bucket"] = copy.deepcopy(bucket)
-        self._cleanup(chat, user)
-
-    async def update_bucket(
-        self,
-        *,
-        chat: typing.Union[str, int, None] = None,
-        user: typing.Union[str, int, None] = None,
-        bucket: typing.Dict = None,
-        **kwargs,
-    ):
-        if bucket is None:
-            bucket = {}
-        chat, user = self.resolve_address(chat=chat, user=user)
-        self.data[chat][user]["bucket"].update(bucket, **kwargs)
-
-    def _cleanup(self, chat, user):
-        chat, user = self.resolve_address(chat=chat, user=user)
-        if self.data[chat][user] == {"state": None, "data": {}, "bucket": {}}:
-            del self.data[chat][user]
-        if not self.data[chat]:
-            del self.data[chat]
diff --git a/electro/toolkit/openai_client.py b/electro/toolkit/openai_client.py
deleted file mode 100644
index db5624b..0000000
--- a/electro/toolkit/openai_client.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from openai import AsyncOpenAI
-
-from ..settings import settings
-
-async_openai_client = AsyncOpenAI(api_key=settings.OPENAI_API_KEY)
diff --git a/electro/triggers.py b/electro/triggers.py
index afcfcc4..06baf5f 100644
--- a/electro/triggers.py
+++ b/electro/triggers.py
@@ -46,12 +46,12 @@ class CommandTrigger(BaseFlowTrigger):
     async def _check(self, connector: FlowConnector, scope: FlowScopes | None = None) -> bool:
         """Check if the `Flow` can be run based on the command (and the scope, if provided)."""
         matches: list[str] = [
-            f"{connector.bot.command_prefix}{self.command}",
+            f"{settings.BOT_COMMAND_PREFIX}{self.command}",
         ]
 
         if settings.DO_USE_COMMAND_ALIASES or settings.DEBUG:
            command_alias = "".join([part[0] for part in self.command.split("_") if part])
-            matches.append(f"{connector.bot.command_prefix}{command_alias}")
+            matches.append(f"{settings.BOT_COMMAND_PREFIX}{command_alias}")
 
         if connector.message and connector.message.content in matches:
             return True
diff --git a/electro/types_.py b/electro/types_.py
deleted file mode 100644
index 91a5e0d..0000000
--- a/electro/types_.py
+++ /dev/null
@@ -1,74 +0,0 @@
-"""The types (objects) used in the `electro` framework. Used to un-couple `electro` from the Discord framework."""
-
-from datetime import datetime
-
-from pydantic import BaseModel, ConfigDict
-
-from .enums import ChannelType, SupportedPlatforms
-
-
-class ElectroBaseModel(BaseModel):
-    """The base model for all the models in the `electro` framework."""
-
-    model_config = ConfigDict(extra="allow")
-
-    # TODO: [2024-12-16 by Mykola] Allow more platforms
-    from_platform: SupportedPlatforms = SupportedPlatforms.DISCORD
-
-
-class User(ElectroBaseModel):
-    """The model for User."""
-
-    id: int
-    username: str
-
-    bot: bool = False
-
-    discriminator: str | None
-    avatar: dict | None
-
-    async def create_dm(self) -> "Channel":
-        """
-        Return a DM channel with the user.
-        """
-        return Channel(id=self.id, name=self.username, type=ChannelType.private, guild=None, used_for="DM")
-
-
-class Guild(ElectroBaseModel):
-    """The model for Guild."""
-
-    id: int
-    name: str
-    icon: str | None
-
-
-class Channel(ElectroBaseModel):
-    """The model for Channel."""
-
-    id: int
-    name: str | None
-    type: ChannelType
-
-    guild: Guild | None
-
-    used_for: str | None
-
-
-class Message(ElectroBaseModel):
-    """The model for Message."""
-
-    id: int
-    content: str
-
-    author: User
-    channel: Channel
-
-    created_at: datetime
-    edited_at: datetime | None
-
-
-class MessageToSend(ElectroBaseModel):
-    """The model for Message to send."""
-
-    content: str
-    channel: Channel
diff --git a/electro/extra/i18n_gettext.py b/examples/i18n_gettext.py
similarity index 68%
rename from electro/extra/i18n_gettext.py
rename to examples/i18n_gettext.py
index b7ff408..96a39da 100644
--- a/electro/extra/i18n_gettext.py
+++ b/examples/i18n_gettext.py
@@ -1,7 +1,7 @@
 import gettext
 
-from ..settings import settings
-from ..toolkit.templated_i18n import make_templated_gettext
+from electro.settings import settings
+from electro.toolkit.templated_i18n import make_templated_gettext
 
 # Set up `gettext`
 en = gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[settings.BOT_LANGUAGE])
diff --git a/examples/test_flow.py b/examples/test_flow.py
index e83b76c..51df29e 100644
--- a/examples/test_flow.py
+++ b/examples/test_flow.py
@@ -2,14 +2,13 @@
 
 import uvicorn
 
-from electro.app import app
 from electro import Flow, MessageFlowStep
-from electro.extra.i18n_gettext import templated_gettext as _
-
+from electro.app import app
 from electro.triggers import CommandTrigger
-
 from electro.flow_manager import global_flow_manager
+from .i18n_gettext import templated_gettext as _
+
 
 
 class TestFlow(Flow):
     """Test Flow."""
diff --git a/migrations/electro b/migrations/electro
deleted file mode 160000
index 9751d9b..0000000
--- a/migrations/electro
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 9751d9ba89e47ae59a58dc03bdb9e6dfb91fc7d6
diff --git a/poetry.lock b/poetry.lock
index 1097170..f6511f1 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
 
 [[package]]
 name = "aerich"
@@ -6,6 +6,7 @@ version = "0.7.2"
 description = "A database migrations tool for Tortoise ORM."
optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "aerich-0.7.2-py3-none-any.whl", hash = "sha256:84c78c07d45436b89ca4db5411eca4e9292a591fb7d6fd4282fa4a7d0c6d2af1"}, {file = "aerich-0.7.2.tar.gz", hash = "sha256:31d67de7b96184636b89de99062e059e5e6204b6251d24c33eb21fc9cf982e09"}, @@ -28,6 +29,7 @@ version = "13.4.0" description = "Async boto3 wrapper" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ {file = "aioboto3-13.4.0-py3-none-any.whl", hash = "sha256:d78f3400ef3a01b4d5515108ef244941894a0bc39c4716321a00e15898d7e002"}, {file = "aioboto3-13.4.0.tar.gz", hash = "sha256:3105f9e5618c686c90050e60eb5ebf9e28f7f8c4e0fa162d4481aaa402008aab"}, @@ -47,6 +49,7 @@ version = "2.18.0" description = "Async client for aws services using botocore and aiohttp" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiobotocore-2.18.0-py3-none-any.whl", hash = "sha256:89634470946944baf0a72fe2939cdd5f98b61335d400ca55f3032aca92989ec1"}, {file = "aiobotocore-2.18.0.tar.gz", hash = "sha256:c54db752c5a742bf1a05c8359a93f508b4bf702b0e6be253a4c9ef1f9c9b6706"}, @@ -73,6 +76,7 @@ version = "24.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, @@ -84,6 +88,7 @@ version = "2.4.6" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, @@ -95,6 +100,7 @@ version = "3.11.13" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, @@ -197,6 +203,7 @@ version = "0.12.0" description = "itertools and builtins for AsyncIO and mixed iterables" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796"}, {file = "aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b"}, @@ -212,6 +219,7 @@ version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -226,6 +234,7 @@ version = "0.20.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = 
"sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, @@ -244,6 +253,7 @@ version = "1.0.0" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.10" +groups = ["dev"] files = [ {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, @@ -255,6 +265,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -266,6 +277,7 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -287,6 +299,7 @@ version = "3.3.8" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, @@ -298,6 +311,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -361,6 +375,7 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, @@ -380,6 +395,7 @@ version = "2.2.0" description = "Seamlessly integrate pydantic models in your Sphinx documentation." 
optional = false python-versions = "<4.0.0,>=3.8.1" +groups = ["dev"] files = [ {file = "autodoc_pydantic-2.2.0-py3-none-any.whl", hash = "sha256:8c6a36fbf6ed2700ea9c6d21ea76ad541b621fbdf16b5a80ee04673548af4d95"}, ] @@ -403,6 +419,7 @@ version = "1.32.0" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, @@ -422,6 +439,7 @@ version = "1.20.0" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "azure_identity-1.20.0-py3-none-any.whl", hash = "sha256:5f23fc4889a66330e840bd78830287e14f3761820fe3c5f77ac875edcb9ec998"}, {file = "azure_identity-1.20.0.tar.gz", hash = "sha256:40597210d56c83e15031b0fe2ea3b26420189e1e7f3e20bdbb292315da1ba014"}, @@ -440,6 +458,7 @@ version = "12.24.1" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "azure_storage_blob-12.24.1-py3-none-any.whl", hash = "sha256:77fb823fdbac7f3c11f7d86a5892e2f85e161e8440a7489babe2195bf248f09e"}, {file = "azure_storage_blob-12.24.1.tar.gz", hash = "sha256:052b2a1ea41725ba12e2f4f17be85a54df1129e13ea0321f5a2fcc851cbf47d4"}, @@ -460,6 +479,7 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, @@ -474,6 +494,7 @@ version = "24.10.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -518,6 +539,7 @@ version = "1.36.1" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "boto3-1.36.1-py3-none-any.whl", hash = "sha256:eb21380d73fec6645439c0d802210f72a0cdb3295b02953f246ff53f512faa8f"}, {file = "boto3-1.36.1.tar.gz", hash = "sha256:258ab77225a81d3cf3029c9afe9920cd9dec317689dfadec6f6f0a23130bb60a"}, @@ -537,6 +559,7 @@ version = "1.36.1" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "botocore-1.36.1-py3-none-any.whl", hash = "sha256:dec513b4eb8a847d79bbefdcdd07040ed9d44c20b0001136f0890a03d595705a"}, {file = "botocore-1.36.1.tar.gz", hash = "sha256:f789a6f272b5b3d8f8756495019785e33868e5e00dd9662a3ee7959ac939bb12"}, @@ -556,6 +579,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -567,6 +591,8 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -646,6 +672,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -747,6 +774,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -761,10 +789,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "cryptography" @@ -772,6 +802,7 @@ version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -825,6 +856,7 @@ version = "0.9.0" description = "Dictdiffer is a library that helps you to diff and patch dictionaries." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"}, {file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"}, @@ -842,6 +874,7 @@ version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -857,6 +890,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -868,6 +902,7 @@ version = "0.1.4" description = "Use Redis URLs in your Django Application." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "dj-redis-url-0.1.4.tar.gz", hash = "sha256:eca9633d2c0a08c0d68ca50b631b25f1966842dd953f9a210bc659c06d75d6f2"}, ] @@ -881,6 +916,7 @@ version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -901,6 +937,7 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -912,6 +949,7 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -927,6 +965,7 @@ version = "0.115.8" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, @@ -953,6 +992,7 @@ version = "0.0.7" description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4"}, {file = "fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e"}, @@ -972,6 +1012,7 @@ version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1073,6 +1114,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1084,6 +1126,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -1105,6 +1148,7 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -1160,6 +1204,7 @@ version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -1184,6 +1229,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1198,6 +1244,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -1209,6 +1256,7 @@ version = "2.1.0" description = "Simple module to parse ISO 8601 dates" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242"}, {file = "iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df"}, @@ -1220,6 +1268,7 @@ version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, @@ -1231,6 +1280,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1245,6 +1295,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -1262,6 +1313,7 @@ version = "0.8.2" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -1347,6 +1399,7 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -1358,6 +1411,7 @@ version = "0.7.3" description = "Python logging made (stupidly) simple" optional = false python-versions = "<4.0,>=3.5" +groups = ["main"] files = [ {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, @@ -1376,6 +1430,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1400,6 +1455,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1470,6 +1526,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1481,6 +1538,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1492,6 +1550,7 @@ version = "1.31.1" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, @@ -1511,6 +1570,7 @@ version = "1.2.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, @@ -1526,6 +1586,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -1627,6 +1688,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1638,6 +1700,7 @@ version = "1.64.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "openai-1.64.0-py3-none-any.whl", hash = "sha256:20f85cde9e95e9fbb416e3cb5a6d3119c0b28308afd6e3cc47bf100623dac623"}, {file = "openai-1.64.0.tar.gz", hash = "sha256:2861053538704d61340da56e2f176853d19f1dc5704bc306b7597155f850d57a"}, @@ -1663,6 +1726,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1674,6 +1738,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1685,6 +1750,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -1701,6 +1767,7 @@ version = "2.10.1" description = "Wraps the portalocker recipe for easy usage" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, @@ -1720,6 +1787,7 @@ version = "0.3.0" description = "Accelerated property cache" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, @@ -1821,35 +1889,14 @@ files = [ {file = "propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5"}, ] -[[package]] -name = "py-cord" -version = "0.1.dev5905+gd8070a9" -description = "A Python wrapper for the Discord API" -optional = false -python-versions = ">=3.8" -files = [] -develop = false - -[package.dependencies] -aiohttp = ">=3.6.0,<4.0" - -[package.extras] -docs = ["furo @ git+https://github.com/pradyunsg/furo@193643f", "myst-parser (==1.0.0)", "sphinx (==5.3.0)", "sphinx-autodoc-typehints (==1.23.0)", "sphinx-copybutton (==0.5.2)", "sphinxcontrib-websupport (==1.2.4)", "sphinxcontrib_trio (==1.1.2)", "sphinxext-opengraph (==0.9.1)"] -speed = ["aiohttp[speedups]", "msgspec (>=0.18.6,<0.19.0)"] -voice = ["PyNaCl (>=1.3.0,<1.6)"] - -[package.source] -type = "git" -url = "https://github.com/mykolasolodukha/pycord.git" -reference = "d8070a90a8ec12dbc64b703513c14fa80834861d" -resolved_reference = "d8070a90a8ec12dbc64b703513c14fa80834861d" - [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1861,6 +1908,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1881,6 +1929,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1993,6 +2042,7 @@ version = "2.8.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"}, {file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"}, @@ -2013,6 +2063,7 @@ version = "6.3.0" description = "Python docstring style checker" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, @@ -2030,6 +2081,7 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -2044,6 +2096,7 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -2064,6 +2117,7 @@ version = "3.3.4" description = "python code static checker" optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, @@ -2088,6 +2142,7 @@ version = "0.2.2" description = "Forked from pypika and streamline just for tortoise-orm" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ {file = "pypika_tortoise-0.2.2-py3-none-any.whl", hash = "sha256:e93190aedd95acb08b69636bc2328cc053b2c9971307b6d44405bc6d9f9b71a5"}, {file = "pypika_tortoise-0.2.2.tar.gz", hash = "sha256:f0fbc9e0c3ddc33118a5be69907428863849df60788e125edef1f46a6261d63b"}, @@ -2099,6 +2154,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2113,6 +2169,7 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -2127,6 
+2184,7 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -2138,6 +2196,7 @@ version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -2149,6 +2208,8 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -2176,6 +2237,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2238,6 +2300,7 @@ version = "5.2.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, @@ -2253,6 +2316,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2274,6 +2338,7 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -2292,6 +2357,7 @@ version = "0.13.2" description = "Rich toolkit for building command-line applications" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61"}, {file = "rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3"}, @@ -2308,6 +2374,7 @@ version = "3.1.0" description = "Manipulate well-formed Roman numerals" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, @@ -2323,6 +2390,7 @@ version = "0.11.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d"}, {file = "s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a"}, @@ -2340,6 +2408,7 @@ version = "1.5.4" description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -2351,6 +2420,7 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -2362,6 +2432,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -2373,6 +2444,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -2384,6 +2456,7 @@ version = "8.2.1" description = "Python documentation generator" optional = false python-versions = ">=3.11" +groups = ["dev"] files = [ {file = "sphinx-8.2.1-py3-none-any.whl", hash = "sha256:b5d2bb3cdf6207fcacde9f92085d2b97667b05b9c346eaec426ca4be8af505e9"}, {file = "sphinx-8.2.1.tar.gz", hash = "sha256:e4b932951b9c18b039f73b72e4e63afe967d90408700ec222b981ac24647c01e"}, @@ -2419,6 +2492,7 @@ version = "3.1.0" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = false python-versions = ">=3.11" +groups = ["dev"] files = [ {file = "sphinx_autodoc_typehints-3.1.0-py3-none-any.whl", hash = "sha256:67bdee7e27ba943976ce92ebc5647a976a7a08f9f689a826c54617b96a423913"}, {file = "sphinx_autodoc_typehints-3.1.0.tar.gz", hash = "sha256:a6b7b0b6df0a380783ce5b29150c2d30352746f027a3e294d37183995d3f23ed"}, @@ -2437,6 +2511,7 @@ version = "3.0.2" description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, @@ -2456,6 +2531,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -2472,6 +2548,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -2488,6 +2565,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -2504,6 +2582,7 @@ version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" +groups = ["dev"] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -2518,6 +2597,7 @@ version = "1.0.1" description = "A sphinx extension which 
renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -2532,6 +2612,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -2548,6 +2629,7 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -2564,6 +2646,7 @@ version = "0.45.3" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, @@ -2581,6 +2664,7 @@ version = "1.2.0" description = "String case converter." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "stringcase-1.2.0.tar.gz", hash = "sha256:48a06980661908efe8d9d34eab2b6c13aefa2163b3ced26972902e3bdfd87008"}, ] @@ -2591,6 +2675,7 @@ version = "8.5.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, @@ -2606,6 +2691,7 @@ version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -2617,6 +2703,7 @@ version = "0.21.7" description = "Easy async ORM for python, built with relations in mind" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ {file = "tortoise_orm-0.21.7-py3-none-any.whl", hash = "sha256:2229925885461f424673223ea1875bd5e6961384c766833af55a1ea11a9b25eb"}, {file = "tortoise_orm-0.21.7.tar.gz", hash = "sha256:8a790a931828aa37ac364b344c561e603422aced2af5e403f6790575da0f19c5"}, @@ -2644,6 +2731,7 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -2665,6 +2753,7 @@ version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, @@ -2682,6 +2771,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2693,6 +2783,7 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2710,6 +2801,7 @@ version = "0.34.0" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, @@ -2735,6 +2827,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -2786,6 +2880,7 @@ version = "1.0.4" description = "Simple, modern and high performance file watching and code reload in python." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, @@ -2869,6 +2964,7 @@ version = "15.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0"}, {file = "websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3"}, @@ -2947,6 +3043,8 @@ version = "1.2.0" description = "A small Python utility to set file creation time on Windows" optional = false python-versions = ">=3.5" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, @@ -2961,6 +3059,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -3049,6 +3148,7 @@ version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -3140,6 +3240,6 @@ multidict = ">=4.0" propcache = ">=0.2.0" [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.12" -content-hash = "9bb19721a52e690cd0da8adb19981586e51d3a6277068e54acec7d10ba671994" +content-hash = "5541f0420d0450921e10fc038bddc2f7bb75bd8264406cc2a6ca5c0acd4af8bf" diff --git a/pyproject.toml b/pyproject.toml index 442a537..43a90ae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,6 @@ redis = "^5.0.7" tortoise-orm = {extras = ["asyncpg"], version = "^0.21.5"} aerich = "^0.7.2" tenacity = "^8.5.0" -py-cord = {git = "https://github.com/mykolasolodukha/pycord.git", rev = "d8070a90a8ec12dbc64b703513c14fa80834861d"} aioboto3 = "^13.1.1" azure-storage-blob = "^12.23.1" azure-identity = "^1.18.0" From d20785461b885ec1d89487366c4a90ad33494fd7 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 8 Apr 2025 19:05:37 +0200 Subject: [PATCH 12/57] add user dm_channel field --- electro/flow_manager.py | 16 +++++++++++++--- electro/interfaces.py | 1 + electro/models.py | 3 +++ 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 4867f4f..f6a5612 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -61,7 +61,9 @@ async def get_or_create_user(cls, platform: str, user_data: schemas.User) -> Use return await platform_id.user @classmethod - async def get_or_create_channel(cls, platform: str, channel_data: schemas.Channel) -> Channel: + async def get_or_create_channel( + cls, platform: str, channel_data: schemas.Channel, user: typing.Optional[User] = None + ) -> Channel: """Save the channel to the database.""" platform_id, created = await PlatformId.get_or_create( platform_id=channel_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.CHANNEL @@ -78,6 +80,14 @@ async def get_or_create_channel(cls, platform: str, channel_data: schemas.Channe guild = await cls.get_or_create_guild(platform, channel_data.guild) channel.guild = guild await channel.save() + if user and channel.type == Channel.ChannelTypes.DM: + if not user.dm_channel: + user.dm_channel = channel + await user.save() + elif created: + platform_id.channel = user.dm_channel + await platform_id.save() + await channel.delete() return await platform_id.channel @classmethod @@ -85,7 +95,7 @@ async def save_message(cls, platform: str, message_data: schemas.ReceivedMessage """Save the message to the database.""" author = await cls.get_or_create_user(platform, message_data.author) if message_data.channel: - channel = await cls.get_or_create_channel(platform, message_data.channel) + channel = await cls.get_or_create_channel(platform, message_data.channel, author) else: channel = None return await Message.create( @@ 
-366,7 +376,7 @@ async def on_button_click(self, platform: str, button_data: schemas.ButtonClick, """Handle the buttons clicked by the users.""" # Save the button click to the database user = await self.analytics_manager.get_or_create_user(platform, button_data.user) - channel = await self.analytics_manager.get_or_create_channel(platform, button_data.channel) + channel = await self.analytics_manager.get_or_create_channel(platform, button_data.channel, user) try: button = await self.analytics_manager.save_button_click(button_data.id) except DisabledButtonClick: diff --git a/electro/interfaces.py b/electro/interfaces.py index 9147e71..2c7ea60 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -72,6 +72,7 @@ async def _format_channel(self, channel: Optional[Channel]) -> Dict[str, Any]: return { "id": channel.id, "name": channel.name, + "type": channel.type, "platform_ids": { identifier.platform: identifier.platform_id for identifier in await channel.platform_ids.all() }, diff --git a/electro/models.py b/electro/models.py index bd5d740..70e246d 100644 --- a/electro/models.py +++ b/electro/models.py @@ -76,6 +76,9 @@ class User(BaseModel): is_admin = fields.BooleanField(default=False) # guilds: fields.ManyToManyRelation["Guild"] # TODO: [2024-08-30 by Mykola] Allow multiple guilds for the user. + dm_channel: fields.ForeignKeyRelation[Channel] | Channel = fields.ForeignKeyField( + "electro.Channel", related_name="dm_users", null=True + ) guild: fields.ForeignKeyRelation[Guild] | Guild = fields.ForeignKeyField( "electro.Guild", related_name="users", null=True ) From 88b54ebbbe046e30e5f4964c4754196c882adf15 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 8 Apr 2025 19:14:42 +0200 Subject: [PATCH 13/57] fix dm_channel setup in manager --- electro/flow_manager.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/electro/flow_manager.py b/electro/flow_manager.py index f6a5612..0f53e79 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -77,18 +77,22 @@ async def get_or_create_channel( logger.info(f"Created the Channel record for {channel.id=}, {channel.name=}") await platform_id.save() if channel_data.guild: + logger.error(f"{channel_data=}, {channel_data.guild=}") guild = await cls.get_or_create_guild(platform, channel_data.guild) channel.guild = guild await channel.save() + channel = await platform_id.channel if user and channel.type == Channel.ChannelTypes.DM: if not user.dm_channel: user.dm_channel = channel await user.save() + return channel elif created: platform_id.channel = user.dm_channel await platform_id.save() await channel.delete() - return await platform_id.channel + return await platform_id.channel + return channel @classmethod async def save_message(cls, platform: str, message_data: schemas.ReceivedMessage) -> Message: From d779881c615fa3adf860d5583d1aa231d4cd97ab Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 9 Apr 2025 16:31:47 +0200 Subject: [PATCH 14/57] handle api connection --- electro/app.py | 25 +++++++------------------ electro/flow_connector.py | 3 ++- electro/flow_manager.py | 4 +++- electro/interfaces.py | 26 +++++++++++++++++++++++--- 4 files changed, 35 insertions(+), 23 deletions(-) diff --git a/electro/app.py b/electro/app.py index c891abd..65de93a 100644 --- a/electro/app.py +++ b/electro/app.py @@ -1,14 +1,13 @@ """The API server that works as an endpoint for all the Electro Interfaces.""" +from typing import Any, Dict + from fastapi import FastAPI, WebSocket, WebSocketDisconnect from fastapi.websockets 
import WebSocketState from tortoise.contrib.fastapi import register_tortoise from .enums import SupportedPlatforms -from .flow_connector import FlowConnectorEvents -from .flow_manager import global_flow_manager from .interfaces import APIInterface, WebSocketInterface -from .schemas import ButtonClick, ReceivedMessage from .toolkit.tortoise_orm import get_tortoise_config app = FastAPI( @@ -20,13 +19,14 @@ ) -@app.post("/message/platform/{platform}/") -async def process_message(message: ReceivedMessage, platform: str): +@app.post("/api/platform/{platform}") +async def process_message(platform: str, data: Dict[str, Any]): """Process the message.""" if platform not in SupportedPlatforms: raise ValueError(f"Platform {platform} is not supported.") interface = APIInterface() - return await global_flow_manager.on_message(platform, message, interface) + await interface.handle_incoming_action(platform, data) + return interface.messages.get() @app.websocket("/websocket/platform/{platform}/user/{user_id}") @@ -38,18 +38,7 @@ async def websocket_endpoint(websocket: WebSocket, platform: str, user_id: str): try: while websocket.application_state == WebSocketState.CONNECTED: data = await websocket.receive_json() - action = data.get("action") - content = data.get("content") - if action == FlowConnectorEvents.MESSAGE: - content = ReceivedMessage.model_validate(content) - await global_flow_manager.on_message(platform, content, interface) - if action == FlowConnectorEvents.BUTTON_CLICK: - content = ButtonClick.model_validate(content) - await global_flow_manager.on_button_click(platform, content, interface) - if action == FlowConnectorEvents.MEMBER_JOIN: - pass - if action == FlowConnectorEvents.MEMBER_UPDATE: - pass + await interface.handle_incoming_action(platform, data) except WebSocketDisconnect: await interface.disconnect() diff --git a/electro/flow_connector.py b/electro/flow_connector.py index f5ee6e9..7f0e423 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -7,13 +7,14 @@ from typing import Any, TYPE_CHECKING from ._common import ContextInstanceMixin -from .interfaces import BaseInterface from .models import Button, Channel, Message, User from .storage import ChannelData, UserData if TYPE_CHECKING: from electro import FlowManager + from .interfaces import BaseInterface + class FlowConnectorEvents(str, Enum): """The events that are used in the `FlowConnector`.""" diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 0f53e79..7e3336d 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -12,7 +12,6 @@ from .flow_connector import FlowConnectorEvents # from decorators import fail_safely -from .interfaces import BaseInterface from .models import Button, Channel, Guild, Message, PlatformId, User, UserStateChanged from .scopes import FlowScopes from .settings import settings @@ -20,6 +19,9 @@ from .toolkit.loguru_logging import logger from .toolkit.tortoise_orm import Model +if typing.TYPE_CHECKING: + from .interfaces import BaseInterface + class AnalyticsManager(ContextInstanceMixin): """The object that manages the analytics.""" diff --git a/electro/interfaces.py b/electro/interfaces.py index 2c7ea60..e612035 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -3,12 +3,15 @@ from abc import ABC, abstractmethod from contextlib import asynccontextmanager from io import BytesIO -from typing import Any, Dict, List, Optional, TYPE_CHECKING +from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union from fastapi import WebSocket 
from .enums import ResponseTypes +from .flow_connector import FlowConnectorEvents +from .flow_manager import global_flow_manager from .models import BotMessage, Button, Channel, File, Guild, Role, User +from .schemas import ButtonClick, ReceivedMessage from .settings import settings from .toolkit.images_storage.universal_image_storage import universal_image_storage @@ -98,7 +101,7 @@ async def send_message( user: Optional[User], channel: Optional[Channel], buttons: Optional[List["BaseButton"]] = None, - delete_after: Optional[int] = None, + delete_after: Optional[Union[int, str]] = None, ): """ Send a formatted message to the client by using `format_message`. @@ -208,6 +211,23 @@ async def with_constant_typing(self, user: User, channel: Channel): yield await self.set_typing(user, channel, ResponseTypes.STOP_TYPING) + async def handle_incoming_action(self, platform: str, data: Dict[str, Any]) -> Tuple[Dict[str, str], int]: + """ + Handle incoming actions from the client. + """ + action = data.get("action") + content = data.get("content") + if action == FlowConnectorEvents.MESSAGE: + content = ReceivedMessage.model_validate(content) + await global_flow_manager.on_message(platform, content, self) + if action == FlowConnectorEvents.BUTTON_CLICK: + content = ButtonClick.model_validate(content) + await global_flow_manager.on_button_click(platform, content, self) + if action == FlowConnectorEvents.MEMBER_JOIN: + pass + if action == FlowConnectorEvents.MEMBER_UPDATE: + pass + @abstractmethod async def send_json(self, data: Dict[str, Any]): """ @@ -255,4 +275,4 @@ async def send_json(self, data: Dict[str, str]): self.messages.get().append(data) async def stop_process(self, *args, **kwargs): - return self.messages.get() + pass From 7ce20031823abd035e4bec4150e9df75f13e8ed9 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 9 Apr 2025 17:38:26 +0200 Subject: [PATCH 15/57] handle api key auth --- electro/app.py | 15 +++++++++++++-- electro/settings.py | 2 ++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/electro/app.py b/electro/app.py index 65de93a..5a964e5 100644 --- a/electro/app.py +++ b/electro/app.py @@ -1,19 +1,30 @@ """The API server that works as an endpoint for all the Electro Interfaces.""" -from typing import Any, Dict +from typing import Any, Dict, Optional -from fastapi import FastAPI, WebSocket, WebSocketDisconnect +from fastapi import Depends, FastAPI, Header, HTTPException, WebSocket, WebSocketDisconnect from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise from .enums import SupportedPlatforms from .interfaces import APIInterface, WebSocketInterface +from .settings import settings from .toolkit.tortoise_orm import get_tortoise_config + +def validate_api_key(x_api_key: Optional[str] = Header(default=None)) -> None: + """Validate the API key provided in the request header.""" + if not x_api_key and settings.API_KEY: + raise HTTPException(status_code=401, detail="API Key is missing") + if settings.API_KEY and x_api_key != settings.API_KEY: + raise HTTPException(status_code=401, detail="Invalid API Key") + + app = FastAPI( title="Electro API", description="The API server that works as an endpoint for all the Electro Interfaces.", version="0.1.0", + dependencies=[Depends(validate_api_key)], # docs_url="/", # redoc_url=None, ) diff --git a/electro/settings.py b/electro/settings.py index 74ef5fa..88d71f1 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -24,6 +24,8 @@ class Settings(BaseSettings): 
DEFAULT_SLEEP_TIME: int = 3 # seconds SLEEP_TIME_PER_CHARACTER: float = 0.05 HTTPX_CLIENT_DEFAULT_TIMEOUT: int = 60 + API_KEY_HEADER_NAME: str = "x-api-key" + API_KEY: str = "server-token" # Build urls for static files by removing root path and adding the server url SERVER_URL: str = "http://localhost:8000" APP_ROOT: str = "/app" From 630b4f8005800faddd0812863749c4e911a7c3d2 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 10 Apr 2025 15:08:51 +0200 Subject: [PATCH 16/57] fix linter issues --- .pylintrc | 11 +- electro/app.py | 2 +- electro/contrib/buttons.py | 32 ++--- electro/contrib/storage_buckets.py | 28 ++-- electro/contrib/storage_substitutions.py | 6 +- electro/enums.py | 1 + electro/flow.py | 8 +- electro/flow_block.py | 4 - electro/flow_manager.py | 35 +++-- electro/flow_step.py | 18 +-- electro/interfaces.py | 122 +++++++++++++----- electro/models.py | 11 +- electro/schemas.py | 2 - electro/storage.py | 6 - electro/substitutions.py | 21 ++- .../azure_blob_storage_service.py | 8 +- .../storage_services/s3_service.py | 2 +- electro/toolkit/redis_storage.py | 21 ++- electro/toolkit/tortoise_orm.py | 2 - electro/triggers.py | 2 +- 20 files changed, 181 insertions(+), 161 deletions(-) diff --git a/.pylintrc b/.pylintrc index 6214b28..9527a0a 100644 --- a/.pylintrc +++ b/.pylintrc @@ -3,11 +3,14 @@ max-line-length=120 disable= + C0114, # Missing module docstring + C0115, # Missing class docstring + C0116, # Missing function or method docstring + C0302, # Too many lines in module E0401, # Import error: Ignored because imports actually work R0801, # Duplicate code + R0902, # Too many instance attributes R0903, # Too few public methods - C0114, # Missing module docstring - C0302, # Too many lines in module - W0511, # TODO R0913, # Too many arguments - R0917, # Too many positional arguments \ No newline at end of file + R0917, # Too many positional arguments + W0511, # TODO \ No newline at end of file diff --git a/electro/app.py b/electro/app.py index 5a964e5..34510bd 100644 --- a/electro/app.py +++ b/electro/app.py @@ -41,7 +41,7 @@ async def process_message(platform: str, data: Dict[str, Any]): @app.websocket("/websocket/platform/{platform}/user/{user_id}") -async def websocket_endpoint(websocket: WebSocket, platform: str, user_id: str): +async def websocket_endpoint(websocket: WebSocket, platform: str, user_id: str): # pylint: disable=W0613 if platform not in SupportedPlatforms: raise ValueError(f"Platform {platform} is not supported.") interface = WebSocketInterface() diff --git a/electro/contrib/buttons.py b/electro/contrib/buttons.py index d3a1db0..8cf4035 100644 --- a/electro/contrib/buttons.py +++ b/electro/contrib/buttons.py @@ -14,17 +14,17 @@ class ButtonStyle(Enum): """A class to store the button styles.""" - primary = 1 - secondary = 2 - success = 3 - danger = 4 - - blurple = 1 - grey = 2 - gray = 2 - green = 3 - red = 4 - url = 5 + PRIMARY = 1 + SECONDARY = 2 + SUCCESS = 3 + DANGER = 4 + + BLURPLE = 1 + GREY = 2 + GRAY = 2 + GREEN = 3 + RED = 4 + URL = 5 def __int__(self): return self.value @@ -36,7 +36,7 @@ class BaseButton(ABC): def __init__( self, label: str | None = None, - style: ButtonStyle = ButtonStyle.primary, + style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = False, ): @@ -56,7 +56,7 @@ class DataButton(BaseButton): def __init__( self, label: str | None = None, - style: ButtonStyle = ButtonStyle.primary, + style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = False, **kwargs, @@ 
-70,9 +70,9 @@ class ActionButton(BaseButton): action_callback: CALLBACK_TYPE - def __init__(self, label: str, action_callback: CALLBACK_TYPE = None, *args, **kwargs): + def __init__(self, label: str, action_callback: CALLBACK_TYPE = None, **kwargs): """Initialize the `ActionButton`.""" - super().__init__(label=label, *args, **kwargs) + super().__init__(label=label, **kwargs) if isinstance(action_callback, BaseFlowStep): if action_callback.non_blocking: @@ -117,7 +117,7 @@ class ConfirmButton(ActionButton): def __init__( self, label: str | None = None, - style: ButtonStyle = ButtonStyle.primary, + style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = True, ): diff --git a/electro/contrib/storage_buckets.py b/electro/contrib/storage_buckets.py index 27f053e..3bd25e2 100644 --- a/electro/contrib/storage_buckets.py +++ b/electro/contrib/storage_buckets.py @@ -27,11 +27,10 @@ def __init__( self, data_factory: typing.Callable[[], typing.Awaitable[VALUE | None]], index: int | None = None, - *args, **kwargs, ): """Initialize the Storage Substitution.""" - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.data_factory = data_factory self.index = index @@ -104,14 +103,14 @@ class BaseStorageBucketElement(Generic[VALUE], ABC): """The class for storage elements.""" _type: type[VALUE] - _scope: FlowScopes + _storage_data: StorageData[VALUE] | None def __init__(self, *, _type: type[VALUE], _scope: FlowScopes = FlowScopes.USER, **__): """Initialize the storage element. Called by the metaclass.""" - self._type = _type self._scope = _scope + self._storage_data = None @staticmethod async def get_current_user_id() -> int: @@ -165,28 +164,25 @@ async def get_data(self, default: VALUE | None = None) -> VALUE | None: """Get the data for the storage element.""" if self._scope == FlowScopes.USER: return await self._get_user_data(default=default) - elif self._scope == FlowScopes.CHANNEL: + if self._scope == FlowScopes.CHANNEL: return await self._get_channel_data(default=default) - else: - raise NotImplementedError(f"Unknown scope: {self._scope}") + raise NotImplementedError(f"Unknown scope: {self._scope}") async def set_data(self, data: VALUE): """Set the data for the storage element.""" if self._scope == FlowScopes.USER: return await self._set_user_data(data) - elif self._scope == FlowScopes.CHANNEL: + if self._scope == FlowScopes.CHANNEL: return await self._set_channel_data(data) - else: - raise NotImplementedError(f"Unknown scope: {self._scope}") + raise NotImplementedError(f"Unknown scope: {self._scope}") async def delete_data(self): """Delete the data for the storage element.""" if self._scope == FlowScopes.USER: return await self._delete_user_data() - elif self._scope == FlowScopes.CHANNEL: + if self._scope == FlowScopes.CHANNEL: return await self._delete_channel_data() - else: - raise NotImplementedError(f"Unknown scope: {self._scope}") + raise NotImplementedError(f"Unknown scope: {self._scope}") async def __aenter__(self) -> VALUE: """Get the data for the storage element.""" @@ -484,7 +480,7 @@ def __new__(mcs, name, bases, namespace, **kwargs): class BasePostgresStorageBucket(BaseStorageBucket, metaclass=PostgresStorageBucketMeta): """The base class for Postgres storage buckets.""" - __abstract = True + __abstract = True # pylint: disable=W0238 _model: tortoise.Model @@ -545,7 +541,7 @@ async def empty(cls): class BaseAssistantsStorageBucket(BaseStorageBucket, ABC): """Base storage bucket for the `GPTAssistantStep`s.""" - __abstract = True + 
__abstract = True # pylint: disable=W0238 thread_id: StorageBucketElement[str] @@ -553,7 +549,7 @@ class BaseAssistantsStorageBucket(BaseStorageBucket, ABC): class BasePostgresAssistantsStorageBucket(BasePostgresStorageBucket, BaseAssistantsStorageBucket): """Base storage bucket for the `GPTAssistantStep`s.""" - __abstract = True + __abstract = True # pylint: disable=W0238 # endregion diff --git a/electro/contrib/storage_substitutions.py b/electro/contrib/storage_substitutions.py index 241f410..97f8e3e 100644 --- a/electro/contrib/storage_substitutions.py +++ b/electro/contrib/storage_substitutions.py @@ -18,7 +18,6 @@ def __init__( tortoise_model_field_name: str, filters: dict[str, Any] | None = None, ensure_list_result: bool = False, - *args, **kwargs, ): """Initialize the substitution.""" @@ -27,14 +26,13 @@ def __init__( self.filters = filters or {} self.ensure_list_result = ensure_list_result - super().__init__(callback=self.get_value_for_connector, *args, **kwargs) + super().__init__(callback=self.get_value_for_connector, **kwargs) @staticmethod async def resolve_filters(flow_connector: FlowConnector, filters: dict[str, Any]) -> dict[str, Any]: - # noinspection PyProtectedMember return { key: ( - await value.get_data(default=value._type()) + await value.get_data(default=value._type()) # pylint: disable=W0212 if isinstance(value, BaseStorageBucketElement) else await value.resolve(flow_connector) if isinstance(value, BaseSubstitution) else value ) diff --git a/electro/enums.py b/electro/enums.py index 7748066..09296eb 100644 --- a/electro/enums.py +++ b/electro/enums.py @@ -20,3 +20,4 @@ class ResponseTypes(str, Enum): REMOVE_ROLE = "remove_role" START_TYPING = "start_typing" STOP_TYPING = "stop_typing" + STOP_PROCESS = "stop_process" diff --git a/electro/flow.py b/electro/flow.py index d747a50..0f0a07a 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -24,8 +24,6 @@ class FlowFinished(Exception): """The exception that is raised when the `Flow` is finished.""" - pass - class FlowMeta(ABCMeta): def __new__(mcs: typing.Type[FlowMeta], name, bases, namespace, **kwargs): @@ -210,7 +208,7 @@ async def check(self, connector: FlowConnector, scope: FlowScopes | None = None) async def check_triggers(self, connector: FlowConnector, scope: FlowScopes | None = None) -> bool: """Check if the `Flow` can be triggered.""" - return any([await trigger.check(connector, scope=scope) for trigger in self._triggers]) + return any(await trigger.check(connector, scope=scope) for trigger in self._triggers) async def _update_connector_pre_run(self, connector: FlowConnector, *_, **__kwargs) -> FlowConnector | None: """Update the connector before running the `Flow`.""" @@ -334,7 +332,7 @@ async def step( ) next_step_name, next_step = list(self._steps.items())[next_step_index] logger.info(f"Next step name: {next_step_name} [{connector.user.id=}]") - except (IndexError, StopIteration): + except (IndexError, StopIteration) as e: if ( (iterables := await self.get_iterables(connector)) # and isinstance( @@ -348,7 +346,7 @@ async def step( next_step_name, next_step = list(self._steps.items())[next_step_index] logger.info(f"Next step name: {next_step_name} [{connector.user.id=}]") else: - raise FlowFinished() + raise FlowFinished() from e # Set the state for the user default_state_parts: list[str] = [self._state_prefix, iterator_index, next_step_name] diff --git a/electro/flow_block.py b/electro/flow_block.py index 66f141f..bbfc10f 100644 --- a/electro/flow_block.py +++ b/electro/flow_block.py @@ -10,8 +10,6 @@ class 
FlowBlockFinished(Exception): """The exception that is raised when the `FlowBlock` is finished.""" - pass - class FlowBlockMeta(ABCMeta): pass @@ -20,8 +18,6 @@ class FlowBlockMeta(ABCMeta): class BaseFlowBlock(ABC, metaclass=FlowBlockMeta): """The base class for `FlowBlock`.""" - pass - @dataclass class FlowBlock(BaseFlowBlock): diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 7e3336d..c3c7f43 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -89,7 +89,7 @@ async def get_or_create_channel( user.dm_channel = channel await user.save() return channel - elif created: + if created: platform_id.channel = user.dm_channel await platform_id.save() await channel.delete() @@ -280,13 +280,13 @@ async def _dispatch(self, flow_connector: FlowConnector): f"The message is a command that is not handled by any of the flows: " f"{flow_connector.message.content}" ) - else: - logger.warning( - f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" - ) - raise EventCannotBeProcessed( - f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" - ) + + logger.warning( + f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" + ) + raise EventCannotBeProcessed( + f"Out-of-scope `{scope}` command `{flow_connector.message.content}` is not handled by the flows" + ) # Get all the flows that can be run: # Check if the flow can be run (maybe the user is in the middle of the flow) @@ -300,7 +300,7 @@ async def _dispatch(self, flow_connector: FlowConnector): flows_by_scope = defaultdict(list) for flow in flows_that_can_be_run: # noinspection PyProtectedMember - flows_by_scope[flow._scope].append(flow) + flows_by_scope[flow._scope].append(flow) # pylint: disable=W0212 # If it's not a private channel, Channel-scoped flows get the priority if flow_connector.channel.type == Channel.ChannelTypes.CHANNEL and ( @@ -322,17 +322,16 @@ async def _dispatch(self, flow_connector: FlowConnector): if scope == FlowScopes.USER: if flow_connector.event == FlowConnectorEvents.MESSAGE: return await self._finish_flow(flow_connector) - logger.warning(f"Received an event that cannot be processed: {flow_connector.event}") raise EventCannotBeProcessed(f"Received an event that cannot be processed: {flow_connector.event}") - else: - logger.debug( - "Out-of-scope `{scope}` event cannot be processed: " - "`{flow_connector.event}` in `#{flow_connector.channel}`", - scope=scope, - flow_connector=flow_connector, - ) - return # Do not raise an exception, as it's not an error + + logger.debug( + "Out-of-scope `{scope}` event cannot be processed: " + "`{flow_connector.event}` in `#{flow_connector.channel}`", + scope=scope, + flow_connector=flow_connector, + ) + return # Do not raise an exception, as it's not an error async def dispatch(self, flow_connector: FlowConnector): """Dispatch the flow.""" diff --git a/electro/flow_step.py b/electro/flow_step.py index 01363a9..e117155 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -9,12 +9,10 @@ from dataclasses import dataclass from enum import Enum from io import BytesIO -from urllib.parse import urlparse from openai import AsyncOpenAI, NOT_GIVEN from .contrib.storage_buckets import BaseStorageBucketElement, StorageBucketElement -from .enums import ResponseTypes from .flow_connector import FlowConnectorEvents from .models import Channel, File from .settings import settings @@ -32,8 +30,6 @@ class FlowStepDone(Exception): """The 
exception that is raised when the `BaseFlowStep` is finished.""" - pass - class BaseFlowStep(ABC): """The base class for `BaseFlowStep`.""" @@ -103,19 +99,19 @@ class StorageMixin(ABC): async def _get_user_answer(self) -> typing.Any: """Get the user answer.""" - if self.answers_storage: + if self.answers_storage is not None: async with self.answers_storage as answers_storage: return answers_storage.get() async def _set_user_answer(self, user_answer: typing.Any): """Set the user answer.""" - if self.answers_storage: + if self.answers_storage is not None: async with self.answers_storage as answers_storage: answers_storage.set(user_answer) async def clear_storage(self) -> None: """Clear the storage.""" - if self.answers_storage: + if self.answers_storage is not None: await self.answers_storage.delete_data() @@ -153,13 +149,11 @@ async def run(self, connector: FlowConnector): # Run the `BaseFlowStep` await result.run(connector) - except Exception as e: + except Exception as e: # pylint: disable=W0718 if self.skip_on_failure: logger.exception(e) - raise FlowStepDone() from e - else: - raise e + raise e if self.non_blocking: raise FlowStepDone() @@ -536,7 +530,7 @@ async def process_response(self, connector: FlowConnector): file_name=attachment.filename, ) - except Exception as exception: + except Exception as exception: # pylint: disable=W0718 logger.error(f"Failed to save the file: {exception}") return await self.send_message(connector, "Failed to save the file.") diff --git a/electro/interfaces.py b/electro/interfaces.py index e612035..85cd6f8 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -7,7 +7,7 @@ from fastapi import WebSocket -from .enums import ResponseTypes +from .enums import ResponseTypes, SupportedPlatforms from .flow_connector import FlowConnectorEvents from .flow_manager import global_flow_manager from .models import BotMessage, Button, Channel, File, Guild, Role, User @@ -21,15 +21,18 @@ class BaseInterface(ABC): """ - Interface class for the Electro framework. + Interface class for the Electro framework. This class is used to receive messages and events from the client and + send back tasks to be executed. + + To use it, you need to inherit from this class and implement the `send_json` method. This method is called + whenever a task is sent to the client. You can also override the `handle_incoming_action` method to handle + more incoming actions from the client. """ async def _create_and_format_buttons( self, buttons: Optional[List["BaseButton"]] = None, bot_message: Optional[BotMessage] = None ) -> List[Button]: - """ - Format the buttons to be sent to the client. - """ + """Format the buttons to be sent to the client.""" response = [] for button in buttons or []: button_object = await Button.create( @@ -53,9 +56,7 @@ async def _create_and_format_buttons( return response async def _format_user(self, user: Optional[User]) -> Dict[str, Any]: - """ - Format the user to be sent to the client. - """ + """Format the user to be sent to the client.""" if not user: return None return { @@ -67,9 +68,7 @@ async def _format_user(self, user: Optional[User]) -> Dict[str, Any]: } async def _format_channel(self, channel: Optional[Channel]) -> Dict[str, Any]: - """ - Format the channel to be sent to the client. 
- """ + """Format the channel to be sent to the client.""" if not channel: return None return { @@ -82,9 +81,7 @@ async def _format_channel(self, channel: Optional[Channel]) -> Dict[str, Any]: } async def _format_guild(self, guild: Optional[Guild]) -> Dict[str, Any]: - """ - Format the guild to be sent to the client. - """ + """Format the guild to be sent to the client.""" if not guild: return None return { @@ -104,7 +101,17 @@ async def send_message( delete_after: Optional[Union[int, str]] = None, ): """ - Send a formatted message to the client by using `format_message`. + Send a formatted message to the client. + + Arguments: + message: The message to be sent. + user: The user who will receive the message. + channel: The channel the message is being sent to. + buttons: A list of buttons to be included with the message. + delete_after: The time in seconds after which the message should be deleted. + - if None, the message will not be deleted. + - if "next", the message will be deleted after the next message is sent. + - if an integer, the message will be deleted after that many seconds. """ bot_message = await BotMessage.create(receiver=user, channel=channel, content=message) data = { @@ -131,7 +138,24 @@ async def send_image( delete_after: Optional[int] = None, ): """ - Send images to the client. + Send images to the client as a link: + + If the image is a File, the link to the blob storage location will be sent. + If the image is a BytesIO object, it will be uploaded to blob storage and the link will be sent. + If the image is a string, it will be sent as is so make sure it is a valid URL. + If the image is a pathlib.Path object, it will be sent as a link to the static file endpoint. + + Arguments: + image: The image to be sent. + user: The user who will receive the image. + channel: The channel the image is being sent to. + caption: The caption to be included with the image. + buttons: A list of buttons to be included with the image. + delete_after: The time in seconds after which the image should be deleted. + - if None, the image will not be deleted. + - if "next", the image will be deleted after the next message is sent. + - if an integer, the image will be deleted after that many seconds. + """ if buttons and not caption: raise ValueError("A caption must be provided when sending an image with buttons.") @@ -168,6 +192,13 @@ async def send_image( ) async def add_role(self, user: User, role: Role): + """ + Assign a role to a user. + + Arguments: + user: The user to whom the role will be assigned. + role: The role to be assigned to the user. + """ await self.send_json( { "action": ResponseTypes.ADD_ROLE, @@ -180,6 +211,13 @@ async def add_role(self, user: User, role: Role): ) async def remove_role(self, user: User, role: Role): + """ + Remove a role from a user. + + Arguments: + user: The user from whom the role will be removed. + role: The role to be removed from the user. + """ await self.send_json( { "action": ResponseTypes.REMOVE_ROLE, @@ -192,6 +230,14 @@ async def remove_role(self, user: User, role: Role): ) async def set_typing(self, user: User, channel: Channel, action: ResponseTypes): + """ + Set the typing indicator for a user or a channel. + + Arguments: + user: The user for whom the typing indicator will be set. + channel: The channel in which the typing indicator will be set. + action: The action to be performed (either "start_typing" or "stop_typing"). 
+ """ if action not in [ResponseTypes.START_TYPING, ResponseTypes.STOP_TYPING]: raise ValueError("Action must be either `START_TYPING` or `STOP_TYPING`.") await self.send_json( @@ -204,6 +250,19 @@ async def set_typing(self, user: User, channel: Channel, action: ResponseTypes): } ) + async def stop_process(self): + """ + Stop the process for the client. + + This is used to stop the process for the client and close the connection. + """ + await self.send_json( + { + "action": ResponseTypes.STOP_PROCESS, + "content": {}, + } + ) + @asynccontextmanager async def with_constant_typing(self, user: User, channel: Channel): """An asynchronous context manager for typing indicators or other tasks.""" @@ -211,9 +270,15 @@ async def with_constant_typing(self, user: User, channel: Channel): yield await self.set_typing(user, channel, ResponseTypes.STOP_TYPING) - async def handle_incoming_action(self, platform: str, data: Dict[str, Any]) -> Tuple[Dict[str, str], int]: + async def handle_incoming_action( + self, platform: SupportedPlatforms, data: Dict[str, Any] + ) -> Tuple[Dict[str, str], int]: """ - Handle incoming actions from the client. + Handle incoming actions from the client. The action data is validated and processed. + + Arguments: + platform: The platform from which the action was received (). + data: The data received from the client. """ action = data.get("action") content = data.get("content") @@ -230,23 +295,12 @@ async def handle_incoming_action(self, platform: str, data: Dict[str, Any]) -> T @abstractmethod async def send_json(self, data: Dict[str, Any]): - """ - Send an action for the client. - """ - raise NotImplementedError - - @abstractmethod - async def stop_process(self, *args, **kwargs): + """Send an task for the client to process.""" raise NotImplementedError class WebSocketInterface(BaseInterface): - """ - WebSocket Interface for the Electro framework. - - On the server side, the WebSocketInterface is used to send messages to the client, - If you want to send a message to the client in a Flow, you can use the `send_message` method. - """ + """WebSocket Interface for the Electro framework.""" def __init__(self): self.interface: WebSocket | None = None @@ -260,6 +314,7 @@ async def disconnect(self): self.interface = None async def stop_process(self, code: int = 1000, reason: Optional[str] = None): + await super().stop_process() await self.interface.close(code, reason) async def send_json(self, data: Dict[str, Any]): @@ -267,12 +322,11 @@ async def send_json(self, data: Dict[str, Any]): class APIInterface(BaseInterface): + """API Interface for the Electro framework.""" + def __init__(self): self.messages = contextvars.ContextVar("messages") self.messages.set([]) async def send_json(self, data: Dict[str, str]): self.messages.get().append(data) - - async def stop_process(self, *args, **kwargs): - pass diff --git a/electro/models.py b/electro/models.py index 70e246d..6791a6a 100644 --- a/electro/models.py +++ b/electro/models.py @@ -23,7 +23,7 @@ class BaseModel(Model): is_deleted = fields.BooleanField(default=False) date_deleted = fields.DatetimeField(null=True) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """The metaclass for the base model.""" abstract = True @@ -36,7 +36,8 @@ class PlatformId(Model): """ This model is used to store the IDs of the users, channels, and guilds on different platforms. - It is used to link the users, channels, and guilds on different platforms to the same user, channel, or guild in the database. 
+ It is used to link the users, channels, and guilds on different platforms to the same user, channel, + or guild in the database. Attributes: id (int): The ID of the platform ID. @@ -246,7 +247,7 @@ def __init_subclass__(cls, **kwargs): cls.storage_models.append(cls) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """The metaclass for the model.""" abstract = True @@ -264,7 +265,7 @@ class BaseImagesStepStorageModel(BaseStorageModel): load_more_button_custom_id = fields.CharField(max_length=255, null=True) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """The metaclass for the model.""" abstract = True @@ -275,7 +276,7 @@ class BaseAssistantsStorageModel(BaseStorageModel): thread_id = fields.CharField(max_length=255, null=True) - class Meta: # pylint: disable=too-few-public-methods + class Meta: """The metaclass for the model.""" abstract = True diff --git a/electro/schemas.py b/electro/schemas.py index d1fb1df..ff2b7fc 100644 --- a/electro/schemas.py +++ b/electro/schemas.py @@ -1,7 +1,5 @@ from pydantic import BaseModel -from .enums import SupportedPlatforms - class PlatformId(BaseModel): id: str diff --git a/electro/storage.py b/electro/storage.py index 7a88d46..4fa8721 100644 --- a/electro/storage.py +++ b/electro/storage.py @@ -19,20 +19,14 @@ class BaseData(dict): """The base class for the data.""" - pass - class UserData(BaseData): """The data for a user.""" - pass - class ChannelData(BaseData): """The data for a channel.""" - pass - class BaseFlowStorage(ABC): """The base class for the storage.""" diff --git a/electro/substitutions.py b/electro/substitutions.py index 46520a4..a63fad8 100644 --- a/electro/substitutions.py +++ b/electro/substitutions.py @@ -34,11 +34,9 @@ async def _resolve(self, connector: FlowConnector) -> VALUE: async def resolve(self, connector: FlowConnector) -> VALUE: """Resolve the value for the connector.""" value = await self._resolve(connector) or self.default_value - if self.formatter and value is not None: return self.formatter(value) - else: - return str(value) if self.ensure_str_result else value + return str(value) if self.ensure_str_result else value class ManualRedisStorageSubstitution(BaseSubstitution): @@ -50,9 +48,9 @@ class ManualRedisStorageSubstitution(BaseSubstitution): is_chat_specific: bool = False def __init__( - self, redis_storage: RedisStorage, redis_storage_key_name: str, is_chat_specific: bool = False, *args, **kwargs + self, redis_storage: RedisStorage, redis_storage_key_name: str, is_chat_specific: bool = False, **kwargs ): - super().__init__(*args, **kwargs) + super().__init__(**kwargs) self.redis_storage = redis_storage self.redis_storage_key_name = redis_storage_key_name @@ -75,17 +73,16 @@ async def _resolve(self, connector: FlowConnector) -> str: data: VALUE = redis_user_data.get(self.redis_storage_key_name, self.default_value) except (TypeError, IndexError) as exception: return str(f"{exception} in REDIS STORAGE SUBSTITUTION for key: {self.redis_storage_key_name}") - else: - return data + return data class AttributeSubstitution(BaseSubstitution): substitution_object: BaseFlowSubstitutionObject attribute: str | None = None - def __init__(self, substitution_object: BaseFlowSubstitutionObject, attribute: str | None = None, *args, **kwargs): + def __init__(self, substitution_object: BaseFlowSubstitutionObject, attribute: str | None = None, **kwargs): """The Substitution object that would be fetched from the attribute of the object.""" - super().__init__(*args, **kwargs) + 
super().__init__(**kwargs) self.substitution_object = substitution_object self.attribute = attribute @@ -99,8 +96,7 @@ async def _resolve(self, connector: FlowConnector) -> VALUE: if self.attribute: return getattr(real_object, self.attribute) - else: - return real_object + return real_object class CallbackSubstitution(BaseSubstitution[VALUE]): @@ -150,8 +146,7 @@ class GlobalAbstractChannel(str, Enum): async def resolve_channel(abstract_channel: GlobalAbstractChannel, user: User) -> Channel: """Resolve the channel by the name.""" if abstract_channel == GlobalAbstractChannel.DM_CHANNEL: - return None - + return await user.dm_channel raise ValueError(f"Unknown channel: {abstract_channel}") diff --git a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py index 911544f..cc2e034 100644 --- a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py +++ b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py @@ -63,11 +63,11 @@ async def download_image(self, object_key: str) -> BytesIO: blob_client = container_client.get_blob_client(object_key) try: image_data = await blob_client.download_blob() - except ResourceNotFoundError: - raise FileNotFoundError(f"Image with key '{object_key}' not found in the Azure Blob Storage.") + except ResourceNotFoundError as e: + raise FileNotFoundError(f"Image with key '{object_key}' not found in the Azure Blob Storage.") from e return BytesIO(await image_data.readall()) - async def _create_image_access_token(self, blob_client: BlobClient, account_key: str) -> str: + async def _create_image_access_token(self, blob_client: BlobClient) -> str: start_time = datetime.datetime.now(datetime.timezone.utc) expiry_time = start_time + datetime.timedelta(days=1) return generate_blob_sas( @@ -85,5 +85,5 @@ async def get_image_url(self, object_key: str) -> str: async with await self.blob_service_client as client: container_client = client.get_container_client(self.container_name) blob_client = container_client.get_blob_client(object_key) - token = await self._create_image_access_token(blob_client, settings.AZURE_CLIENT_SECRET) + token = await self._create_image_access_token(blob_client) return f"{blob_client.url}?{token}" diff --git a/electro/toolkit/images_storage/storage_services/s3_service.py b/electro/toolkit/images_storage/storage_services/s3_service.py index 42bf96b..54e8d2a 100644 --- a/electro/toolkit/images_storage/storage_services/s3_service.py +++ b/electro/toolkit/images_storage/storage_services/s3_service.py @@ -103,7 +103,7 @@ async def get_image_url(self, object_key: str) -> str: :return: URL of the image """ - async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL) as s3: + async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL): try: url = f"{settings.S3_ENDPOINT_URL}/{self.bucket_name}/{object_key}" logger.info(f"Image URL: {url}") diff --git a/electro/toolkit/redis_storage.py b/electro/toolkit/redis_storage.py index 8e5a40f..f1aa4aa 100644 --- a/electro/toolkit/redis_storage.py +++ b/electro/toolkit/redis_storage.py @@ -13,10 +13,10 @@ from ..settings import settings -if not (redis_url := settings.REDIS_URL): - redis_url = f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/{settings.REDIS_DB}" +if not (REDIS_URL := settings.REDIS_URL): + REDIS_URL = f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/{settings.REDIS_DB}" -redis_config: dict = 
dj_redis_url.config(default=str(redis_url)) +redis_config: dict = dj_redis_url.config(default=str(REDIS_URL)) STATE_KEY = "state" STATE_DATA_KEY = "data" @@ -130,7 +130,7 @@ async def set_state( *, chat: typing.Union[str, int, None] = None, user: typing.Union[str, int, None] = None, - state: typing.Optional[typing.AnyStr] = None, + state: typing.Union[str, bytes, None] = None, ): """ Set new state for user in chat @@ -333,12 +333,8 @@ async def reset_bucket( @staticmethod def resolve_state(value): - if value is None: - return - - if isinstance(value, str): + if value is None or isinstance(value, str): return value - return str(value) @@ -387,7 +383,7 @@ def __init__( decode_responses=True, **kwargs, ) - + self._loop = loop self._prefix = (prefix,) self._state_ttl = state_ttl self._data_ttl = data_ttl @@ -432,7 +428,7 @@ async def set_state( *, chat: typing.Union[str, int, None] = None, user: typing.Union[str, int, None] = None, - state: typing.Optional[typing.AnyStr] = None, + state: typing.Union[str, bytes, None] = None, ): chat, user = self.check_address(chat=chat, user=user) key = self.generate_key(chat, user, STATE_KEY) @@ -544,10 +540,9 @@ async def get_states_list(self) -> typing.List[typing.Tuple[str, str]]: # According to the structure above, it's better to write this expression +# https://devcenter.heroku.com/articles/ah-redis-stackhero#how-to-avoid-error-connection-closed-by-server-with-redis-and-python redis_storage = RedisStorage( **parse_config(redis_config), - # Configs below are from here: - # https://devcenter.heroku.com/articles/ah-redis-stackhero#:~:text=The%20error%20%E2%80%9Credis.,and%20the%20connection%20closes%20automatically. health_check_interval=10, socket_connect_timeout=5, retry_on_timeout=True, diff --git a/electro/toolkit/tortoise_orm.py b/electro/toolkit/tortoise_orm.py index 031bef1..969743d 100644 --- a/electro/toolkit/tortoise_orm.py +++ b/electro/toolkit/tortoise_orm.py @@ -40,8 +40,6 @@ def __new__(mcs, name, bases, attrs): class Model(tortoise_Model, metaclass=ModelMeta): """The base `tortoise-orm` `Model`.""" - pass - def get_tortoise_config(): """Get the configuration for the `tortoise-orm`.""" diff --git a/electro/triggers.py b/electro/triggers.py index 06baf5f..5a0dd52 100644 --- a/electro/triggers.py +++ b/electro/triggers.py @@ -19,7 +19,7 @@ class BaseFlowTrigger(ABC): allowed_scopes: list[FlowScopes] = [FlowScopes.USER] - # noinspection PyUnusedLocal + # pylint: disable=W0613 async def check_scope(self, connector: FlowConnector, scope: FlowScopes | None = None) -> bool: """Check if the `Flow` can be run based on the scope.""" if scope and scope not in self.allowed_scopes: From 8cc8ca1cfcab3280811e32f3182792e61117dad0 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 10 Apr 2025 15:56:47 +0200 Subject: [PATCH 17/57] solve more linter issues --- Makefile | 1 - electro/flow.py | 1 + electro/flow_manager.py | 1 + poetry.lock | 20 +------------------- pyproject.toml | 4 ---- 5 files changed, 3 insertions(+), 24 deletions(-) diff --git a/Makefile b/Makefile index 128d7c3..168eeb0 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,6 @@ style: poetry run black $(SOURCES_DIR) poetry run isort $(SOURCES_DIR) poetry run pylint $(SOURCES_DIR) - poetry run pydocstyle $(SOURCES_DIR) .PHONY: docs docs: diff --git a/electro/flow.py b/electro/flow.py index 0f0a07a..4af0c70 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -233,6 +233,7 @@ async def run(self, connector: FlowConnector, upper_level_state: str | None = No return await self.step(connector, 
initial=True, upper_level_state=upper_level_state) + # TODO: This is too complex and should be refactored. pylint: disable=R0912,R0914,R0915 # TODO: [2024-07-19 by Mykola] Use the decorators # @forbid_concurrent_execution() @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) diff --git a/electro/flow_manager.py b/electro/flow_manager.py index c3c7f43..b3f7439 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -247,6 +247,7 @@ async def _finish_flow(self, flow_connector: FlowConnector): await callback(flow_connector) return + # TODO: This is too complex and should be refactored. pylint: disable=R0912 # TODO: [2024-07-19 by Mykola] Use the decorators # @fail_safely async def _dispatch(self, flow_connector: FlowConnector): diff --git a/poetry.lock b/poetry.lock index f6511f1..91637f1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2057,24 +2057,6 @@ azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0 toml = ["tomli (>=2.0.1)"] yaml = ["pyyaml (>=6.0.1)"] -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - [[package]] name = "pygments" version = "2.19.1" @@ -3242,4 +3224,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "5541f0420d0450921e10fc038bddc2f7bb75bd8264406cc2a6ca5c0acd4af8bf" +content-hash = "ad97dcb629919c9eaa8d416320ff979abfb56334be409605a665c27177cf4fad" diff --git a/pyproject.toml b/pyproject.toml index 43a90ae..98afa07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,6 @@ fastapi = {extras = ["standard"], version = "^0.115.5"} black = "^24.8.0" isort = "^5.13.2" pylint = "^3.2.6" -pydocstyle = {extras = ["toml"], version = "^6.3.0"} sphinx = "^8.2.0" sphinx-rtd-theme = "^3.0.2" autodoc-pydantic = "^2.2.0" @@ -58,9 +57,6 @@ disable = ["fixme"] # ignore TODOs and FIXMEs source-roots = ["electro"] # the source roots for pylint to check ignore-patterns = ["tests", "migrations"] # ignore the tests and migrations folders -[tool.pydocstyle] -match-dir = "(?!tests|migrations).*" # exclude the tests and migrations folders - [tool.aerich] tortoise_orm = "electro.toolkit.tortoise_orm.TORTOISE_ORM_CONFIG" From 3ebb00d6b35eb921e6cde2388c481c080be3e990 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 10 Apr 2025 17:07:00 +0200 Subject: [PATCH 18/57] fix error --- electro/flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/electro/flow.py b/electro/flow.py index 4af0c70..2e79a11 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -208,7 +208,7 @@ async def check(self, connector: FlowConnector, scope: FlowScopes | None = None) async def check_triggers(self, connector: FlowConnector, scope: FlowScopes | None = None) -> bool: """Check if the `Flow` can be triggered.""" - return any(await trigger.check(connector, scope=scope) for trigger in self._triggers) + return any([await trigger.check(connector, scope=scope) for trigger in self._triggers]) # pylint: disable=R1729 async def _update_connector_pre_run(self, connector: FlowConnector, *_, **__kwargs) -> FlowConnector | 
None: """Update the connector before running the `Flow`.""" From 82ce4b6c54a7bcbae6b4030e88d5a01dddb8593c Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 11 Apr 2025 10:36:57 +0200 Subject: [PATCH 19/57] handle no database url --- electro/settings.py | 2 +- electro/toolkit/tortoise_orm.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/electro/settings.py b/electro/settings.py index 88d71f1..180032c 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -56,7 +56,7 @@ class Settings(BaseSettings): DEFAULT_LOCALE: str = "en" # Should mirror the `BOT_LANGUAGE` setting. User in the `make upload-locales` target # Database settings - DATABASE_URL: PostgresDsn | None + DATABASE_URL: PostgresDsn | None = None # if the `DATABASE_URL` is not set, then use the following credentials: POSTGRES_HOST: str | None = None POSTGRES_USER: str | None = None diff --git a/electro/toolkit/tortoise_orm.py b/electro/toolkit/tortoise_orm.py index 969743d..3d3730d 100644 --- a/electro/toolkit/tortoise_orm.py +++ b/electro/toolkit/tortoise_orm.py @@ -43,12 +43,12 @@ class Model(tortoise_Model, metaclass=ModelMeta): def get_tortoise_config(): """Get the configuration for the `tortoise-orm`.""" - if not (database_url := str(settings.DATABASE_URL)): + if not (database_url := settings.DATABASE_URL): database_url = ( f"postgres://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@" f"{settings.POSTGRES_HOST}:{settings.POSTGRES_PORT}/{settings.POSTGRES_DB}" ) - db = expand_db_url(database_url) + db = expand_db_url(str(database_url)) ctx = False if settings.ENABLE_DATABASE_SSL: ctx = ssl.create_default_context(cafile="") From d62f823e23da4d849560a3b4366d13a14ab43220 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 29 Apr 2025 18:07:28 +0200 Subject: [PATCH 20/57] feat: handle message breaks --- electro/interfaces.py | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/electro/interfaces.py b/electro/interfaces.py index 85cd6f8..cd324fc 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -114,19 +114,24 @@ async def send_message( - if an integer, the message will be deleted after that many seconds. 
""" bot_message = await BotMessage.create(receiver=user, channel=channel, content=message) - data = { - "user": await self._format_user(user), - "channel": await self._format_channel(channel), - "message": bot_message.content, - "buttons": await self._create_and_format_buttons(buttons, bot_message), - "delete_after": delete_after, - } - await self.send_json( - { - "action": ResponseTypes.MESSAGE, - "content": data, + message_chunks = message.split(settings.MESSAGE_BREAK) + user = await self._format_user(user) + channel = await self._format_channel(channel) + buttons = await self._create_and_format_buttons(buttons, bot_message) + for i, message_chunk in enumerate(message_chunks): + data = { + "user": user, + "channel": channel, + "message": message_chunk, + "buttons": buttons if i == len(message_chunks) - 1 else [], + "delete_after": delete_after, } - ) + await self.send_json( + { + "action": ResponseTypes.MESSAGE, + "content": data, + } + ) async def send_image( self, From 7c7cf3a6aa708f5e7a73442e25c13ae5d5b44130 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 22 May 2025 10:50:26 +0200 Subject: [PATCH 21/57] Temporary comment api key validation --- electro/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/electro/app.py b/electro/app.py index 34510bd..dfe774a 100644 --- a/electro/app.py +++ b/electro/app.py @@ -24,7 +24,7 @@ def validate_api_key(x_api_key: Optional[str] = Header(default=None)) -> None: title="Electro API", description="The API server that works as an endpoint for all the Electro Interfaces.", version="0.1.0", - dependencies=[Depends(validate_api_key)], + # dependencies=[Depends(validate_api_key)], # docs_url="/", # redoc_url=None, ) From d0aa1f155321b6b7d6199eadec62d023600839e0 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 22 May 2025 15:52:55 +0200 Subject: [PATCH 22/57] New message model --- electro/app.py | 55 ++++++++++++++++++++++++++++++++++++++++- electro/flow.py | 10 ++++---- electro/flow_manager.py | 7 +++--- electro/interfaces.py | 43 +++++++++++++++++++++++--------- electro/models.py | 37 +++++++++++---------------- electro/utils.py | 42 +++++++++++++++++++++++++++++++ 6 files changed, 152 insertions(+), 42 deletions(-) create mode 100644 electro/utils.py diff --git a/electro/app.py b/electro/app.py index dfe774a..7405780 100644 --- a/electro/app.py +++ b/electro/app.py @@ -2,14 +2,17 @@ from typing import Any, Dict, Optional -from fastapi import Depends, FastAPI, Header, HTTPException, WebSocket, WebSocketDisconnect +# from fastapi import Depends +from fastapi import FastAPI, Header, HTTPException, WebSocket, WebSocketDisconnect from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise from .enums import SupportedPlatforms from .interfaces import APIInterface, WebSocketInterface +from .models import Channel, Message, User from .settings import settings from .toolkit.tortoise_orm import get_tortoise_config +from .utils import format_historical_message def validate_api_key(x_api_key: Optional[str] = Header(default=None)) -> None: @@ -30,6 +33,56 @@ def validate_api_key(x_api_key: Optional[str] = Header(default=None)) -> None: ) +@app.get("/api/platform/{platform}/user/{user_id}/messages") +async def get_user_messages(user_id: str, limit: int = 20, offset: int = 0): + """ + Get the message history for a user. + + Arguments: + user: The user whose message history is to be retrieved. + limit: The maximum number of messages to retrieve. 
+ offset: The number of messages to skip before retrieving the history. + """ + user = await User.get_or_none(id=user_id) + if not user: + raise HTTPException(status_code=404, detail="User not found") + messages = ( + await Message.filter( + user=user, + is_temporary=False, + ) + .order_by("-date_added") + .limit(limit) + .offset(offset) + ) + return [await format_historical_message(message) for message in messages] + + +@app.get("/api/channel/{channel_id}/messages") +async def get_channel_messages(channel_id: str, limit: int = 20, offset: int = 0): + """ + Get the message history for a channel. + + Arguments: + channel: The channel whose message history is to be retrieved. + limit: The maximum number of messages to retrieve. + offset: The number of messages to skip before retrieving the history. + """ + channel = await Channel.get_or_none(id=channel_id) + if not channel: + raise HTTPException(status_code=404, detail="Channel not found") + messages = ( + await Message.filter( + channel=channel, + is_temporary=False, + ) + .order_by("-date_added") + .limit(limit) + .offset(offset) + ) + return [await format_historical_message(message) for message in messages] + + @app.post("/api/platform/{platform}") async def process_message(platform: str, data: Dict[str, Any]): """Process the message.""" diff --git a/electro/flow.py b/electro/flow.py index 2e79a11..82d5479 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -10,7 +10,7 @@ from .contrib.storage_buckets import BaseStorageBucket, BaseStorageBucketElement from .flow_connector import FlowConnector, FlowConnectorEvents from .flow_step import BaseFlowStep, FlowStepDone -from .models import BotMessage +from .models import Message from .scopes import FlowScopes from .settings import settings from .substitutions import BaseSubstitution @@ -142,12 +142,12 @@ async def get_iterables(self, connector: FlowConnector) -> typing.List: @abstractmethod async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None - ) -> list[BotMessage] | None: + ) -> list[Message] | None: """Process the response in the current step of the `Flow`.""" raise NotImplementedError @abstractmethod - async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[BotMessage] | None: + async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[Message] | None: """Start the `Flow`.""" raise NotImplementedError @@ -214,7 +214,7 @@ async def _update_connector_pre_run(self, connector: FlowConnector, *_, **__kwar """Update the connector before running the `Flow`.""" return connector - async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[BotMessage] | None: + async def run(self, connector: FlowConnector, upper_level_state: str | None = None) -> list[Message] | None: """Start the `Flow`.""" # Make sure there are steps in the `Flow` if not self._steps: @@ -239,7 +239,7 @@ async def run(self, connector: FlowConnector, upper_level_state: str | None = No @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None - ) -> list[BotMessage] | None: + ) -> list[Message] | None: """ Process the response in the current step of the `Flow`. 
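The history endpoints added to `electro/app.py` earlier in this patch can be exercised with any HTTP client; a hypothetical `httpx` call is sketched below. The base URL, the `discord` platform segment, and the IDs are placeholders for illustration, not values taken from this patch.

```python
# Hypothetical client calls against the history endpoints added in this patch.
import httpx

with httpx.Client(base_url="http://localhost:8000") as client:
    user_history = client.get(
        "/api/platform/discord/user/1234567890/messages",
        params={"limit": 20, "offset": 0},
    )
    channel_history = client.get("/api/channel/42/messages", params={"limit": 20, "offset": 0})
    print(user_history.json())
    print(channel_history.json())
```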
diff --git a/electro/flow_manager.py b/electro/flow_manager.py index b3f7439..3e329f8 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -105,10 +105,11 @@ async def save_message(cls, platform: str, message_data: schemas.ReceivedMessage else: channel = None return await Message.create( + is_command=message_data.content.startswith(settings.BOT_COMMAND_PREFIX), + is_bot_message=False, + user=author, content=message_data.content, - author=author, channel=channel, - is_command=message_data.content.startswith(settings.BOT_COMMAND_PREFIX), ) @classmethod @@ -347,7 +348,7 @@ async def on_message(self, platform: str, message_data: schemas.ReceivedMessage, # Save the message to the database message = await self.analytics_manager.save_message(platform, message_data) - user = await message.author + user = await message.user channel = await message.channel # Get the user state and data diff --git a/electro/interfaces.py b/electro/interfaces.py index cd324fc..4e3b66e 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -10,7 +10,7 @@ from .enums import ResponseTypes, SupportedPlatforms from .flow_connector import FlowConnectorEvents from .flow_manager import global_flow_manager -from .models import BotMessage, Button, Channel, File, Guild, Role, User +from .models import Button, Channel, File, Guild, Message, Role, User from .schemas import ButtonClick, ReceivedMessage from .settings import settings from .toolkit.images_storage.universal_image_storage import universal_image_storage @@ -30,13 +30,13 @@ class BaseInterface(ABC): """ async def _create_and_format_buttons( - self, buttons: Optional[List["BaseButton"]] = None, bot_message: Optional[BotMessage] = None + self, buttons: Optional[List["BaseButton"]] = None, message: Optional[Message] = None ) -> List[Button]: """Format the buttons to be sent to the client.""" response = [] for button in buttons or []: button_object = await Button.create( - bot_message=bot_message, + message=message, custom_id=button.custom_id, style=button.style, label=button.label, @@ -113,17 +113,27 @@ async def send_message( - if "next", the message will be deleted after the next message is sent. - if an integer, the message will be deleted after that many seconds. 
""" - bot_message = await BotMessage.create(receiver=user, channel=channel, content=message) message_chunks = message.split(settings.MESSAGE_BREAK) - user = await self._format_user(user) - channel = await self._format_channel(channel) - buttons = await self._create_and_format_buttons(buttons, bot_message) + user_data = await self._format_user(user) + channel_data = await self._format_channel(channel) for i, message_chunk in enumerate(message_chunks): + message = await Message.create( + is_temporary=delete_after is not None, + is_bot_message=True, + type=Message.MessageTypes.TEXT, + user=user, + channel=channel, + content=message_chunk, + ) + if i == len(message_chunks) - 1: + buttons = await self._create_and_format_buttons(buttons, message) + else: + buttons = [] data = { - "user": user, - "channel": channel, + "user": user_data, + "channel": channel_data, "message": message_chunk, - "buttons": buttons if i == len(message_chunks) - 1 else [], + "buttons": buttons, "delete_after": delete_after, } await self.send_json( @@ -168,7 +178,7 @@ async def send_image( image_url = await universal_image_storage.get_image_url(image.storage_file_object_key) elif isinstance(image, BytesIO): object_key = await universal_image_storage.upload_image(image) - await File.create( + image = await File.create( owner=user, storage_service=settings.STORAGE_SERVICE_ID, storage_file_object_key=object_key, @@ -181,6 +191,17 @@ async def send_image( if str(image_url).endswith(".gif") and (buttons or caption): raise ValueError("GIFs do not support buttons or captions.") + message = await Message.create( + is_temporary=delete_after is not None, + is_bot_message=True, + type=Message.MessageTypes.IMAGE, + user=user, + channel=channel, + content=image_url, + caption=caption, + ) + if isinstance(image, File): + await message.files.add(image) data = { "user": await self._format_user(user), "channel": await self._format_channel(channel), diff --git a/electro/models.py b/electro/models.py index 6791a6a..1fb4c63 100644 --- a/electro/models.py +++ b/electro/models.py @@ -141,7 +141,6 @@ class ChannelUsedFor(str, Enum): platform_ids: fields.ReverseRelation[PlatformId] messages: fields.ReverseRelation[Message] - bot_messages: fields.ReverseRelation[BotMessage] def __str__(self) -> str: """Return the string representation of the model.""" @@ -165,33 +164,29 @@ def __str__(self) -> str: class Message(BaseModel): """The model for Message.""" - id = fields.BigIntField(pk=True) - author: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="messages") - channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="messages", null=True) - content = fields.TextField() - is_command = fields.BooleanField(null=True) + class MessageTypes(str, Enum): + """The types of messages.""" - def __str__(self) -> str: - """Return the string representation of the model.""" - return f"`{self.author}` Message: `{self.content}`." 
+ TEXT = "text" + IMAGE = "image" + id = fields.BigIntField(pk=True) -class BotMessage(BaseModel): - """The model for Bot Message.""" + is_bot_message = fields.BooleanField(default=False) + is_command = fields.BooleanField(default=False) + is_temporary = fields.BooleanField(default=False) + type = fields.CharEnumField(MessageTypes, max_length=255, default=MessageTypes.TEXT) - id = fields.BigIntField(pk=True) - receiver: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="bot_messages", null=True) - channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField( - "electro.Channel", related_name="bot_messages", null=True - ) + user: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="messages", null=True) + channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="messages", null=True) content = fields.TextField(null=True) - - files: fields.ManyToManyRelation[File] = ManyToManyField("electro.File", related_name="bot_messages") + caption = fields.TextField(null=True) + files: fields.ManyToManyRelation[File] = ManyToManyField("electro.File", related_name="messages") buttons: fields.ReverseRelation[Button] def __str__(self) -> str: """Return the string representation of the model.""" - return f"`{self.receiver}` Bot Message: `{self.content}`." + return f"Message `{self.id}`." class Button(BaseModel): @@ -204,9 +199,7 @@ class Button(BaseModel): clicked = fields.BooleanField(default=False) remove_after_click = fields.BooleanField(default=False) extra_data = fields.JSONField(null=True) - bot_message: ForeignKeyRelation[BotMessage] = fields.ForeignKeyField( - "electro.BotMessage", related_name="buttons", null=True - ) + message: ForeignKeyRelation[Message] = fields.ForeignKeyField("electro.Message", related_name="buttons", null=True) def __str__(self) -> str: """Return the string representation of the model.""" diff --git a/electro/utils.py b/electro/utils.py new file mode 100644 index 0000000..23e62ff --- /dev/null +++ b/electro/utils.py @@ -0,0 +1,42 @@ +from typing import Any, Dict + +from .models import Message +from .toolkit.images_storage.universal_image_storage import universal_image_storage + + +async def format_historical_message(message: Message) -> Dict[str, Any]: + await message.fetch_related("buttons", "files") + buttons = [ + { + "id": button.id, + "custom_id": button.custom_id, + "style": button.style, + "label": button.label, + "clicked": button.clicked, + "remove_after_click": button.remove_after_click, + } + for button in message.buttons + ] + if message.type == Message.MessageTypes.IMAGE: + if len(message.files) > 0: + image = message.files[0] + image_url = await universal_image_storage.get_image_url(image.storage_file_object_key) + else: + image_url = message.content + return { + "id": message.id, + "type": message.type, + "is_bot_message": message.is_bot_message, + "image": image_url, + "caption": message.caption, + "buttons": buttons, + } + if message.type == Message.MessageTypes.TEXT: + return { + "id": message.id, + "type": message.type, + "is_bot_message": message.is_bot_message, + "message": message.content, + "buttons": buttons, + } + return {} From 8384225ed544a0f12c22fdef5af273a0f7a5c413 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 23 May 2025 17:50:55 +0200 Subject: [PATCH 23/57] add authentication --- electro/app.py | 165 ++++++++++++++++++++++---------------- electro/authentication.py | 49 +++++++++++ electro/enums.py | 1 + electro/flow_manager.py | 32 ++------ 
electro/interfaces.py | 36 ++++++--- electro/schemas.py | 8 -- electro/settings.py | 9 ++- 7 files changed, 185 insertions(+), 115 deletions(-) create mode 100644 electro/authentication.py diff --git a/electro/app.py b/electro/app.py index 7405780..a9185aa 100644 --- a/electro/app.py +++ b/electro/app.py @@ -2,109 +2,138 @@ from typing import Any, Dict, Optional -# from fastapi import Depends -from fastapi import FastAPI, Header, HTTPException, WebSocket, WebSocketDisconnect +from fastapi import Depends, FastAPI, HTTPException, WebSocket, WebSocketDisconnect from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise -from .enums import SupportedPlatforms +from .authentication import http_authenticate_user, ws_authenticate_user from .interfaces import APIInterface, WebSocketInterface -from .models import Channel, Message, User +from .models import Message, PlatformId, User from .settings import settings from .toolkit.tortoise_orm import get_tortoise_config from .utils import format_historical_message - -def validate_api_key(x_api_key: Optional[str] = Header(default=None)) -> None: - """Validate the API key provided in the request header.""" - if not x_api_key and settings.API_KEY: - raise HTTPException(status_code=401, detail="API Key is missing") - if settings.API_KEY and x_api_key != settings.API_KEY: - raise HTTPException(status_code=401, detail="Invalid API Key") - - app = FastAPI( title="Electro API", description="The API server that works as an endpoint for all the Electro Interfaces.", version="0.1.0", - # dependencies=[Depends(validate_api_key)], # docs_url="/", # redoc_url=None, ) -@app.get("/api/platform/{platform}/user/{user_id}/messages") -async def get_user_messages(user_id: str, limit: int = 20, offset: int = 0): +@app.get("/api/platform/{platform}/user/{user_platform_id}") +async def get_user(platform: str, user_id: str, request_user: Optional[User] = Depends(http_authenticate_user)): """ - Get the message history for a user. - - Arguments: - user: The user whose message history is to be retrieved. - limit: The maximum number of messages to retrieve. - offset: The number of messages to skip before retrieving the history. + Test the API endpoint. 
""" - user = await User.get_or_none(id=user_id) - if not user: - raise HTTPException(status_code=404, detail="User not found") - messages = ( - await Message.filter( - user=user, - is_temporary=False, - ) - .order_by("-date_added") - .limit(limit) - .offset(offset) + platform_id = await PlatformId.get_or_none( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER ) - return [await format_historical_message(message) for message in messages] + if not platform_id: + raise HTTPException(status_code=404, detail="User not found.") + user = await platform_id.user + if request_user == user or not settings.AUTHENTICATION_ENABLED: + # TODO: create a permission check to allow access to other users + user = await User.get_or_none(id=user_id) + return { + "id": user.id, + "username": user.username, + "platform_ids": [ + { + "platform": platform.platform, + "platform_id": platform.platform_id, + "type": platform.type, + } + for platform in await user.platform_ids.all() + ], + } + raise HTTPException(status_code=403, detail="You are not authorized to access this user's information.") -@app.get("/api/channel/{channel_id}/messages") -async def get_channel_messages(channel_id: str, limit: int = 20, offset: int = 0): +@app.get("/api/platform/{platform}/user/{user_id}/messages") +async def get_user_messages( + platform: str, + user_id: str, + request_user: Optional[User] = Depends(http_authenticate_user), + limit: int = 20, + offset: int = 0, +): """ - Get the message history for a channel. + Get the message history for a user. Arguments: - channel: The channel whose message history is to be retrieved. + user: The user whose message history is to be retrieved. limit: The maximum number of messages to retrieve. offset: The number of messages to skip before retrieving the history. 
""" - channel = await Channel.get_or_none(id=channel_id) - if not channel: - raise HTTPException(status_code=404, detail="Channel not found") - messages = ( - await Message.filter( - channel=channel, - is_temporary=False, - ) - .order_by("-date_added") - .limit(limit) - .offset(offset) + platform_id = await PlatformId.get_or_none( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER ) - return [await format_historical_message(message) for message in messages] + if not platform_id: + raise HTTPException(status_code=404, detail="User not found.") + user = await platform_id.user + if request_user == user or not settings.AUTHENTICATION_ENABLED: + user = await User.get_or_none(id=user_id) + messages = ( + await Message.filter( + user=user, + is_temporary=False, + ) + .order_by("-date_added") + .limit(limit) + .offset(offset) + ) + return [await format_historical_message(message) for message in messages] + raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") -@app.post("/api/platform/{platform}") -async def process_message(platform: str, data: Dict[str, Any]): +@app.post("/api/platform/{platform}/user/{user_id}/messages") +async def process_message( + platform: str, + user_id: str, + data: Dict[str, Any], + request_user: Optional[User] = Depends(http_authenticate_user), +): """Process the message.""" - if platform not in SupportedPlatforms: - raise ValueError(f"Platform {platform} is not supported.") - interface = APIInterface() - await interface.handle_incoming_action(platform, data) - return interface.messages.get() + platform_id = await PlatformId.get_or_none( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if not platform_id: + raise HTTPException(status_code=404, detail="User not found.") + user = await platform_id.user + if request_user == user or not settings.AUTHENTICATION_ENABLED: + user = await User.get_or_none(id=user_id) + interface = APIInterface() + await interface.handle_incoming_action(user, platform, data) + return interface.messages.get() + raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") @app.websocket("/websocket/platform/{platform}/user/{user_id}") -async def websocket_endpoint(websocket: WebSocket, platform: str, user_id: str): # pylint: disable=W0613 - if platform not in SupportedPlatforms: - raise ValueError(f"Platform {platform} is not supported.") - interface = WebSocketInterface() - await interface.connect(websocket) - try: - while websocket.application_state == WebSocketState.CONNECTED: - data = await websocket.receive_json() - await interface.handle_incoming_action(platform, data) - except WebSocketDisconnect: - await interface.disconnect() +async def websocket_endpoint( + websocket: WebSocket, + platform: str, + user_id: str, + request_user: Optional[User] = Depends(ws_authenticate_user), +): + """Handle the websocket connection.""" + platform_id = await PlatformId.get_or_none( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if not platform_id: + raise HTTPException(status_code=404, detail="User not found.") + user = await platform_id.user + if request_user == user or not settings.AUTHENTICATION_ENABLED: + interface = WebSocketInterface() + await interface.connect(websocket) + try: + while websocket.application_state == WebSocketState.CONNECTED: + data = await websocket.receive_json() + await interface.handle_incoming_action(user, platform, data) + 
except WebSocketDisconnect: + await interface.disconnect() + raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") # region Register Tortoise diff --git a/electro/authentication.py b/electro/authentication.py new file mode 100644 index 0000000..930e78f --- /dev/null +++ b/electro/authentication.py @@ -0,0 +1,49 @@ +from typing import Optional + +import jwt +from fastapi import Header, HTTPException, Query + +from .enums import SupportedPlatforms +from .models import PlatformId, User +from .settings import settings + + +async def http_authenticate_user( + platform: str, user_id: str, authorization: Optional[str] = Header(default=None) +) -> User: + """Validate the Bearer token provided in the request header.""" + if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): + raise HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") + token = authorization.split(" ")[1] + return await _authenticate_user(platform, user_id, token) + + +async def ws_authenticate_user(platform: str, user_id: str, token: Optional[str] = Query(default=None)) -> User: + """Validate the Bearer token provided in the request header.""" + return await _authenticate_user(platform, user_id, token) + + +async def _authenticate_user(platform: str, user_id: str, token: Optional[str] = None) -> User: + if platform not in SupportedPlatforms: + raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") + if settings.AUTHENTICATION_ENABLED: + try: + validated_token = jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) + except jwt.ExpiredSignatureError as e: + raise HTTPException(status_code=401, detail="Token has expired") from e + except jwt.InvalidTokenError as e: + raise HTTPException(status_code=401, detail="Invalid token") from e + user_id = validated_token.get(settings.JWT_ID_KEY) + username = validated_token.get(settings.JWT_USERNAME_KEY) + if not user_id or not username: + raise HTTPException(status_code=401, detail="Invalid token") + else: + username = user_id # TODO: Find a better solution to pass the username + platform_id, created = await PlatformId.get_or_create( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if created: + user = await User.create(username=username) + platform_id.user = user + await platform_id.save() + return await platform_id.user diff --git a/electro/enums.py b/electro/enums.py index 09296eb..9924860 100644 --- a/electro/enums.py +++ b/electro/enums.py @@ -14,6 +14,7 @@ class SupportedPlatforms(str, Enum): class ResponseTypes(str, Enum): """The actions that can be processed by the clients.""" + ERROR = "error" MESSAGE = "message" IMAGE = "image" ADD_ROLE = "add_role" diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 3e329f8..cee4dc4 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -45,23 +45,6 @@ async def get_or_create_guild(cls, platform: str, guild_data: schemas.Guild) -> await platform_id.save() return await platform_id.guild - @classmethod - async def get_or_create_user(cls, platform: str, user_data: schemas.User) -> User: - """Save the user to the database.""" - platform_id, created = await PlatformId.get_or_create( - platform_id=user_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.USER - ) - if created: - user = await User.create(username=user_data.username) - platform_id.user = user - logger.info(f"Created the User 
record for {user.id=}, {user.username=}") - await platform_id.save() - if user_data.guild: - guild = await cls.get_or_create_guild(platform, user_data.guild) - user.guild = guild - await user.save() - return await platform_id.user - @classmethod async def get_or_create_channel( cls, platform: str, channel_data: schemas.Channel, user: typing.Optional[User] = None @@ -97,9 +80,8 @@ async def get_or_create_channel( return channel @classmethod - async def save_message(cls, platform: str, message_data: schemas.ReceivedMessage) -> Message: + async def save_message(cls, author: User, platform: str, message_data: schemas.ReceivedMessage) -> Message: """Save the message to the database.""" - author = await cls.get_or_create_user(platform, message_data.author) if message_data.channel: channel = await cls.get_or_create_channel(platform, message_data.channel, author) else: @@ -343,12 +325,13 @@ async def dispatch(self, flow_connector: FlowConnector): async with self: return await self._dispatch(flow_connector) - async def on_message(self, platform: str, message_data: schemas.ReceivedMessage, interface: BaseInterface): + async def on_message( + self, user: User, platform: str, message_data: schemas.ReceivedMessage, interface: BaseInterface + ): """Handle the messages sent by the users.""" # Save the message to the database - message = await self.analytics_manager.save_message(platform, message_data) - user = await message.user + message = await self.analytics_manager.save_message(user, platform, message_data) channel = await message.channel # Get the user state and data @@ -379,10 +362,11 @@ async def on_message(self, platform: str, message_data: schemas.ReceivedMessage, return await self.dispatch(flow_connector) - async def on_button_click(self, platform: str, button_data: schemas.ButtonClick, interface: BaseInterface): + async def on_button_click( + self, user: User, platform: str, button_data: schemas.ButtonClick, interface: BaseInterface + ): """Handle the buttons clicked by the users.""" # Save the button click to the database - user = await self.analytics_manager.get_or_create_user(platform, button_data.user) channel = await self.analytics_manager.get_or_create_channel(platform, button_data.channel, user) try: button = await self.analytics_manager.save_button_click(button_data.id) diff --git a/electro/interfaces.py b/electro/interfaces.py index 4e3b66e..035efa8 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -297,7 +297,7 @@ async def with_constant_typing(self, user: User, channel: Channel): await self.set_typing(user, channel, ResponseTypes.STOP_TYPING) async def handle_incoming_action( - self, platform: SupportedPlatforms, data: Dict[str, Any] + self, user: User, platform: SupportedPlatforms, data: Dict[str, Any] ) -> Tuple[Dict[str, str], int]: """ Handle incoming actions from the client. The action data is validated and processed. @@ -306,18 +306,28 @@ async def handle_incoming_action( platform: The platform from which the action was received (). data: The data received from the client. 
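The dispatch body that follows unpacks a small envelope with an `action` key and a `content` key, validating the content as a `ReceivedMessage` or `ButtonClick`. A hypothetical client-side helper that builds such an envelope is sketched below; the exact event string is defined by `FlowConnectorEvents` in the project, so `"message"` here is an assumption.

```python
# Hypothetical client-side helper building the {"action": ..., "content": ...} envelope
# that handle_incoming_action() unpacks. The "message" event string is an assumption;
# the real value comes from FlowConnectorEvents.
import json


def build_message_envelope(text: str, channel: dict | None = None, action: str = "message") -> str:
    return json.dumps({"action": action, "content": {"content": text, "channel": channel}})


print(build_message_envelope("Hello from the client"))
```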
""" - action = data.get("action") - content = data.get("content") - if action == FlowConnectorEvents.MESSAGE: - content = ReceivedMessage.model_validate(content) - await global_flow_manager.on_message(platform, content, self) - if action == FlowConnectorEvents.BUTTON_CLICK: - content = ButtonClick.model_validate(content) - await global_flow_manager.on_button_click(platform, content, self) - if action == FlowConnectorEvents.MEMBER_JOIN: - pass - if action == FlowConnectorEvents.MEMBER_UPDATE: - pass + try: + action = data.get("action") + content = data.get("content") + if action == FlowConnectorEvents.MESSAGE: + content = ReceivedMessage.model_validate(content) + await global_flow_manager.on_message(user, platform, content, self) + if action == FlowConnectorEvents.BUTTON_CLICK: + content = ButtonClick.model_validate(content) + await global_flow_manager.on_button_click(user, platform, content, self) + if action == FlowConnectorEvents.MEMBER_JOIN: + pass + if action == FlowConnectorEvents.MEMBER_UPDATE: + pass + except Exception as exception: # pylint: disable=W0718 + await self.send_json( + { + "action": ResponseTypes.ERROR, + "content": { + "error": str(exception), + }, + } + ) @abstractmethod async def send_json(self, data: Dict[str, Any]): diff --git a/electro/schemas.py b/electro/schemas.py index ff2b7fc..3f4d2dc 100644 --- a/electro/schemas.py +++ b/electro/schemas.py @@ -10,12 +10,6 @@ class Guild(BaseModel): name: str -class User(BaseModel): - platform_id: PlatformId - username: str - guild: Guild | None - - class Channel(BaseModel): platform_id: PlatformId name: str @@ -25,12 +19,10 @@ class Channel(BaseModel): class ReceivedMessage(BaseModel): content: str - author: User channel: Channel | None class ButtonClick(BaseModel): id: int custom_id: str - user: User channel: Channel | None diff --git a/electro/settings.py b/electro/settings.py index 180032c..b52c21e 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -24,12 +24,17 @@ class Settings(BaseSettings): DEFAULT_SLEEP_TIME: int = 3 # seconds SLEEP_TIME_PER_CHARACTER: float = 0.05 HTTPX_CLIENT_DEFAULT_TIMEOUT: int = 60 - API_KEY_HEADER_NAME: str = "x-api-key" - API_KEY: str = "server-token" # Build urls for static files by removing root path and adding the server url SERVER_URL: str = "http://localhost:8000" APP_ROOT: str = "/app" + # JWT settings + AUTHENTICATION_ENABLED: bool = True + JWT_TOKEN_TYPE: str = "Bearer" + JWT_KEY: str = "" + JWT_ID_KEY: str = "sub" + JWT_USERNAME_KEY: str = "username" + # Bot settings MESSAGE_BREAK: str = "--- message break ---" MESSAGE_SLEEP_INSTRUCTION_PATTERN: str = r"--- sleep (\d+.?\d*) seconds ---" From 485326fbaf1cdffa20c81ebbc395d36aba567aae Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 26 May 2025 16:20:38 +0200 Subject: [PATCH 24/57] more flexible authentication --- electro/app.py | 42 ++++++++---------- electro/authentication.py | 89 +++++++++++++++++++++++++++------------ electro/settings.py | 8 +++- electro/utils.py | 27 +++++++++++- 4 files changed, 113 insertions(+), 53 deletions(-) diff --git a/electro/app.py b/electro/app.py index a9185aa..0366c22 100644 --- a/electro/app.py +++ b/electro/app.py @@ -6,12 +6,11 @@ from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise -from .authentication import http_authenticate_user, ws_authenticate_user +from .authentication import authenticate_user from .interfaces import APIInterface, WebSocketInterface from .models import Message, PlatformId, User -from .settings import settings from 
.toolkit.tortoise_orm import get_tortoise_config -from .utils import format_historical_message +from .utils import format_historical_message, paginate_response app = FastAPI( title="Electro API", @@ -23,7 +22,7 @@ @app.get("/api/platform/{platform}/user/{user_platform_id}") -async def get_user(platform: str, user_id: str, request_user: Optional[User] = Depends(http_authenticate_user)): +async def get_user(platform: str, user_id: str, request_user: Optional[User] = Depends(authenticate_user)): """ Test the API endpoint. """ @@ -33,9 +32,8 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D if not platform_id: raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user - if request_user == user or not settings.AUTHENTICATION_ENABLED: - # TODO: create a permission check to allow access to other users - user = await User.get_or_none(id=user_id) + # TODO: create a permission check to allow access to other users + if request_user == user: return { "id": user.id, "username": user.username, @@ -55,7 +53,7 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D async def get_user_messages( platform: str, user_id: str, - request_user: Optional[User] = Depends(http_authenticate_user), + request_user: Optional[User] = Depends(authenticate_user), limit: int = 20, offset: int = 0, ): @@ -73,18 +71,15 @@ async def get_user_messages( if not platform_id: raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user - if request_user == user or not settings.AUTHENTICATION_ENABLED: - user = await User.get_or_none(id=user_id) - messages = ( - await Message.filter( - user=user, - is_temporary=False, - ) - .order_by("-date_added") - .limit(limit) - .offset(offset) + if request_user == user: + messages = Message.filter(user=user, is_temporary=False, is_command=False).order_by("-date_added") + return await paginate_response( + messages, + format_historical_message, + limit=limit, + offset=offset, + url=f"/api/platform/{platform}/user/{user_id}/messages", ) - return [await format_historical_message(message) for message in messages] raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") @@ -93,7 +88,7 @@ async def process_message( platform: str, user_id: str, data: Dict[str, Any], - request_user: Optional[User] = Depends(http_authenticate_user), + request_user: Optional[User] = Depends(authenticate_user), ): """Process the message.""" platform_id = await PlatformId.get_or_none( @@ -102,8 +97,7 @@ async def process_message( if not platform_id: raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user - if request_user == user or not settings.AUTHENTICATION_ENABLED: - user = await User.get_or_none(id=user_id) + if request_user == user: interface = APIInterface() await interface.handle_incoming_action(user, platform, data) return interface.messages.get() @@ -115,7 +109,7 @@ async def websocket_endpoint( websocket: WebSocket, platform: str, user_id: str, - request_user: Optional[User] = Depends(ws_authenticate_user), + request_user: Optional[User] = Depends(authenticate_user), ): """Handle the websocket connection.""" platform_id = await PlatformId.get_or_none( @@ -124,7 +118,7 @@ async def websocket_endpoint( if not platform_id: raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user - if request_user == user or not settings.AUTHENTICATION_ENABLED: + if request_user == 
user: interface = WebSocketInterface() await interface.connect(websocket) try: diff --git a/electro/authentication.py b/electro/authentication.py index 930e78f..c20c272 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -1,44 +1,79 @@ from typing import Optional import jwt -from fastapi import Header, HTTPException, Query +from fastapi import Cookie, Header, HTTPException from .enums import SupportedPlatforms from .models import PlatformId, User from .settings import settings -async def http_authenticate_user( - platform: str, user_id: str, authorization: Optional[str] = Header(default=None) +async def authenticate_user( + platform: str, + user_id: str, + header: Optional[str] = Header(default=None, alias="Authorization"), + cookie: Optional[str] = Cookie(default=None, alias="Authorization"), ) -> User: - """Validate the Bearer token provided in the request header.""" - if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): - raise HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") - token = authorization.split(" ")[1] - return await _authenticate_user(platform, user_id, token) + """Validate the Bearer token provided in the request header or in the cookie.""" + # Validate the platform + if platform not in SupportedPlatforms: + raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") -async def ws_authenticate_user(platform: str, user_id: str, token: Optional[str] = Query(default=None)) -> User: - """Validate the Bearer token provided in the request header.""" - return await _authenticate_user(platform, user_id, token) + # If authentication is not enabled, return a user with the provided user_id + if not settings.AUTHENTICATION_ENABLED: + return await _get_or_create_user(platform, user_id) + # Get the authorization token from the header or cookie + authorization = header or cookie + if not authorization: + raise HTTPException(status_code=401, detail="Authorization header or cookie is required.") + if header: + if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): + raise HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") + authorization = authorization.split(" ")[1] + + # Determine the authentication method based on the platform + authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") + if authentication_method == "api_key": + return await _api_key_authenticate_user(platform, user_id, authorization) + return await _jwt_authenticate_user(platform, user_id, authorization) + + +async def _get_or_create_user(platform: str, user_id: str, username: Optional[str] = None) -> User: + """Get or create a user based on the platform and user ID.""" + platform_id, created = await PlatformId.get_or_create( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if created: + user = await User.create(username=username or user_id) + platform_id.user = user + await platform_id.save() + return await platform_id.user + + +async def _api_key_authenticate_user( + platform: str, user_id: str, api_key: Optional[str] = Header(default=None) +) -> User: + """Validate the API key provided in the request header.""" + if api_key != settings.API_KEY: + raise HTTPException(status_code=401, detail="Invalid API key.") + return await _get_or_create_user(platform, user_id) + + +async def _jwt_authenticate_user(platform: str, user_id: str, token: Optional[str] = None) -> User: + 
try: + validated_token = jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) + except jwt.ExpiredSignatureError as e: + raise HTTPException(status_code=401, detail="Token has expired") from e + except jwt.InvalidTokenError as e: + raise HTTPException(status_code=401, detail="Invalid token") from e + user_id = validated_token.get(settings.JWT_ID_KEY) + username = validated_token.get(settings.JWT_USERNAME_KEY) + + if not user_id or not username: + raise HTTPException(status_code=401, detail="Invalid token") -async def _authenticate_user(platform: str, user_id: str, token: Optional[str] = None) -> User: - if platform not in SupportedPlatforms: - raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") - if settings.AUTHENTICATION_ENABLED: - try: - validated_token = jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) - except jwt.ExpiredSignatureError as e: - raise HTTPException(status_code=401, detail="Token has expired") from e - except jwt.InvalidTokenError as e: - raise HTTPException(status_code=401, detail="Invalid token") from e - user_id = validated_token.get(settings.JWT_ID_KEY) - username = validated_token.get(settings.JWT_USERNAME_KEY) - if not user_id or not username: - raise HTTPException(status_code=401, detail="Invalid token") - else: - username = user_id # TODO: Find a better solution to pass the username platform_id, created = await PlatformId.get_or_create( platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER ) diff --git a/electro/settings.py b/electro/settings.py index b52c21e..749ecf9 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -3,6 +3,7 @@ from pydantic import PostgresDsn, RedisDsn from pydantic_settings import BaseSettings, SettingsConfigDict +from .enums import SupportedPlatforms from .toolkit.images_storage.storages_enums import StoragesIDs @@ -28,8 +29,13 @@ class Settings(BaseSettings): SERVER_URL: str = "http://localhost:8000" APP_ROOT: str = "/app" - # JWT settings + # Authentication settings AUTHENTICATION_ENABLED: bool = True + # Default authentication is API key, this list defines platforms that use JWT authentication : + JWT_PLATFORMS: list[str] = [SupportedPlatforms.CUSTOM] + # API key authentication + API_KEY: str = "your_api_key_here" + # JWT authentication JWT_TOKEN_TYPE: str = "Bearer" JWT_KEY: str = "" JWT_ID_KEY: str = "sub" diff --git a/electro/utils.py b/electro/utils.py index 23e62ff..5bd46bc 100644 --- a/electro/utils.py +++ b/electro/utils.py @@ -1,4 +1,6 @@ -from typing import Any, Dict +from typing import Any, Callable, Dict + +from tortoise.queryset import QuerySet from .models import Message from .toolkit.images_storage.universal_image_storage import universal_image_storage @@ -40,3 +42,26 @@ async def format_historical_message(message: Message) -> Dict[str, Any]: "buttons": buttons, } return {} + + +async def paginate_response(data: QuerySet, formatter: Callable, limit: int, offset: int, url: str) -> Dict[str, Any]: + """ + Paginate the response data. 
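The helper body that follows derives the page count, the current page, and the previous/next links from `limit` and `offset`. The same arithmetic can be checked in isolation with a small sketch (the numbers are illustrative):

```python
# Stand-alone check of the pagination arithmetic used in paginate_response() below:
# ceiling division for the page count, and offset // limit + 1 for the current page.
def page_numbers(total_count: int, limit: int, offset: int) -> tuple[int, int]:
    total_pages = (total_count + limit - 1) // limit
    current_page = offset // limit + 1
    return total_pages, current_page


assert page_numbers(total_count=45, limit=20, offset=0) == (3, 1)
assert page_numbers(total_count=45, limit=20, offset=40) == (3, 3)
```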
+ """ + total_count = await data.count() + paginated_data = await data.offset(offset).limit(limit).all() + formatted_data = [await formatter(message) for message in paginated_data] + previous_page = f"{url}?limit={limit}&offset={max(0, offset - limit)}" if offset > 0 else None + next_page = f"{url}?limit={limit}&offset={offset + limit}" if offset + limit < total_count else None + total_pages = (total_count + limit - 1) // limit + current_page = offset // limit + 1 + return { + "count": total_count, + "offset": offset, + "limit": limit, + "pages": total_pages, + "page": current_page, + "previous": previous_page, + "next": next_page, + "data": formatted_data, + } From 754061e0e1f570c01000a158fb78dbb1384e10ef Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 26 May 2025 17:25:29 +0200 Subject: [PATCH 25/57] Update authentication --- electro/app.py | 44 +++++++++++++++++++++++++++++++++++++-- electro/authentication.py | 30 ++++++++------------------ electro/settings.py | 6 ++---- 3 files changed, 53 insertions(+), 27 deletions(-) diff --git a/electro/app.py b/electro/app.py index 0366c22..9e56060 100644 --- a/electro/app.py +++ b/electro/app.py @@ -21,7 +21,47 @@ ) -@app.get("/api/platform/{platform}/user/{user_platform_id}") +@app.patch("/api/platforms/{platform}/user/{user_id}") +async def update_user( + platform: str, + user_id: str, + data: Dict[str, Any], + request_user: Optional[User] = Depends(authenticate_user), +): + """ + Update the user information. + + Arguments: + platform: The platform where the user is registered. + user_id: The ID of the user on the platform. + username: Optional username to set for the user. + """ + platform_id = await PlatformId.get_or_none( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if not platform_id: + raise HTTPException(status_code=404, detail="User not found.") + user: User = await platform_id.user + if request_user == user: + if "username" in data: + user.username = data["username"] + await user.save() + return { + "id": user.id, + "username": user.username, + "platform_ids": [ + { + "platform": platform.platform, + "platform_id": platform.platform_id, + "type": platform.type, + } + for platform in await user.platform_ids.all() + ], + } + raise HTTPException(status_code=403, detail="You are not authorized to update this user's information.") + + +@app.get("/api/platform/{platform}/user/{user_id}") async def get_user(platform: str, user_id: str, request_user: Optional[User] = Depends(authenticate_user)): """ Test the API endpoint. 
@@ -31,7 +71,7 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D ) if not platform_id: raise HTTPException(status_code=404, detail="User not found.") - user = await platform_id.user + user: User = await platform_id.user # TODO: create a permission check to allow access to other users if request_user == user: return { diff --git a/electro/authentication.py b/electro/authentication.py index c20c272..414db8d 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -16,28 +16,25 @@ async def authenticate_user( ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" + # Determine the authentication method based on the platform + authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") + # Validate the platform if platform not in SupportedPlatforms: raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") - # If authentication is not enabled, return a user with the provided user_id - if not settings.AUTHENTICATION_ENABLED: - return await _get_or_create_user(platform, user_id) - # Get the authorization token from the header or cookie authorization = header or cookie if not authorization: raise HTTPException(status_code=401, detail="Authorization header or cookie is required.") - if header: + if header and authentication_method == "jwt": if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): raise HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") authorization = authorization.split(" ")[1] - # Determine the authentication method based on the platform - authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") if authentication_method == "api_key": return await _api_key_authenticate_user(platform, user_id, authorization) - return await _jwt_authenticate_user(platform, user_id, authorization) + return await _jwt_authenticate_user(platform, authorization) async def _get_or_create_user(platform: str, user_id: str, username: Optional[str] = None) -> User: @@ -52,33 +49,24 @@ async def _get_or_create_user(platform: str, user_id: str, username: Optional[st return await platform_id.user -async def _api_key_authenticate_user( - platform: str, user_id: str, api_key: Optional[str] = Header(default=None) -) -> User: +async def _api_key_authenticate_user(platform: str, user_id: str, api_key: str) -> User: """Validate the API key provided in the request header.""" if api_key != settings.API_KEY: raise HTTPException(status_code=401, detail="Invalid API key.") return await _get_or_create_user(platform, user_id) -async def _jwt_authenticate_user(platform: str, user_id: str, token: Optional[str] = None) -> User: +async def _jwt_authenticate_user(platform: str, token: str) -> User: try: validated_token = jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) except jwt.ExpiredSignatureError as e: raise HTTPException(status_code=401, detail="Token has expired") from e except jwt.InvalidTokenError as e: raise HTTPException(status_code=401, detail="Invalid token") from e + user_id = validated_token.get(settings.JWT_ID_KEY) username = validated_token.get(settings.JWT_USERNAME_KEY) - if not user_id or not username: raise HTTPException(status_code=401, detail="Invalid token") - platform_id, created = await PlatformId.get_or_create( - platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER - ) - if created: 
- user = await User.create(username=username) - platform_id.user = user - await platform_id.save() - return await platform_id.user + return await _get_or_create_user(platform, user_id, username) diff --git a/electro/settings.py b/electro/settings.py index 749ecf9..596250f 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -28,11 +28,9 @@ class Settings(BaseSettings): # Build urls for static files by removing root path and adding the server url SERVER_URL: str = "http://localhost:8000" APP_ROOT: str = "/app" - # Authentication settings - AUTHENTICATION_ENABLED: bool = True - # Default authentication is API key, this list defines platforms that use JWT authentication : - JWT_PLATFORMS: list[str] = [SupportedPlatforms.CUSTOM] + # Default authentication is API key, this list defines platforms that use JWT authentication instead : + JWT_PLATFORMS: list[SupportedPlatforms] = [] # API key authentication API_KEY: str = "your_api_key_here" # JWT authentication From 4b5eb52a81929b9776d879d6d908ce2f360e8fd9 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 26 May 2025 17:39:02 +0200 Subject: [PATCH 26/57] send command messages in history --- electro/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/electro/app.py b/electro/app.py index 9e56060..99a91bd 100644 --- a/electro/app.py +++ b/electro/app.py @@ -112,7 +112,7 @@ async def get_user_messages( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - messages = Message.filter(user=user, is_temporary=False, is_command=False).order_by("-date_added") + messages = Message.filter(user=user, is_temporary=False).order_by("-date_added") return await paginate_response( messages, format_historical_message, From d7929a060b07ca1ed1e7ba567864d8c087482f15 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 27 May 2025 16:10:28 +0200 Subject: [PATCH 27/57] rename storage service methods --- electro/flow_step.py | 2 +- electro/interfaces.py | 6 +++--- .../storage_services/_base_storage_service.py | 6 +++--- .../storage_services/azure_blob_storage_service.py | 10 +++++----- .../images_storage/storage_services/s3_service.py | 14 +++++++------- .../images_storage/universal_image_storage.py | 12 ++++++------ electro/utils.py | 2 +- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/electro/flow_step.py b/electro/flow_step.py index e117155..80b2d7a 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -512,7 +512,7 @@ async def process_response(self, connector: FlowConnector): # Save the File if self.storage_to_save_file_object_id_to or self.storage_to_save_saved_file_id_to: file_io = BytesIO(await attachment.read()) - file_object_key = await universal_image_storage.upload_image(file_io) + file_object_key = await universal_image_storage.upload_file(file_io) if self.storage_to_save_file_object_id_to: # Save the file object key diff --git a/electro/interfaces.py b/electro/interfaces.py index 035efa8..200d84d 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -175,15 +175,15 @@ async def send_image( if buttons and not caption: raise ValueError("A caption must be provided when sending an image with buttons.") if isinstance(image, File): - image_url = await universal_image_storage.get_image_url(image.storage_file_object_key) + image_url = await universal_image_storage.get_file_url(image.storage_file_object_key) elif isinstance(image, BytesIO): - object_key = await universal_image_storage.upload_image(image) + object_key = await 
universal_image_storage.upload_file(image) image = await File.create( owner=user, storage_service=settings.STORAGE_SERVICE_ID, storage_file_object_key=object_key, ) - image_url = await universal_image_storage.get_image_url(object_key) + image_url = await universal_image_storage.get_file_url(object_key) else: image_url = str(image) if image_url.startswith(settings.APP_ROOT): diff --git a/electro/toolkit/images_storage/storage_services/_base_storage_service.py b/electro/toolkit/images_storage/storage_services/_base_storage_service.py index a64a675..3c39627 100644 --- a/electro/toolkit/images_storage/storage_services/_base_storage_service.py +++ b/electro/toolkit/images_storage/storage_services/_base_storage_service.py @@ -8,7 +8,7 @@ class BaseStorageService(ABC): """Base class for storage services.""" @abstractmethod - async def upload_image(self, image_io: BytesIO) -> str: + async def upload_file(self, image_io: BytesIO) -> str: """Uploads an image to the storage and returns the object key. :param image_io: BytesIO object of the image to upload @@ -18,7 +18,7 @@ async def upload_image(self, image_io: BytesIO) -> str: raise NotImplementedError @abstractmethod - async def download_image(self, object_key: str) -> BytesIO: + async def download_file(self, object_key: str) -> BytesIO: """Downloads an image from the storage and returns a BytesIO object. :param object_key: object key of the image to download @@ -28,7 +28,7 @@ async def download_image(self, object_key: str) -> BytesIO: raise NotImplementedError @abstractmethod - async def get_image_url(self, object_key: str) -> str: + async def get_file_url(self, object_key: str) -> str: """Returns the URL of the image. :param object_key: object key of the image diff --git a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py index cc2e034..9b809c7 100644 --- a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py +++ b/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py @@ -44,7 +44,7 @@ async def _ensure_container_exists(self): except ResourceNotFoundError: await container_client.create_container() - async def upload_image(self, image_io: BytesIO) -> str: + async def upload_file(self, image_io: BytesIO) -> str: """Upload an image to the Azure Blob Storage.""" blob_name = f"image_{os.urandom(8).hex()}.png" async with await self.blob_service_client as client: @@ -56,7 +56,7 @@ async def upload_image(self, image_io: BytesIO) -> str: ) return blob_name - async def download_image(self, object_key: str) -> BytesIO: + async def download_file(self, object_key: str) -> BytesIO: """Download an image from the Azure Blob Storage.""" async with await self.blob_service_client as client: container_client = client.get_container_client(self.container_name) @@ -67,7 +67,7 @@ async def download_image(self, object_key: str) -> BytesIO: raise FileNotFoundError(f"Image with key '{object_key}' not found in the Azure Blob Storage.") from e return BytesIO(await image_data.readall()) - async def _create_image_access_token(self, blob_client: BlobClient) -> str: + async def _create_file_access_token(self, blob_client: BlobClient) -> str: start_time = datetime.datetime.now(datetime.timezone.utc) expiry_time = start_time + datetime.timedelta(days=1) return generate_blob_sas( @@ -80,10 +80,10 @@ async def _create_image_access_token(self, blob_client: BlobClient) -> str: start=start_time, ) - async def get_image_url(self, 
object_key: str) -> str: + async def get_file_url(self, object_key: str) -> str: """Get the URL of an image in the Azure Blob Storage.""" async with await self.blob_service_client as client: container_client = client.get_container_client(self.container_name) blob_client = container_client.get_blob_client(object_key) - token = await self._create_image_access_token(blob_client) + token = await self._create_file_access_token(blob_client) return f"{blob_client.url}?{token}" diff --git a/electro/toolkit/images_storage/storage_services/s3_service.py b/electro/toolkit/images_storage/storage_services/s3_service.py index 54e8d2a..d5bb716 100644 --- a/electro/toolkit/images_storage/storage_services/s3_service.py +++ b/electro/toolkit/images_storage/storage_services/s3_service.py @@ -41,14 +41,14 @@ async def ensure_bucket_exists(self): logger.error(f"Error checking bucket {self.bucket_name}: {e}") raise - async def upload_file(self, file_io: BytesIO, object_key: str, extra_args: dict | None = None): + async def _upload_file(self, file_io: BytesIO, object_key: str, extra_args: dict | None = None): """Upload a file to the S3 bucket.""" await self.ensure_bucket_exists() async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL) as s3: await s3.upload_fileobj(file_io, self.bucket_name, object_key, ExtraArgs=extra_args) logger.info(f"Image uploaded successfully: {object_key}") - async def download_file(self, object_key: str, destination: str | BytesIO | None = None) -> str | BytesIO: + async def _download_file(self, object_key: str, destination: str | BytesIO | None = None) -> str | BytesIO: """Download a file from the S3 bucket.""" await self.ensure_bucket_exists() if not destination: @@ -63,7 +63,7 @@ async def download_file(self, object_key: str, destination: str | BytesIO | None return destination - async def upload_image(self, image_io: BytesIO) -> str: + async def upload_file(self, image_io: BytesIO) -> str: """Uploads an image to the S3 bucket and returns the object key. :param image_io: BytesIO object of the image to upload @@ -73,14 +73,14 @@ async def upload_image(self, image_io: BytesIO) -> str: object_key = str(uuid4()) try: # TODO: [2024-10-05 by Mykola] IT'S NOT ALWAYS JPEG - await self.upload_file(image_io, object_key, extra_args={"ContentType": "image/jpeg"}) + await self._upload_file(image_io, object_key, extra_args={"ContentType": "image/jpeg"}) logger.info(f"Image uploaded successfully: {object_key}") return object_key except Exception as e: logger.error(f"Failed to upload image: {e}") raise - async def download_image(self, object_key: str) -> BytesIO: + async def download_file(self, object_key: str) -> BytesIO: """Downloads an image from the S3 bucket and returns a BytesIO object. :param object_key: object key of the image to download @@ -89,14 +89,14 @@ async def download_image(self, object_key: str) -> BytesIO: """ image_io = BytesIO() try: - await self.download_file(object_key, image_io) + await self._download_file(object_key, image_io) logger.info(f"Image downloaded successfully: {object_key}") return image_io except Exception as e: logger.error(f"Failed to download image: {e}") raise - async def get_image_url(self, object_key: str) -> str: + async def get_file_url(self, object_key: str) -> str: """Returns the URL of the image. 
:param object_key: object key of the image diff --git a/electro/toolkit/images_storage/universal_image_storage.py b/electro/toolkit/images_storage/universal_image_storage.py index 0dcb653..6814992 100644 --- a/electro/toolkit/images_storage/universal_image_storage.py +++ b/electro/toolkit/images_storage/universal_image_storage.py @@ -17,17 +17,17 @@ def __init__(self, storage_service: BaseStorageService): """Initialize the UniversalImageStorage class.""" self.storage_service = storage_service - async def upload_image(self, image_io: BytesIO) -> str: + async def upload_file(self, image_io: BytesIO) -> str: """Upload an image to the storage service.""" - return await self.storage_service.upload_image(image_io) + return await self.storage_service.upload_file(image_io) - async def download_image(self, object_key: str) -> BytesIO: + async def download_file(self, object_key: str) -> BytesIO: """Download an image from the storage service.""" - return await self.storage_service.download_image(object_key) + return await self.storage_service.download_file(object_key) - async def get_image_url(self, object_key: str) -> str: + async def get_file_url(self, object_key: str) -> str: """Get the URL of the image from the storage service.""" - return await self.storage_service.get_image_url(object_key) + return await self.storage_service.get_file_url(object_key) STORAGES_IDS_TO_SERVICES = { diff --git a/electro/utils.py b/electro/utils.py index 5bd46bc..2c49503 100644 --- a/electro/utils.py +++ b/electro/utils.py @@ -22,7 +22,7 @@ async def format_historical_message(message: Message) -> Dict[str, Any]: if message.type == Message.MessageTypes.IMAGE: if len(message.files) > 0: image = message.files[0] - image_url = await universal_image_storage.get_image_url(image.storage_file_object_key) + image_url = await universal_image_storage.get_file_url(image.storage_file_object_key) else: image_url = message.content return { From ca12518bac261324f4591d9af7867ff5f884da16 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 28 May 2025 14:28:42 +0200 Subject: [PATCH 28/57] handle files and unique response format --- electro/authentication.py | 2 +- electro/flow_step.py | 13 +- electro/interfaces.py | 125 ++++++++---------- electro/models.py | 5 +- electro/settings.py | 2 +- electro/toolkit/files_storage/__init__.py | 1 + .../storage_services/__init__.py | 2 +- .../storage_services/_base_storage_service.py | 38 ++++++ .../azure_blob_storage_service.py | 20 +-- .../storage_services/s3_service.py | 50 +++---- .../storages_enums.py | 0 .../universal_file_storage.py} | 18 +-- electro/toolkit/images_storage/__init__.py | 1 - .../storage_services/_base_storage_service.py | 38 ------ electro/utils.py | 36 ++++- poetry.lock | 102 +++++++++++++- pyproject.toml | 1 + 17 files changed, 287 insertions(+), 167 deletions(-) create mode 100644 electro/toolkit/files_storage/__init__.py rename electro/toolkit/{images_storage => files_storage}/storage_services/__init__.py (85%) create mode 100644 electro/toolkit/files_storage/storage_services/_base_storage_service.py rename electro/toolkit/{images_storage => files_storage}/storage_services/azure_blob_storage_service.py (82%) rename electro/toolkit/{images_storage => files_storage}/storage_services/s3_service.py (70%) rename electro/toolkit/{images_storage => files_storage}/storages_enums.py (100%) rename electro/toolkit/{images_storage/universal_image_storage.py => files_storage/universal_file_storage.py} (66%) delete mode 100644 electro/toolkit/images_storage/__init__.py delete mode 
100644 electro/toolkit/images_storage/storage_services/_base_storage_service.py diff --git a/electro/authentication.py b/electro/authentication.py index 414db8d..24a8b6f 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -15,7 +15,7 @@ async def authenticate_user( cookie: Optional[str] = Cookie(default=None, alias="Authorization"), ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" - + return await _get_or_create_user(platform, user_id) # Determine the authentication method based on the platform authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") diff --git a/electro/flow_step.py b/electro/flow_step.py index 80b2d7a..f077155 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -18,7 +18,7 @@ from .settings import settings from .substitutions import BaseSubstitution, GlobalAbstractChannel, resolve_channel from .toolkit.decorators import with_constant_typing -from .toolkit.images_storage.universal_image_storage import universal_image_storage +from .toolkit.files_storage.universal_file_storage import universal_file_storage from .toolkit.loguru_logging import logger from .toolkit.templated_i18n import TemplatedString @@ -244,9 +244,7 @@ async def send_message( ) channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) files = await self._get_files_to_send(connector) - for file in files: - await connector.interface.send_image(file, connector.user, channel_to_send_to) - await connector.interface.send_message(message, connector.user, channel_to_send_to, buttons) + await connector.interface.send_message(message, connector.user, channel_to_send_to, files, buttons) @with_constant_typing() async def run( @@ -355,8 +353,8 @@ async def send_message( await self._get_formatted_message(message, connector) if isinstance(message, TemplatedString) else message ) channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) - await connector.interface.send_image( - self.file, connector.user, channel_to_send_to, caption=self.caption, buttons=buttons + await connector.interface.send_message( + self.caption, connector.user, channel_to_send_to, [self.file], buttons=buttons ) @@ -512,7 +510,8 @@ async def process_response(self, connector: FlowConnector): # Save the File if self.storage_to_save_file_object_id_to or self.storage_to_save_saved_file_id_to: file_io = BytesIO(await attachment.read()) - file_object_key = await universal_image_storage.upload_file(file_io) + content_type = attachment.content_type + file_object_key = await universal_file_storage.upload_file(file_io, content_type) if self.storage_to_save_file_object_id_to: # Save the file object key diff --git a/electro/interfaces.py b/electro/interfaces.py index 200d84d..4683210 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -1,4 +1,5 @@ import contextvars +import mimetypes import pathlib from abc import ABC, abstractmethod from contextlib import asynccontextmanager @@ -6,6 +7,7 @@ from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union from fastapi import WebSocket +from PIL import Image from .enums import ResponseTypes, SupportedPlatforms from .flow_connector import FlowConnectorEvents @@ -13,7 +15,8 @@ from .models import Button, Channel, File, Guild, Message, Role, User from .schemas import ButtonClick, ReceivedMessage from .settings import settings -from 
.toolkit.images_storage.universal_image_storage import universal_image_storage +from .toolkit.files_storage.universal_file_storage import universal_file_storage +from .utils import create_and_upload_file if TYPE_CHECKING: from .contrib.buttons import BaseButton @@ -94,9 +97,10 @@ async def _format_guild(self, guild: Optional[Guild]) -> Dict[str, Any]: async def send_message( self, - message: str, - user: Optional[User], - channel: Optional[Channel], + message: str = "", + user: Optional[User] = None, + channel: Optional[Channel] = None, + files: Optional[List[Union[File, BytesIO, str, pathlib.Path]]] = None, buttons: Optional[List["BaseButton"]] = None, delete_after: Optional[Union[int, str]] = None, ): @@ -125,14 +129,24 @@ async def send_message( channel=channel, content=message_chunk, ) + + # Send buttons only with the last message chunk if i == len(message_chunks) - 1: buttons = await self._create_and_format_buttons(buttons, message) else: buttons = [] + + # Send files only with the first message chunk + if i == 0: + processed_files = [await self._process_message_file(file, message) for file in files or []] + else: + processed_files = [] + data = { "user": user_data, "channel": channel_data, "message": message_chunk, + "files": processed_files, "buttons": buttons, "delete_after": delete_after, } @@ -143,79 +157,54 @@ async def send_message( } ) - async def send_image( + async def _process_message_file( self, - image: File | BytesIO | str | pathlib.Path, - user: Optional[User], - channel: Optional[Channel], - caption: Optional[str] = None, - buttons: Optional[List["BaseButton"]] = None, - delete_after: Optional[int] = None, + file: File | BytesIO | str | pathlib.Path, + message: Message, ): """ - Send images to the client as a link: + Send files to the client as a link: - If the image is a File, the link to the blob storage location will be sent. - If the image is a BytesIO object, it will be uploaded to blob storage and the link will be sent. - If the image is a string, it will be sent as is so make sure it is a valid URL. - If the image is a pathlib.Path object, it will be sent as a link to the static file endpoint. + If the file is a File, the link to the blob storage location will be sent. + If the file is a BytesIO object, it will be uploaded to blob storage and the link will be sent. + If the file is a string, it will be sent as is so make sure it is a valid URL. + If the file is a pathlib.Path object, it will be sent as a link to the static file endpoint. Arguments: - image: The image to be sent. - user: The user who will receive the image. - channel: The channel the image is being sent to. - caption: The caption to be included with the image. - buttons: A list of buttons to be included with the image. - delete_after: The time in seconds after which the image should be deleted. - - if None, the image will not be deleted. - - if "next", the image will be deleted after the next message is sent. - - if an integer, the image will be deleted after that many seconds. - + file: The file to be sent. + message: The message to which the file is attached. 
""" - if buttons and not caption: - raise ValueError("A caption must be provided when sending an image with buttons.") - if isinstance(image, File): - image_url = await universal_image_storage.get_file_url(image.storage_file_object_key) - elif isinstance(image, BytesIO): - object_key = await universal_image_storage.upload_file(image) - image = await File.create( - owner=user, - storage_service=settings.STORAGE_SERVICE_ID, - storage_file_object_key=object_key, - ) - image_url = await universal_image_storage.get_file_url(object_key) + if isinstance(file, BytesIO): + file = await create_and_upload_file(file, message.user) + + if isinstance(file, File) or issubclass(type(file), File): + file_url = await universal_file_storage.get_file_url(file.storage_file_object_key) + height = file.height + width = file.width + content_type = file.content_type else: - image_url = str(image) - if image_url.startswith(settings.APP_ROOT): - image_url = settings.SERVER_URL + image_url[len(settings.APP_ROOT) :] - if str(image_url).endswith(".gif") and (buttons or caption): - raise ValueError("GIFs do not support buttons or captions.") - - message = await Message.create( - is_temporary=delete_after is not None, - is_bot_message=True, - type=Message.MessageTypes.IMAGE, - user=user, - channel=channel, - content=image_url, - caption=caption, - ) - if isinstance(image, File): - await message.files.add(image) - data = { - "user": await self._format_user(user), - "channel": await self._format_channel(channel), - "image": image_url, - "caption": caption, - "buttons": await self._create_and_format_buttons(buttons), - "delete_after": delete_after, + file_url = str(file) + content_type, _ = mimetypes.guess_type(file_url) + try: + with Image.open(file) as img: + width, height = img.width, img.height + except Exception: # pylint: disable=W0718 + width, height = None, None + + if file_url.startswith(settings.APP_ROOT): + file_url = settings.SERVER_URL + file_url[len(settings.APP_ROOT) :] + + if isinstance(file, File): + await message.files.add(file) + return { + "type": str(type(file)), + "is_file": isinstance(file, File), + "is_file_subclass": issubclass(type(file), File), + "url": file_url, + "height": height, + "width": width, + "content_type": content_type, } - await self.send_json( - { - "action": ResponseTypes.IMAGE, - "content": data, - } - ) async def add_role(self, user: User, role: Role): """ diff --git a/electro/models.py b/electro/models.py index 1fb4c63..37d3b3d 100644 --- a/electro/models.py +++ b/electro/models.py @@ -7,7 +7,7 @@ from tortoise import fields from tortoise.fields import ForeignKeyRelation, ManyToManyField -from .toolkit.images_storage.storages_enums import StoragesIDs +from .toolkit.files_storage.storages_enums import StoragesIDs from .toolkit.tortoise_orm import Model @@ -99,6 +99,9 @@ class File(BaseModel): """The model for the file.""" owner: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", null=True) + content_type = fields.CharField(max_length=255, null=True) + height = fields.IntField(null=True) + width = fields.IntField(null=True) storage_service: StoragesIDs = fields.CharEnumField(StoragesIDs, max_length=32) storage_file_object_key = fields.TextField() file_name = fields.TextField(null=True) diff --git a/electro/settings.py b/electro/settings.py index 596250f..716e120 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -4,7 +4,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict from .enums import SupportedPlatforms -from 
.toolkit.images_storage.storages_enums import StoragesIDs
+from .toolkit.files_storage.storages_enums import StoragesIDs
 
 
 class Settings(BaseSettings):
diff --git a/electro/toolkit/files_storage/__init__.py b/electro/toolkit/files_storage/__init__.py
new file mode 100644
index 0000000..e66b784
--- /dev/null
+++ b/electro/toolkit/files_storage/__init__.py
@@ -0,0 +1 @@
+"""The Files storage. It is responsible for uploading and downloading files to and from a storage service."""
diff --git a/electro/toolkit/images_storage/storage_services/__init__.py b/electro/toolkit/files_storage/storage_services/__init__.py
similarity index 85%
rename from electro/toolkit/images_storage/storage_services/__init__.py
rename to electro/toolkit/files_storage/storage_services/__init__.py
index 2662152..c2a5338 100644
--- a/electro/toolkit/images_storage/storage_services/__init__.py
+++ b/electro/toolkit/files_storage/storage_services/__init__.py
@@ -1,4 +1,4 @@
-"""Storage Services Module. Responsible for uploading and downloading images from different storage services."""
+"""Storage Services Module. Responsible for uploading and downloading files from different storage services."""
 
 from ._base_storage_service import BaseStorageService
 from .azure_blob_storage_service import AzureBlobStorageService
diff --git a/electro/toolkit/files_storage/storage_services/_base_storage_service.py b/electro/toolkit/files_storage/storage_services/_base_storage_service.py
new file mode 100644
index 0000000..1faa45c
--- /dev/null
+++ b/electro/toolkit/files_storage/storage_services/_base_storage_service.py
@@ -0,0 +1,38 @@
+"""The `BaseStorageService` is an abstract class that defines the interface for a storage service."""
+
+from abc import ABC, abstractmethod
+from io import BytesIO
+
+
+class BaseStorageService(ABC):
+    """Base class for storage services."""
+
+    @abstractmethod
+    async def upload_file(self, file_io: BytesIO, content_type: str) -> str:
+        """Uploads a file to the storage and returns the object key.
+
+        :param file_io: BytesIO object of the file to upload
+        :return: object key of the uploaded file
+
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    async def download_file(self, object_key: str) -> BytesIO:
+        """Downloads a file from the storage and returns a BytesIO object.
+
+        :param object_key: object key of the file to download
+        :return: BytesIO object of the downloaded file
+
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    async def get_file_url(self, object_key: str) -> str:
+        """Returns the URL of the file.
+ + :param object_key: object key of the file + :return: URL of the file + + """ + raise NotImplementedError diff --git a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py b/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py similarity index 82% rename from electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py rename to electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py index 9b809c7..b0339da 100644 --- a/electro/toolkit/images_storage/storage_services/azure_blob_storage_service.py +++ b/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py @@ -10,7 +10,7 @@ from azure.storage.blob.aio import BlobClient, BlobServiceClient from ....settings import settings -from ...images_storage.storage_services._base_storage_service import BaseStorageService +from ...files_storage.storage_services._base_storage_service import BaseStorageService class AzureBlobStorageService(BaseStorageService): @@ -44,28 +44,28 @@ async def _ensure_container_exists(self): except ResourceNotFoundError: await container_client.create_container() - async def upload_file(self, image_io: BytesIO) -> str: - """Upload an image to the Azure Blob Storage.""" - blob_name = f"image_{os.urandom(8).hex()}.png" + async def upload_file(self, file_io: BytesIO, content_type: str) -> str: + """Upload an file to the Azure Blob Storage.""" + blob_name = f"file_{os.urandom(8).hex()}.png" async with await self.blob_service_client as client: await self._ensure_container_exists() container_client = client.get_container_client(self.container_name) blob_client = container_client.get_blob_client(blob_name) await blob_client.upload_blob( - image_io, blob_type="BlockBlob", content_settings=ContentSettings(content_type="image/png") + file_io, blob_type="BlockBlob", content_settings=ContentSettings(content_type=content_type) ) return blob_name async def download_file(self, object_key: str) -> BytesIO: - """Download an image from the Azure Blob Storage.""" + """Download an file from the Azure Blob Storage.""" async with await self.blob_service_client as client: container_client = client.get_container_client(self.container_name) blob_client = container_client.get_blob_client(object_key) try: - image_data = await blob_client.download_blob() + file_data = await blob_client.download_blob() except ResourceNotFoundError as e: - raise FileNotFoundError(f"Image with key '{object_key}' not found in the Azure Blob Storage.") from e - return BytesIO(await image_data.readall()) + raise FileNotFoundError(f"File with key '{object_key}' not found in the Azure Blob Storage.") from e + return BytesIO(await file_data.readall()) async def _create_file_access_token(self, blob_client: BlobClient) -> str: start_time = datetime.datetime.now(datetime.timezone.utc) @@ -81,7 +81,7 @@ async def _create_file_access_token(self, blob_client: BlobClient) -> str: ) async def get_file_url(self, object_key: str) -> str: - """Get the URL of an image in the Azure Blob Storage.""" + """Get the URL of an file in the Azure Blob Storage.""" async with await self.blob_service_client as client: container_client = client.get_container_client(self.container_name) blob_client = container_client.get_blob_client(object_key) diff --git a/electro/toolkit/images_storage/storage_services/s3_service.py b/electro/toolkit/files_storage/storage_services/s3_service.py similarity index 70% rename from electro/toolkit/images_storage/storage_services/s3_service.py rename to 
electro/toolkit/files_storage/storage_services/s3_service.py index d5bb716..7e9c77b 100644 --- a/electro/toolkit/images_storage/storage_services/s3_service.py +++ b/electro/toolkit/files_storage/storage_services/s3_service.py @@ -1,4 +1,4 @@ -"""The S3Service class is responsible for uploading and downloading images to and from an S3 bucket.""" +"""The S3Service class is responsible for uploading and downloading files to and from an S3 bucket.""" from io import BytesIO from uuid import uuid4 @@ -7,12 +7,12 @@ from botocore.exceptions import ClientError from ....settings import settings -from ....toolkit.images_storage.storage_services._base_storage_service import BaseStorageService +from ....toolkit.files_storage.storage_services._base_storage_service import BaseStorageService from ....toolkit.loguru_logging import logger class S3Service(BaseStorageService): - """The S3Service class is responsible for uploading and downloading images to and from an S3 bucket.""" + """The S3Service class is responsible for uploading and downloading files to and from an S3 bucket.""" # TODO: [13.06.2024 by Mykola] Allow the bucket_name to be passed as an argument to the __init__ method. # def __init__(self, bucket_name: str | None = None): @@ -46,7 +46,7 @@ async def _upload_file(self, file_io: BytesIO, object_key: str, extra_args: dict await self.ensure_bucket_exists() async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL) as s3: await s3.upload_fileobj(file_io, self.bucket_name, object_key, ExtraArgs=extra_args) - logger.info(f"Image uploaded successfully: {object_key}") + logger.info(f"File uploaded successfully: {object_key}") async def _download_file(self, object_key: str, destination: str | BytesIO | None = None) -> str | BytesIO: """Download a file from the S3 bucket.""" @@ -59,55 +59,55 @@ async def _download_file(self, object_key: str, destination: str | BytesIO | Non await s3.download_file(self.bucket_name, object_key, destination) elif isinstance(destination, BytesIO): await s3.download_fileobj(self.bucket_name, object_key, destination) - logger.info(f"Image downloaded successfully: {object_key}") + logger.info(f"File downloaded successfully: {object_key}") return destination - async def upload_file(self, image_io: BytesIO) -> str: - """Uploads an image to the S3 bucket and returns the object key. + async def upload_file(self, file_io: BytesIO, content_type: str) -> str: + """Uploads an file to the S3 bucket and returns the object key. - :param image_io: BytesIO object of the image to upload - :return: object key of the uploaded image + :param file_io: BytesIO object of the file to upload + :return: object key of the uploaded file """ object_key = str(uuid4()) try: # TODO: [2024-10-05 by Mykola] IT'S NOT ALWAYS JPEG - await self._upload_file(image_io, object_key, extra_args={"ContentType": "image/jpeg"}) - logger.info(f"Image uploaded successfully: {object_key}") + await self._upload_file(file_io, object_key, extra_args={"ContentType": content_type}) + logger.info(f"File uploaded successfully: {object_key}") return object_key except Exception as e: - logger.error(f"Failed to upload image: {e}") + logger.error(f"Failed to upload file: {e}") raise async def download_file(self, object_key: str) -> BytesIO: - """Downloads an image from the S3 bucket and returns a BytesIO object. + """Downloads an file from the S3 bucket and returns a BytesIO object. 
- :param object_key: object key of the image to download - :return: BytesIO object of the downloaded image + :param object_key: object key of the file to download + :return: BytesIO object of the downloaded file """ - image_io = BytesIO() + file_io = BytesIO() try: - await self._download_file(object_key, image_io) - logger.info(f"Image downloaded successfully: {object_key}") - return image_io + await self._download_file(object_key, file_io) + logger.info(f"File downloaded successfully: {object_key}") + return file_io except Exception as e: - logger.error(f"Failed to download image: {e}") + logger.error(f"Failed to download file: {e}") raise async def get_file_url(self, object_key: str) -> str: - """Returns the URL of the image. + """Returns the URL of the file. - :param object_key: object key of the image - :return: URL of the image + :param object_key: object key of the file + :return: URL of the file """ async with self.session.client("s3", endpoint_url=settings.S3_ENDPOINT_URL): try: url = f"{settings.S3_ENDPOINT_URL}/{self.bucket_name}/{object_key}" - logger.info(f"Image URL: {url}") + logger.info(f"File URL: {url}") return url except Exception as e: - logger.error(f"Failed to get image URL: {e}") + logger.error(f"Failed to get file URL: {e}") raise diff --git a/electro/toolkit/images_storage/storages_enums.py b/electro/toolkit/files_storage/storages_enums.py similarity index 100% rename from electro/toolkit/images_storage/storages_enums.py rename to electro/toolkit/files_storage/storages_enums.py diff --git a/electro/toolkit/images_storage/universal_image_storage.py b/electro/toolkit/files_storage/universal_file_storage.py similarity index 66% rename from electro/toolkit/images_storage/universal_image_storage.py rename to electro/toolkit/files_storage/universal_file_storage.py index 6814992..c95ba47 100644 --- a/electro/toolkit/images_storage/universal_image_storage.py +++ b/electro/toolkit/files_storage/universal_file_storage.py @@ -6,27 +6,27 @@ from .storages_enums import StoragesIDs -class UniversalImageStorage: +class UniversalFileStorage: """ - The UniversalImageStorage class is responsible for uploading and downloading images to and from a storage service. + The UniversalFileStorage class is responsible for uploading and downloading files to and from a storage service. It can be used with any storage service that implements the BaseStorageService class. 
""" def __init__(self, storage_service: BaseStorageService): - """Initialize the UniversalImageStorage class.""" + """Initialize the UniversalFileStorage class.""" self.storage_service = storage_service - async def upload_file(self, image_io: BytesIO) -> str: - """Upload an image to the storage service.""" - return await self.storage_service.upload_file(image_io) + async def upload_file(self, file_io: BytesIO, content_type: str) -> str: + """Upload an file to the storage service.""" + return await self.storage_service.upload_file(file_io, content_type) async def download_file(self, object_key: str) -> BytesIO: - """Download an image from the storage service.""" + """Download an file from the storage service.""" return await self.storage_service.download_file(object_key) async def get_file_url(self, object_key: str) -> str: - """Get the URL of the image from the storage service.""" + """Get the URL of the file from the storage service.""" return await self.storage_service.get_file_url(object_key) @@ -44,4 +44,4 @@ def choose_storage_service(default: StoragesIDs = StoragesIDs.S3) -> BaseStorage return storage_service_class() -universal_image_storage = UniversalImageStorage(storage_service=choose_storage_service()) +universal_file_storage = UniversalFileStorage(storage_service=choose_storage_service()) diff --git a/electro/toolkit/images_storage/__init__.py b/electro/toolkit/images_storage/__init__.py deleted file mode 100644 index e76c591..0000000 --- a/electro/toolkit/images_storage/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The Images storage. It is responsible for uploading and downloading images to and from a storage service.""" diff --git a/electro/toolkit/images_storage/storage_services/_base_storage_service.py b/electro/toolkit/images_storage/storage_services/_base_storage_service.py deleted file mode 100644 index 3c39627..0000000 --- a/electro/toolkit/images_storage/storage_services/_base_storage_service.py +++ /dev/null @@ -1,38 +0,0 @@ -"""The `BaseStorageService` is an abstract class that defines the interface for a storage service.""" - -from abc import ABC, abstractmethod -from io import BytesIO - - -class BaseStorageService(ABC): - """Base class for storage services.""" - - @abstractmethod - async def upload_file(self, image_io: BytesIO) -> str: - """Uploads an image to the storage and returns the object key. - - :param image_io: BytesIO object of the image to upload - :return: object key of the uploaded image - - """ - raise NotImplementedError - - @abstractmethod - async def download_file(self, object_key: str) -> BytesIO: - """Downloads an image from the storage and returns a BytesIO object. - - :param object_key: object key of the image to download - :return: BytesIO object of the downloaded image - - """ - raise NotImplementedError - - @abstractmethod - async def get_file_url(self, object_key: str) -> str: - """Returns the URL of the image. 
- - :param object_key: object key of the image - :return: URL of the image - - """ - raise NotImplementedError diff --git a/electro/utils.py b/electro/utils.py index 2c49503..966d003 100644 --- a/electro/utils.py +++ b/electro/utils.py @@ -1,9 +1,13 @@ -from typing import Any, Callable, Dict +import mimetypes +from io import BytesIO +from typing import Any, Callable, Dict, Optional +from PIL import Image from tortoise.queryset import QuerySet -from .models import Message -from .toolkit.images_storage.universal_image_storage import universal_image_storage +from .models import File, Message, User +from .settings import settings +from .toolkit.files_storage.universal_file_storage import universal_file_storage async def format_historical_message(message: Message) -> Dict[str, Any]: @@ -22,7 +26,7 @@ async def format_historical_message(message: Message) -> Dict[str, Any]: if message.type == Message.MessageTypes.IMAGE: if len(message.files) > 0: image = message.files[0] - image_url = await universal_image_storage.get_file_url(image.storage_file_object_key) + image_url = await universal_file_storage.get_file_url(image.storage_file_object_key) else: image_url = message.content return { @@ -65,3 +69,27 @@ async def paginate_response(data: QuerySet, formatter: Callable, limit: int, off "next": next_page, "data": formatted_data, } + + +async def create_and_upload_file(file: BytesIO, owner: User) -> File: + content_type, _ = mimetypes.guess_type(file.name) + width, height = get_image_dimensions(file) + object_key = await universal_file_storage.upload_file(file, content_type=content_type) + return await File.create( + owner=owner, + content_type=content_type, + width=width, + height=height, + storage_service=settings.STORAGE_SERVICE_ID, + storage_file_object_key=object_key, + ) + + +def get_image_dimensions(file: BytesIO) -> tuple[Optional[int], Optional[int]]: + """Get image dimensions from a BytesIO object.""" + try: + file.seek(0) + with Image.open(file) as img: + return img.width, img.height + except Exception: # pylint: disable=W0718 + return None, None diff --git a/poetry.lock b/poetry.lock index 91637f1..f4d5ef6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1744,6 +1744,106 @@ files = [ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] +[[package]] +name = "pillow" +version = "11.2.1" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"}, + {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"}, + {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61"}, + {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1"}, + {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c"}, + {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d"}, + {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97"}, + {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579"}, + {file = "pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d"}, + {file = "pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad"}, + {file = "pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2"}, + {file = "pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70"}, + {file = "pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf"}, + {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7"}, + {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8"}, + {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600"}, + {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788"}, + {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e"}, + {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e"}, + {file = "pillow-11.2.1-cp311-cp311-win32.whl", hash = "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6"}, + {file = "pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193"}, + {file = "pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7"}, + {file = "pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f"}, + {file = "pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b"}, + {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d"}, + {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4"}, + {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d"}, + {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4"}, + {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443"}, + {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c"}, + {file = "pillow-11.2.1-cp312-cp312-win32.whl", hash = 
"sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3"}, + {file = "pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941"}, + {file = "pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb"}, + {file = "pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28"}, + {file = "pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830"}, + {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0"}, + {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1"}, + {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f"}, + {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155"}, + {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14"}, + {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b"}, + {file = "pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2"}, + {file = "pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691"}, + {file = "pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c"}, + {file = "pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22"}, + {file = "pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7"}, + {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16"}, + {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b"}, + {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406"}, + {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91"}, + {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751"}, + {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9"}, + {file = "pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd"}, + {file = "pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e"}, + {file = "pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = 
"sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681"}, + {file = "pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8"}, + {file = "pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909"}, + {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928"}, + {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79"}, + {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35"}, + {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb"}, + {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a"}, + {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36"}, + {file = "pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67"}, + {file = "pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1"}, + {file = "pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193"}, + {file = "pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f"}, + {file = "pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044"}, + {file = "pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +test-arrow = ["pyarrow"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + [[package]] name = "platformdirs" version = "4.3.6" @@ -3224,4 +3324,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "ad97dcb629919c9eaa8d416320ff979abfb56334be409605a665c27177cf4fad" +content-hash = "fb6235544488a8832516f436ae689b7f1f58c6e9ab89cb973bdfe45b9fc17c2c" diff --git a/pyproject.toml b/pyproject.toml index 98afa07..a0dd966 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ aioboto3 = "^13.1.1" azure-storage-blob = "^12.23.1" azure-identity = "^1.18.0" fastapi = {extras = ["standard"], version = "^0.115.5"} +pillow = "^11.2.1" [tool.poetry.group.dev.dependencies] From 14240e6377ae53ae00c18f77034b7549daed4c26 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 28 May 2025 17:35:21 +0200 Subject: [PATCH 29/57] do not allow bytes as message files --- electro/authentication.py | 1 - electro/interfaces.py | 9 ++------- electro/utils.py | 26 +++++++++++--------------- 3 files changed, 13 insertions(+), 23 deletions(-) diff --git a/electro/authentication.py b/electro/authentication.py index 24a8b6f..d925db7 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -15,7 +15,6 @@ async def authenticate_user( cookie: Optional[str] = Cookie(default=None, alias="Authorization"), ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" - return await _get_or_create_user(platform, user_id) # Determine the authentication method based on the platform authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") diff --git a/electro/interfaces.py b/electro/interfaces.py index 4683210..3b3064b 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -3,7 +3,6 @@ import pathlib from abc import ABC, abstractmethod from contextlib import asynccontextmanager -from io import BytesIO from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union from fastapi import WebSocket @@ -16,7 +15,6 @@ from .schemas import ButtonClick, ReceivedMessage from .settings import settings from .toolkit.files_storage.universal_file_storage import universal_file_storage -from .utils import create_and_upload_file if TYPE_CHECKING: from .contrib.buttons import BaseButton @@ -100,7 +98,7 @@ async def send_message( message: str = "", user: Optional[User] = None, channel: Optional[Channel] = None, - files: Optional[List[Union[File, BytesIO, str, pathlib.Path]]] = None, + files: Optional[List[Union[File, str, pathlib.Path]]] = None, buttons: Optional[List["BaseButton"]] = None, delete_after: Optional[Union[int, str]] = None, ): @@ 
-159,7 +157,7 @@ async def send_message( async def _process_message_file( self, - file: File | BytesIO | str | pathlib.Path, + file: File | str | pathlib.Path, message: Message, ): """ @@ -174,9 +172,6 @@ async def _process_message_file( file: The file to be sent. message: The message to which the file is attached. """ - if isinstance(file, BytesIO): - file = await create_and_upload_file(file, message.user) - if isinstance(file, File) or issubclass(type(file), File): file_url = await universal_file_storage.get_file_url(file.storage_file_object_key) height = file.height diff --git a/electro/utils.py b/electro/utils.py index 966d003..0164ccf 100644 --- a/electro/utils.py +++ b/electro/utils.py @@ -1,6 +1,5 @@ -import mimetypes from io import BytesIO -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Dict from PIL import Image from tortoise.queryset import QuerySet @@ -71,9 +70,16 @@ async def paginate_response(data: QuerySet, formatter: Callable, limit: int, off } -async def create_and_upload_file(file: BytesIO, owner: User) -> File: - content_type, _ = mimetypes.guess_type(file.name) - width, height = get_image_dimensions(file) +async def create_and_upload_file(file: BytesIO, owner: User, content_type: str) -> File: + if content_type.startswith("image/"): + try: + file.seek(0) + with Image.open(file) as img: + width, height = img.width, img.height + except Exception: # pylint: disable=W0718 + width, height = None, None + else: + width, height = None, None object_key = await universal_file_storage.upload_file(file, content_type=content_type) return await File.create( owner=owner, @@ -83,13 +89,3 @@ async def create_and_upload_file(file: BytesIO, owner: User) -> File: storage_service=settings.STORAGE_SERVICE_ID, storage_file_object_key=object_key, ) - - -def get_image_dimensions(file: BytesIO) -> tuple[Optional[int], Optional[int]]: - """Get image dimensions from a BytesIO object.""" - try: - file.seek(0) - with Image.open(file) as img: - return img.width, img.height - except Exception: # pylint: disable=W0718 - return None, None From 831c4836ab0e85a93422ec4eff2e924f69cf4359 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 2 Jun 2025 10:27:35 +0200 Subject: [PATCH 30/57] remove test fields in files response --- electro/interfaces.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/electro/interfaces.py b/electro/interfaces.py index 3b3064b..93083a8 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -192,9 +192,6 @@ async def _process_message_file( if isinstance(file, File): await message.files.add(file) return { - "type": str(type(file)), - "is_file": isinstance(file, File), - "is_file_subclass": issubclass(type(file), File), "url": file_url, "height": height, "width": width, From 6d178f37da1f75ad527c3c0c39ec51c059256984 Mon Sep 17 00:00:00 2001 From: Julien Droulez Date: Mon, 2 Jun 2025 15:59:09 +0200 Subject: [PATCH 31/57] feat: add cookie setting endpoint --- electro/app.py | 41 ++++++++++++++++++++++++++++++++++------- electro/schemas.py | 4 ++++ 2 files changed, 38 insertions(+), 7 deletions(-) diff --git a/electro/app.py b/electro/app.py index 99a91bd..9dbbca3 100644 --- a/electro/app.py +++ b/electro/app.py @@ -2,7 +2,8 @@ from typing import Any, Dict, Optional -from fastapi import Depends, FastAPI, HTTPException, WebSocket, WebSocketDisconnect +from fastapi import Depends, FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Response +from fastapi.middleware.cors import CORSMiddleware from fastapi.websockets import 
WebSocketState from tortoise.contrib.fastapi import register_tortoise @@ -11,6 +12,7 @@ from .models import Message, PlatformId, User from .toolkit.tortoise_orm import get_tortoise_config from .utils import format_historical_message, paginate_response +from .schemas import CookieToken app = FastAPI( title="Electro API", @@ -20,6 +22,18 @@ # redoc_url=None, ) +# CORS + +origins = ["*"] + +app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + @app.patch("/api/platforms/{platform}/user/{user_id}") async def update_user( @@ -58,7 +72,8 @@ async def update_user( for platform in await user.platform_ids.all() ], } - raise HTTPException(status_code=403, detail="You are not authorized to update this user's information.") + raise HTTPException( + status_code=403, detail="You are not authorized to update this user's information.") @app.get("/api/platform/{platform}/user/{user_id}") @@ -86,7 +101,8 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D for platform in await user.platform_ids.all() ], } - raise HTTPException(status_code=403, detail="You are not authorized to access this user's information.") + raise HTTPException( + status_code=403, detail="You are not authorized to access this user's information.") @app.get("/api/platform/{platform}/user/{user_id}/messages") @@ -112,7 +128,8 @@ async def get_user_messages( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - messages = Message.filter(user=user, is_temporary=False).order_by("-date_added") + messages = Message.filter( + user=user, is_temporary=False).order_by("-date_added") return await paginate_response( messages, format_historical_message, @@ -120,7 +137,8 @@ async def get_user_messages( offset=offset, url=f"/api/platform/{platform}/user/{user_id}/messages", ) - raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") + raise HTTPException( + status_code=403, detail="You are not authorized to access this user's message history.") @app.post("/api/platform/{platform}/user/{user_id}/messages") @@ -141,7 +159,8 @@ async def process_message( interface = APIInterface() await interface.handle_incoming_action(user, platform, data) return interface.messages.get() - raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") + raise HTTPException( + status_code=403, detail="You are not authorized to send messages on behalf of this user.") @app.websocket("/websocket/platform/{platform}/user/{user_id}") @@ -167,7 +186,15 @@ async def websocket_endpoint( await interface.handle_incoming_action(user, platform, data) except WebSocketDisconnect: await interface.disconnect() - raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") + raise HTTPException( + status_code=403, detail="You are not authorized to send messages on behalf of this user.") + + +@app.post("/api/cookies") +async def set_cookie(data: CookieToken, response: Response): + cookie_value = "" if data.token is None else data.token + response.set_cookie(key="Authorization", value=cookie_value) + return {"status": "ok"} # region Register Tortoise diff --git a/electro/schemas.py b/electro/schemas.py index 3f4d2dc..f1a48c7 100644 --- a/electro/schemas.py +++ b/electro/schemas.py @@ -26,3 +26,7 @@ class ButtonClick(BaseModel): id: int custom_id: str channel: 
Channel | None + + +class CookieToken(BaseModel): + token: str | None = None From 69cec322adc1a45b5769c766f3b0f40f8c8f0f2f Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 3 Jun 2025 15:14:26 +0200 Subject: [PATCH 32/57] Update i18n setup --- electro/app.py | 22 ++-- electro/flow.py | 5 +- electro/flow_manager.py | 6 +- electro/flow_step.py | 2 +- electro/interfaces.py | 2 + electro/toolkit/decorators.py | 123 ++++++++++++++++++ {examples => electro/toolkit}/i18n_gettext.py | 4 +- electro/toolkit/loguru_logging.py | 2 +- examples/test_flow.py | 3 +- 9 files changed, 142 insertions(+), 27 deletions(-) rename {examples => electro/toolkit}/i18n_gettext.py (73%) diff --git a/electro/app.py b/electro/app.py index 9dbbca3..b005f6c 100644 --- a/electro/app.py +++ b/electro/app.py @@ -2,7 +2,7 @@ from typing import Any, Dict, Optional -from fastapi import Depends, FastAPI, HTTPException, WebSocket, WebSocketDisconnect, Response +from fastapi import Depends, FastAPI, HTTPException, Response, WebSocket, WebSocketDisconnect from fastapi.middleware.cors import CORSMiddleware from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise @@ -10,9 +10,9 @@ from .authentication import authenticate_user from .interfaces import APIInterface, WebSocketInterface from .models import Message, PlatformId, User +from .schemas import CookieToken from .toolkit.tortoise_orm import get_tortoise_config from .utils import format_historical_message, paginate_response -from .schemas import CookieToken app = FastAPI( title="Electro API", @@ -72,8 +72,7 @@ async def update_user( for platform in await user.platform_ids.all() ], } - raise HTTPException( - status_code=403, detail="You are not authorized to update this user's information.") + raise HTTPException(status_code=403, detail="You are not authorized to update this user's information.") @app.get("/api/platform/{platform}/user/{user_id}") @@ -101,8 +100,7 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D for platform in await user.platform_ids.all() ], } - raise HTTPException( - status_code=403, detail="You are not authorized to access this user's information.") + raise HTTPException(status_code=403, detail="You are not authorized to access this user's information.") @app.get("/api/platform/{platform}/user/{user_id}/messages") @@ -128,8 +126,7 @@ async def get_user_messages( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - messages = Message.filter( - user=user, is_temporary=False).order_by("-date_added") + messages = Message.filter(user=user, is_temporary=False).order_by("-date_added") return await paginate_response( messages, format_historical_message, @@ -137,8 +134,7 @@ async def get_user_messages( offset=offset, url=f"/api/platform/{platform}/user/{user_id}/messages", ) - raise HTTPException( - status_code=403, detail="You are not authorized to access this user's message history.") + raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") @app.post("/api/platform/{platform}/user/{user_id}/messages") @@ -159,8 +155,7 @@ async def process_message( interface = APIInterface() await interface.handle_incoming_action(user, platform, data) return interface.messages.get() - raise HTTPException( - status_code=403, detail="You are not authorized to send messages on behalf of this user.") + raise HTTPException(status_code=403, detail="You are not authorized to send messages on 
behalf of this user.") @app.websocket("/websocket/platform/{platform}/user/{user_id}") @@ -186,8 +181,7 @@ async def websocket_endpoint( await interface.handle_incoming_action(user, platform, data) except WebSocketDisconnect: await interface.disconnect() - raise HTTPException( - status_code=403, detail="You are not authorized to send messages on behalf of this user.") + raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") @app.post("/api/cookies") diff --git a/electro/flow.py b/electro/flow.py index 82d5479..40bc4eb 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -14,7 +14,7 @@ from .scopes import FlowScopes from .settings import settings from .substitutions import BaseSubstitution -from .toolkit.decorators import with_constant_typing +from .toolkit.decorators import forbid_concurrent_execution, with_constant_typing from .toolkit.loguru_logging import logger from .triggers import BaseFlowTrigger @@ -234,8 +234,7 @@ async def run(self, connector: FlowConnector, upper_level_state: str | None = No return await self.step(connector, initial=True, upper_level_state=upper_level_state) # TODO: This is too complex and should be refactored. pylint: disable=R0912,R0914,R0915 - # TODO: [2024-07-19 by Mykola] Use the decorators - # @forbid_concurrent_execution() + @forbid_concurrent_execution() @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None diff --git a/electro/flow_manager.py b/electro/flow_manager.py index cee4dc4..7f5a9dd 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -10,12 +10,11 @@ from .exceptions import DisabledButtonClick, EventCannotBeProcessed from .flow import Flow, FlowConnector, FlowFinished from .flow_connector import FlowConnectorEvents - -# from decorators import fail_safely from .models import Button, Channel, Guild, Message, PlatformId, User, UserStateChanged from .scopes import FlowScopes from .settings import settings from .storage import BaseFlowStorage, ChannelData, FlowMemoryStorage, UserData +from .toolkit.decorators import fail_safely from .toolkit.loguru_logging import logger from .toolkit.tortoise_orm import Model @@ -231,8 +230,7 @@ async def _finish_flow(self, flow_connector: FlowConnector): return # TODO: This is too complex and should be refactored. 
pylint: disable=R0912 - # TODO: [2024-07-19 by Mykola] Use the decorators - # @fail_safely + @fail_safely async def _dispatch(self, flow_connector: FlowConnector): """Dispatch the flow.""" diff --git a/electro/flow_step.py b/electro/flow_step.py index f077155..1817d2f 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -315,7 +315,7 @@ class SendImageFlowStep(MessageFlowStep): """The Step that sends an image.""" file: File | pathlib.Path | BytesIO | str | None = None - caption: str | None = None + caption: str = "" language: str | None = None diff --git a/electro/interfaces.py b/electro/interfaces.py index 93083a8..3a22c88 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -1,6 +1,7 @@ import contextvars import mimetypes import pathlib +import traceback from abc import ABC, abstractmethod from contextlib import asynccontextmanager from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union @@ -306,6 +307,7 @@ async def handle_incoming_action( "action": ResponseTypes.ERROR, "content": { "error": str(exception), + "traceback": traceback.format_exc(), }, } ) diff --git a/electro/toolkit/decorators.py b/electro/toolkit/decorators.py index c59a3eb..6d49d7f 100644 --- a/electro/toolkit/decorators.py +++ b/electro/toolkit/decorators.py @@ -1,9 +1,17 @@ """Decorators used in the Application.""" +from asyncio import Lock +from collections import defaultdict from functools import wraps from typing import Callable, Coroutine +from electro.exceptions import EventCannotBeProcessed from electro.flow_connector import FlowConnector, FlowConnectorEvents +from electro.models import Channel, Message +from electro.settings import settings +from electro.toolkit.i18n_gettext import _ +from electro.toolkit.loguru_logging import logger +from electro.toolkit.templated_i18n import TemplatedString def with_constant_typing( @@ -33,3 +41,118 @@ async def wrapper(*args, **kwargs): return wrapper return decorator + + +def fail_safely(function: Callable[..., Coroutine]): + """Fail safely and send the message to the User about the issue""" + + @wraps(function) + async def wrapper(*args, **kwargs): + try: + return await function(*args, **kwargs) + except Exception as exception: + # Log the exception with the traceback + logger.exception(exception) + + # Check if any of the arguments is a FlowConnector + flow_connector: FlowConnector | None = next( + (arg for arg in args if isinstance(arg, FlowConnector)), + None, + ) + + substitute_dict = { + "function_name": function.__name__, + "exception": exception, + "exception_type": type(exception), + "exception_text": str(exception), + "user_id": flow_connector.user.id if flow_connector else "N/A", + "user_name": flow_connector.user.username if flow_connector else "N/A", + } + + if flow_connector: + if isinstance(exception, EventCannotBeProcessed): + error_text__template = _("fail_safely.error_text.event_cannot_be_processed") + error_text__template__debug = _("fail_safely.error_text.event_cannot_be_processed.debug") + else: + error_text__template = _("fail_safely.error_text") + error_text__template__debug = _("fail_safely.error_text.debug") + + # Send a message to the user about the issue and delete after 20 seconds + if settings.DEBUG: + error_text = error_text__template__debug.safe_substitute(**substitute_dict) + else: + error_text = error_text__template.safe_substitute(**substitute_dict) + # Set delete_after=20 to delete the message after 20 seconds + await flow_connector.interface.send_message(error_text, delete_after=20) + else: + 
logger.error(f"FlowConnector is not set for the function: {function.__name__} in {args=}") + + if not ( + global_errors_channel_obj := await Channel.get_or_none(used_for=Channel.ChannelUsedFor.GLOBAL_ERRORS) + ): + logger.error("Global errors channel is not set.") + raise exception + + if settings.DEBUG: + message = _("fail_safely.global_error_text.debug").safe_substitute(**substitute_dict) + else: + message = _("fail_safely.global_error_text").safe_substitute(**substitute_dict) + await Message.create( + is_bot_message=True, + type=Message.MessageTypes.TEXT, + channel=global_errors_channel_obj, + content=message, + ) + # Re-raise the exception after handling + raise exception + + return wrapper + + +GLOBAL_USER_LOCKS: defaultdict[int, Lock] = defaultdict(Lock) + + +def forbid_concurrent_execution( + keep_extra_messages=False, + extra_messages_reply: TemplatedString | None = _("forbidden_concurrent_execution.extra_messages_reply"), +) -> Callable: + """Forbid concurrent execution of the function for the same User.""" + + def decorator(function: Callable[..., Coroutine]): + """The actual decorator.""" + + @wraps(function) + async def wrapper(*args, **kwargs): + # Get the `FlowConnector` from the arguments + flow_connector: FlowConnector | None = next( + (arg for arg in args if isinstance(arg, FlowConnector)), + None, + ) + + if not flow_connector: + logger.error(f"FlowConnector is not set for the function: {function.__name__} in {args=}") + return await function(*args, **kwargs) + + # Get the User's ID + user_id = flow_connector.user.id + # Get the User's lock + user_lock = GLOBAL_USER_LOCKS[user_id] + + # Check if the User's lock is already acquired + if user_lock.locked() and not keep_extra_messages: + if flow_connector.message: + # Send a message to the User saying that the function is already running + delete_after = 12 if not keep_extra_messages else None + await flow_connector.interface.send_message( + extra_messages_reply, flow_connector.user, flow_connector.channel, delete_after=delete_after + ) + else: + logger.warning(f"Extra messages are not allowed for the function: {function.__name__} in {args=}") + + # With the lock acquired, execute the function + async with user_lock: + return await function(*args, **kwargs) + + return wrapper + + return decorator diff --git a/examples/i18n_gettext.py b/electro/toolkit/i18n_gettext.py similarity index 73% rename from examples/i18n_gettext.py rename to electro/toolkit/i18n_gettext.py index 96a39da..2298d38 100644 --- a/examples/i18n_gettext.py +++ b/electro/toolkit/i18n_gettext.py @@ -5,6 +5,6 @@ # Set up `gettext` en = gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[settings.BOT_LANGUAGE]) -templated_gettext = make_templated_gettext(en.gettext) +_ = make_templated_gettext(en.gettext) -__all__ = ["templated_gettext"] +__all__ = ["_"] diff --git a/electro/toolkit/loguru_logging.py b/electro/toolkit/loguru_logging.py index 92392eb..fdcd679 100644 --- a/electro/toolkit/loguru_logging.py +++ b/electro/toolkit/loguru_logging.py @@ -5,7 +5,7 @@ from loguru import logger -from ..settings import settings +from electro.settings import settings # Remove the default logger try: diff --git a/examples/test_flow.py b/examples/test_flow.py index 51df29e..48666fa 100644 --- a/examples/test_flow.py +++ b/examples/test_flow.py @@ -6,8 +6,7 @@ from electro.app import app from electro.triggers import CommandTrigger from electro.flow_manager import global_flow_manager - -from .i18n_gettext import templated_gettext as _ +from 
electro.toolkit.i18n_gettext import _ class TestFlow(Flow): From e069513d3c562306caef2b253ef803efb5838b2a Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 3 Jun 2025 17:07:50 +0200 Subject: [PATCH 33/57] Update messages history response --- electro/app.py | 9 ++-- electro/interfaces.py | 12 ++++-- electro/models.py | 8 +--- electro/toolkit/decorators.py | 1 - electro/utils.py | 77 +++++++++++++++++++++++------------ 5 files changed, 66 insertions(+), 41 deletions(-) diff --git a/electro/app.py b/electro/app.py index b005f6c..554edd7 100644 --- a/electro/app.py +++ b/electro/app.py @@ -12,7 +12,7 @@ from .models import Message, PlatformId, User from .schemas import CookieToken from .toolkit.tortoise_orm import get_tortoise_config -from .utils import format_historical_message, paginate_response +from .utils import format_historical_message, limit_from_id_paginate_response app = FastAPI( title="Electro API", @@ -109,7 +109,7 @@ async def get_user_messages( user_id: str, request_user: Optional[User] = Depends(authenticate_user), limit: int = 20, - offset: int = 0, + from_id: Optional[int] = None, ): """ Get the message history for a user. @@ -118,6 +118,7 @@ async def get_user_messages( user: The user whose message history is to be retrieved. limit: The maximum number of messages to retrieve. offset: The number of messages to skip before retrieving the history. + from_id: If provided, this will override the offset to start from the latest message ID. """ platform_id = await PlatformId.get_or_none( platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER @@ -127,11 +128,11 @@ async def get_user_messages( user = await platform_id.user if request_user == user: messages = Message.filter(user=user, is_temporary=False).order_by("-date_added") - return await paginate_response( + return await limit_from_id_paginate_response( messages, format_historical_message, limit=limit, - offset=offset, + from_id=from_id, url=f"/api/platform/{platform}/user/{user_id}/messages", ) raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") diff --git a/electro/interfaces.py b/electro/interfaces.py index 3a22c88..8d43020 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -123,7 +123,6 @@ async def send_message( message = await Message.create( is_temporary=delete_after is not None, is_bot_message=True, - type=Message.MessageTypes.TEXT, user=user, channel=channel, content=message_chunk, @@ -190,15 +189,20 @@ async def _process_message_file( if file_url.startswith(settings.APP_ROOT): file_url = settings.SERVER_URL + file_url[len(settings.APP_ROOT) :] - if isinstance(file, File): - await message.files.add(file) - return { + data = { "url": file_url, "height": height, "width": width, "content_type": content_type, } + if isinstance(file, File): + await message.files.add(file) + else: + message.static_files = [*message.static_files, data] + await message.save() + return data + async def add_role(self, user: User, role: Role): """ Assign a role to a user. 
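
The hunks above switch the message-history endpoint from offset paging to `from_id` keyset paging and teach `_process_message_file` to record non-`File` attachments in `message.static_files`. A client consumes the new paging scheme by following the `next` link that `limit_from_id_paginate_response` (added further down in this same commit) places in every response. Below is a minimal sketch of such a client, assuming an `httpx` client, a locally reachable base URL, and a bearer token accepted by `authenticate_user`; none of these specifics are part of the patch itself.

import httpx  # assumed available; the settings module already carries an HTTPX client timeout

BASE_URL = "http://localhost:8000"  # hypothetical deployment URL, not part of the patch


async def fetch_all_messages(platform: str, user_id: str, token: str) -> list[dict]:
    """Walk the keyset-paginated history by chasing each response's `next` link."""
    messages: list[dict] = []
    # First page: no `from_id`, so the API starts from the newest stored message.
    url: str | None = f"/api/platform/{platform}/user/{user_id}/messages?limit=20"
    headers = {"Authorization": f"Bearer {token}"}  # bearer auth is an assumption here
    async with httpx.AsyncClient(base_url=BASE_URL, headers=headers) as client:
        while url:
            response = await client.get(url)
            response.raise_for_status()
            page = response.json()
            messages.extend(page["data"])
            url = page["next"]  # None once the last page has been returned
    return messages


# e.g. asyncio.run(fetch_all_messages("discord", "1234", "<token>"))

The route shown matches this commit; a later commit in the series ("feat: Flow specific chat history") nests the same endpoint under /flow/{flow_code}/, so the path would need the flow code inserted accordingly.
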
diff --git a/electro/models.py b/electro/models.py index 37d3b3d..f953477 100644 --- a/electro/models.py +++ b/electro/models.py @@ -167,24 +167,18 @@ def __str__(self) -> str: class Message(BaseModel): """The model for Message.""" - class MessageTypes(str, Enum): - """The types of messages.""" - - TEXT = "text" - IMAGE = "image" - id = fields.BigIntField(pk=True) is_bot_message = fields.BooleanField(default=False) is_command = fields.BooleanField(default=False) is_temporary = fields.BooleanField(default=False) - type = fields.CharEnumField(MessageTypes, max_length=255, default=MessageTypes.TEXT) user: ForeignKeyRelation[User] = fields.ForeignKeyField("electro.User", related_name="messages", null=True) channel: ForeignKeyRelation[Channel] = fields.ForeignKeyField("electro.Channel", related_name="messages", null=True) content = fields.TextField(null=True) caption = fields.TextField(null=True) files: fields.ManyToManyRelation[File] = ManyToManyField("electro.File", related_name="messages") + static_files = fields.JSONField(default=list, null=True) buttons: fields.ReverseRelation[Button] def __str__(self) -> str: diff --git a/electro/toolkit/decorators.py b/electro/toolkit/decorators.py index 6d49d7f..c7dcb14 100644 --- a/electro/toolkit/decorators.py +++ b/electro/toolkit/decorators.py @@ -99,7 +99,6 @@ async def wrapper(*args, **kwargs): message = _("fail_safely.global_error_text").safe_substitute(**substitute_dict) await Message.create( is_bot_message=True, - type=Message.MessageTypes.TEXT, channel=global_errors_channel_obj, content=message, ) diff --git a/electro/utils.py b/electro/utils.py index 0164ccf..5d1e6c4 100644 --- a/electro/utils.py +++ b/electro/utils.py @@ -1,10 +1,11 @@ from io import BytesIO -from typing import Any, Callable, Dict +from typing import Any, Callable, Dict, Optional +from fastapi import HTTPException from PIL import Image from tortoise.queryset import QuerySet -from .models import File, Message, User +from .models import BaseModel, File, Message, User from .settings import settings from .toolkit.files_storage.universal_file_storage import universal_file_storage @@ -22,32 +23,28 @@ async def format_historical_message(message: Message) -> Dict[str, Any]: } for button in message.buttons ] - if message.type == Message.MessageTypes.IMAGE: - if len(message.files) > 0: - image = message.files[0] - image_url = await universal_file_storage.get_file_url(image.storage_file_object_key) - else: - image_url = message.content - return { - "id": message.id, - "type": message.type, - "is_bot_message": message.is_bot_message, - "image": image_url, - "caption": message.caption, - "buttons": buttons, - } - if message.type == Message.MessageTypes.TEXT: - return { - "id": message.id, - "type": message.type, - "is_bot_message": message.is_bot_message, - "message": message.content, - "buttons": buttons, + files = [ + { + "url": await universal_file_storage.get_file_url(file.storage_file_object_key), + "height": file.height, + "width": file.width, + "content_type": file.content_type, } - return {} + for file in message.files + ] + return { + "id": message.id, + "is_bot_message": message.is_bot_message, + "date_added": message.date_added.timestamp(), + "message": message.content, + "files": files + (message.static_files or []), + "buttons": buttons, + } -async def paginate_response(data: QuerySet, formatter: Callable, limit: int, offset: int, url: str) -> Dict[str, Any]: +async def limit_offset_paginate_response( + data: QuerySet[BaseModel], formatter: Callable, limit: int, offset: int, 
url: str +) -> Dict[str, Any]: """ Paginate the response data. """ @@ -70,6 +67,36 @@ async def paginate_response(data: QuerySet, formatter: Callable, limit: int, off } +async def limit_from_id_paginate_response( + data: QuerySet[BaseModel], formatter: Callable, limit: int, from_id: Optional[int], url: str +) -> Dict[str, Any]: + """ + Paginate the response data based on the latest ID. + """ + if from_id is not None: + latest_item = await data.get_or_none(id=from_id) + if not latest_item: + raise HTTPException(status_code=400, detail=f"Item with ID {from_id} not found.") + data_from_id = data.filter(date_added__lt=latest_item.date_added) + else: + data_from_id = data + fetched_data_from_id = await data_from_id.limit(limit + 1).all() + if len(fetched_data_from_id) == limit + 1: + next_from_id = fetched_data_from_id[limit - 1] + next_page = f"{url}?limit={limit}&from_id={next_from_id.id}" + else: + next_page = None + + paginated_data = await data_from_id.limit(limit).all() + formatted_data = [await formatter(message) for message in paginated_data] + return { + "from_id": from_id, + "limit": limit, + "next": next_page, + "data": formatted_data, + } + + async def create_and_upload_file(file: BytesIO, owner: User, content_type: str) -> File: if content_type.startswith("image/"): try: From 59a45c272990f4566c022c768ff6dd086a8f583a Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 4 Jun 2025 11:05:38 +0200 Subject: [PATCH 34/57] Decorators --- electro/app.py | 8 +- electro/flow.py | 3 +- electro/flow_manager.py | 3 +- electro/interfaces.py | 150 ++++++++++++++++++---------------- electro/toolkit/decorators.py | 10 +-- 5 files changed, 94 insertions(+), 80 deletions(-) diff --git a/electro/app.py b/electro/app.py index 554edd7..b1a3a8e 100644 --- a/electro/app.py +++ b/electro/app.py @@ -1,5 +1,6 @@ """The API server that works as an endpoint for all the Electro Interfaces.""" +import asyncio from typing import Any, Dict, Optional from fastapi import Depends, FastAPI, HTTPException, Response, WebSocket, WebSocketDisconnect @@ -179,10 +180,11 @@ async def websocket_endpoint( try: while websocket.application_state == WebSocketState.CONNECTED: data = await websocket.receive_json() - await interface.handle_incoming_action(user, platform, data) + asyncio.create_task(interface.handle_incoming_action(user, platform, data)) except WebSocketDisconnect: - await interface.disconnect() - raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") + del interface + else: + raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") @app.post("/api/cookies") diff --git a/electro/flow.py b/electro/flow.py index 40bc4eb..71ee6ec 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -14,7 +14,7 @@ from .scopes import FlowScopes from .settings import settings from .substitutions import BaseSubstitution -from .toolkit.decorators import forbid_concurrent_execution, with_constant_typing +from .toolkit.decorators import with_constant_typing from .toolkit.loguru_logging import logger from .triggers import BaseFlowTrigger @@ -234,7 +234,6 @@ async def run(self, connector: FlowConnector, upper_level_state: str | None = No return await self.step(connector, initial=True, upper_level_state=upper_level_state) # TODO: This is too complex and should be refactored. 
pylint: disable=R0912,R0914,R0915 - @forbid_concurrent_execution() @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 7f5a9dd..2042bad 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -14,7 +14,7 @@ from .scopes import FlowScopes from .settings import settings from .storage import BaseFlowStorage, ChannelData, FlowMemoryStorage, UserData -from .toolkit.decorators import fail_safely +from .toolkit.decorators import fail_safely, forbid_concurrent_execution from .toolkit.loguru_logging import logger from .toolkit.tortoise_orm import Model @@ -231,6 +231,7 @@ async def _finish_flow(self, flow_connector: FlowConnector): # TODO: This is too complex and should be refactored. pylint: disable=R0912 @fail_safely + @forbid_concurrent_execution() async def _dispatch(self, flow_connector: FlowConnector): """Dispatch the flow.""" diff --git a/electro/interfaces.py b/electro/interfaces.py index 8d43020..b5b5368 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -1,7 +1,6 @@ import contextvars import mimetypes import pathlib -import traceback from abc import ABC, abstractmethod from contextlib import asynccontextmanager from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union @@ -94,6 +93,54 @@ async def _format_guild(self, guild: Optional[Guild]) -> Dict[str, Any]: }, } + async def _process_message_file( + self, + file: File | str | pathlib.Path, + message: Message, + ): + """ + Send files to the client as a link: + + If the file is a File, the link to the blob storage location will be sent. + If the file is a BytesIO object, it will be uploaded to blob storage and the link will be sent. + If the file is a string, it will be sent as is so make sure it is a valid URL. + If the file is a pathlib.Path object, it will be sent as a link to the static file endpoint. + + Arguments: + file: The file to be sent. + message: The message to which the file is attached. + """ + if isinstance(file, File) or issubclass(type(file), File): + file_url = await universal_file_storage.get_file_url(file.storage_file_object_key) + height = file.height + width = file.width + content_type = file.content_type + else: + file_url = str(file) + content_type, _ = mimetypes.guess_type(file_url) + try: + with Image.open(file) as img: + width, height = img.width, img.height + except Exception: # pylint: disable=W0718 + width, height = None, None + + if file_url.startswith(settings.APP_ROOT): + file_url = settings.SERVER_URL + file_url[len(settings.APP_ROOT) :] + + data = { + "url": file_url, + "height": height, + "width": width, + "content_type": content_type, + } + + if isinstance(file, File): + await message.files.add(file) + else: + message.static_files = [*message.static_files, data] + await message.save() + return data + async def send_message( self, message: str = "", @@ -155,53 +202,33 @@ async def send_message( } ) - async def _process_message_file( + async def send_error( self, - file: File | str | pathlib.Path, - message: Message, + error: str, + user: Optional[User] = None, + channel: Optional[Channel] = None, ): """ - Send files to the client as a link: - - If the file is a File, the link to the blob storage location will be sent. - If the file is a BytesIO object, it will be uploaded to blob storage and the link will be sent. 
- If the file is a string, it will be sent as is so make sure it is a valid URL. - If the file is a pathlib.Path object, it will be sent as a link to the static file endpoint. + Send an error message to the client. Arguments: - file: The file to be sent. - message: The message to which the file is attached. + error: The error message to be sent. + traceback: The traceback of the error, if available. + user: The user who will receive the error message. + channel: The channel the error message is being sent to. """ - if isinstance(file, File) or issubclass(type(file), File): - file_url = await universal_file_storage.get_file_url(file.storage_file_object_key) - height = file.height - width = file.width - content_type = file.content_type - else: - file_url = str(file) - content_type, _ = mimetypes.guess_type(file_url) - try: - with Image.open(file) as img: - width, height = img.width, img.height - except Exception: # pylint: disable=W0718 - width, height = None, None - - if file_url.startswith(settings.APP_ROOT): - file_url = settings.SERVER_URL + file_url[len(settings.APP_ROOT) :] - - data = { - "url": file_url, - "height": height, - "width": width, - "content_type": content_type, - } - - if isinstance(file, File): - await message.files.add(file) - else: - message.static_files = [*message.static_files, data] - await message.save() - return data + user_data = await self._format_user(user) + channel_data = await self._format_channel(channel) + await self.send_json( + { + "action": ResponseTypes.ERROR, + "content": { + "user": user_data, + "channel": channel_data, + "error": error, + }, + } + ) async def add_role(self, user: User, role: Role): """ @@ -292,29 +319,18 @@ async def handle_incoming_action( platform: The platform from which the action was received (). data: The data received from the client. 
""" - try: - action = data.get("action") - content = data.get("content") - if action == FlowConnectorEvents.MESSAGE: - content = ReceivedMessage.model_validate(content) - await global_flow_manager.on_message(user, platform, content, self) - if action == FlowConnectorEvents.BUTTON_CLICK: - content = ButtonClick.model_validate(content) - await global_flow_manager.on_button_click(user, platform, content, self) - if action == FlowConnectorEvents.MEMBER_JOIN: - pass - if action == FlowConnectorEvents.MEMBER_UPDATE: - pass - except Exception as exception: # pylint: disable=W0718 - await self.send_json( - { - "action": ResponseTypes.ERROR, - "content": { - "error": str(exception), - "traceback": traceback.format_exc(), - }, - } - ) + action = data.get("action") + content = data.get("content") + if action == FlowConnectorEvents.MESSAGE: + content = ReceivedMessage.model_validate(content) + await global_flow_manager.on_message(user, platform, content, self) + if action == FlowConnectorEvents.BUTTON_CLICK: + content = ButtonClick.model_validate(content) + await global_flow_manager.on_button_click(user, platform, content, self) + if action == FlowConnectorEvents.MEMBER_JOIN: + pass + if action == FlowConnectorEvents.MEMBER_UPDATE: + pass @abstractmethod async def send_json(self, data: Dict[str, Any]): @@ -332,10 +348,6 @@ async def connect(self, websocket: WebSocket): await websocket.accept() self.interface = websocket - async def disconnect(self): - await self.interface.close() - self.interface = None - async def stop_process(self, code: int = 1000, reason: Optional[str] = None): await super().stop_process() await self.interface.close(code, reason) diff --git a/electro/toolkit/decorators.py b/electro/toolkit/decorators.py index c7dcb14..499b5f5 100644 --- a/electro/toolkit/decorators.py +++ b/electro/toolkit/decorators.py @@ -83,7 +83,7 @@ async def wrapper(*args, **kwargs): else: error_text = error_text__template.safe_substitute(**substitute_dict) # Set delete_after=20 to delete the message after 20 seconds - await flow_connector.interface.send_message(error_text, delete_after=20) + await flow_connector.interface.send_error(error_text, flow_connector.user, flow_connector.channel) else: logger.error(f"FlowConnector is not set for the function: {function.__name__} in {args=}") @@ -147,10 +147,10 @@ async def wrapper(*args, **kwargs): ) else: logger.warning(f"Extra messages are not allowed for the function: {function.__name__} in {args=}") - - # With the lock acquired, execute the function - async with user_lock: - return await function(*args, **kwargs) + else: + # With the lock acquired, execute the function + async with user_lock: + return await function(*args, **kwargs) return wrapper From a0da321e1ea033e9c7989c6130bef64791657cd6 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 5 Jun 2025 17:05:06 +0200 Subject: [PATCH 35/57] handle cors policy with env var --- electro/app.py | 6 ++---- electro/settings.py | 1 + 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/electro/app.py b/electro/app.py index b1a3a8e..46faaad 100644 --- a/electro/app.py +++ b/electro/app.py @@ -12,6 +12,7 @@ from .interfaces import APIInterface, WebSocketInterface from .models import Message, PlatformId, User from .schemas import CookieToken +from .settings import settings from .toolkit.tortoise_orm import get_tortoise_config from .utils import format_historical_message, limit_from_id_paginate_response @@ -23,13 +24,10 @@ # redoc_url=None, ) -# CORS - -origins = ["*"] app.add_middleware( CORSMiddleware, - 
allow_origins=origins, + allow_origins=settings.CORS_ALLOW_ORIGINS, allow_credentials=True, allow_methods=["*"], allow_headers=["*"], diff --git a/electro/settings.py b/electro/settings.py index 716e120..7938d5f 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -25,6 +25,7 @@ class Settings(BaseSettings): DEFAULT_SLEEP_TIME: int = 3 # seconds SLEEP_TIME_PER_CHARACTER: float = 0.05 HTTPX_CLIENT_DEFAULT_TIMEOUT: int = 60 + CORS_ALLOW_ORIGINS: list[str] = ["*"] # Allow all origins by default # Build urls for static files by removing root path and adding the server url SERVER_URL: str = "http://localhost:8000" APP_ROOT: str = "/app" From 43cb30b83ec4490a0dcc42ebca251a2d5b7fabf3 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 6 Jun 2025 10:23:35 +0200 Subject: [PATCH 36/57] temporary turn off auth --- electro/authentication.py | 1 + 1 file changed, 1 insertion(+) diff --git a/electro/authentication.py b/electro/authentication.py index d925db7..1394a10 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -16,6 +16,7 @@ async def authenticate_user( ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" # Determine the authentication method based on the platform + return await _get_or_create_user(platform, user_id) authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") # Validate the platform From a847e8c969fca3dfce62534d17f5d6cb8f30c601 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 10 Jun 2025 10:15:55 +0200 Subject: [PATCH 37/57] reactivate authentication --- electro/authentication.py | 1 - 1 file changed, 1 deletion(-) diff --git a/electro/authentication.py b/electro/authentication.py index 1394a10..d925db7 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -16,7 +16,6 @@ async def authenticate_user( ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" # Determine the authentication method based on the platform - return await _get_or_create_user(platform, user_id) authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") # Validate the platform From 56c6523906ededeb2e4eba68ddba04d993f1b92f Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 11 Jun 2025 14:03:46 +0200 Subject: [PATCH 38/57] Remove duplicate constant_typing --- electro/flow.py | 2 -- electro/flow_connector.py | 3 +-- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/electro/flow.py b/electro/flow.py index 71ee6ec..869d542 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -14,7 +14,6 @@ from .scopes import FlowScopes from .settings import settings from .substitutions import BaseSubstitution -from .toolkit.decorators import with_constant_typing from .toolkit.loguru_logging import logger from .triggers import BaseFlowTrigger @@ -234,7 +233,6 @@ async def run(self, connector: FlowConnector, upper_level_state: str | None = No return await self.step(connector, initial=True, upper_level_state=upper_level_state) # TODO: This is too complex and should be refactored. 
pylint: disable=R0912,R0914,R0915 - @with_constant_typing(run_only_on_events=[FlowConnectorEvents.MESSAGE]) async def step( self, connector: FlowConnector, initial: bool = False, upper_level_state: str | None = None ) -> list[Message] | None: diff --git a/electro/flow_connector.py b/electro/flow_connector.py index 7f0e423..41a72f9 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -11,8 +11,7 @@ from .storage import ChannelData, UserData if TYPE_CHECKING: - from electro import FlowManager - + from .flow_manager import FlowManager from .interfaces import BaseInterface From cf3685bf1c5772c6455dbe7a1435820e8183679e Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 11 Jun 2025 14:22:36 +0200 Subject: [PATCH 39/57] Send images with last message chunk if splited --- electro/interfaces.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/electro/interfaces.py b/electro/interfaces.py index b5b5368..0ba4a55 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -175,16 +175,12 @@ async def send_message( content=message_chunk, ) - # Send buttons only with the last message chunk + # Send buttons and files only with the last message chunk if i == len(message_chunks) - 1: buttons = await self._create_and_format_buttons(buttons, message) - else: - buttons = [] - - # Send files only with the first message chunk - if i == 0: processed_files = [await self._process_message_file(file, message) for file in files or []] else: + buttons = [] processed_files = [] data = { From 87cb02e21f9c609192f7c22b3e0371fde6302815 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 17 Jun 2025 11:47:44 +0200 Subject: [PATCH 40/57] Use Redis to store flow state and data --- electro/flow_manager.py | 4 +- electro/settings.py | 6 +- electro/storage.py | 119 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 120 insertions(+), 9 deletions(-) diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 2042bad..7727ffc 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -13,7 +13,7 @@ from .models import Button, Channel, Guild, Message, PlatformId, User, UserStateChanged from .scopes import FlowScopes from .settings import settings -from .storage import BaseFlowStorage, ChannelData, FlowMemoryStorage, UserData +from .storage import BaseFlowStorage, ChannelData, FlowRedisStorage, UserData from .toolkit.decorators import fail_safely, forbid_concurrent_execution from .toolkit.loguru_logging import logger from .toolkit.tortoise_orm import Model @@ -128,7 +128,7 @@ def __init__( ): self.flows: list[Flow] = flows or [] - self.storage = storage or FlowMemoryStorage() + self.storage = storage or FlowRedisStorage() self.analytics_manager = AnalyticsManager(self) self._on_finish_callbacks: list[typing.Callable[[FlowConnector], typing.Awaitable[None]]] = ( diff --git a/electro/settings.py b/electro/settings.py index 7938d5f..f5af1c4 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -78,9 +78,13 @@ class Settings(BaseSettings): # Redis settings REDIS_URL: RedisDsn | None # if the `REDIS_URL` is not set, then use the following credentials: - REDIS_HOST: str | None = None + REDIS_HOST: str | None = "redis" REDIS_PORT: int | None = 6379 REDIS_DB: int | None = 0 + REDIS_PASSWORD: str | None = None + # Flow storage settings + FLOW_STORAGE_STATE_TTL: int | None = None + FLOW_STORAGE_DATA_TTL: int | None = None # Storage settings STORAGE_SERVICE_ID: StoragesIDs = "S3" diff --git a/electro/storage.py b/electro/storage.py index 4fa8721..d25e510 
100644 --- a/electro/storage.py +++ b/electro/storage.py @@ -9,10 +9,15 @@ from __future__ import annotations -import typing +import json from abc import ABC, abstractmethod +from typing import Any, Optional -DEFAULT_FLOW_STORAGE_PREFIX = "flow::" +from redis.asyncio import Redis + +from .settings import settings + +DEFAULT_FLOW_STORAGE_PREFIX = "flow" DEFAULT_MISSING_ADDRESS_PART = "missing" @@ -72,12 +77,12 @@ async def get_channel_data(self, channel_id: int) -> ChannelData: raise NotImplementedError @abstractmethod - async def set_user_data(self, user_id: int, data: UserData | dict[str, typing.Any] | None): + async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): """Set the data for a user.""" raise NotImplementedError @abstractmethod - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, typing.Any] | None): + async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): """Set the data for a channel.""" raise NotImplementedError @@ -148,11 +153,11 @@ async def get_channel_data(self, channel_id: int) -> ChannelData: return self._channel_data[channel_id] - async def set_user_data(self, user_id: int, data: UserData | dict[str, typing.Any] | None): + async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): """Set the data for a user.""" self._user_data[user_id] = data if isinstance(data, UserData) else UserData(**data) if data else UserData() - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, typing.Any] | None): + async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): """Set the data for a channel.""" self._channel_data[channel_id] = ( data if isinstance(data, ChannelData) else ChannelData(**data) if data else ChannelData() @@ -174,3 +179,105 @@ async def clear(self): self._user_data.clear() self._channel_states.clear() self._channel_data.clear() + + +class FlowRedisStorage(BaseFlowStorage): + """The storage used for `Flow`. 
Stores data for all the users in Redis.""" + + def __init__( + self, + host: str = settings.REDIS_HOST, + port: int = settings.REDIS_PORT, + db: int = settings.REDIS_DB, + password: Optional[str] = settings.REDIS_PASSWORD, + prefix: str = DEFAULT_FLOW_STORAGE_PREFIX, + state_ttl: Optional[int] = settings.FLOW_STORAGE_STATE_TTL, + data_ttl: Optional[int] = settings.FLOW_STORAGE_DATA_TTL, + ): + self._redis = Redis( + host=host, + port=port, + db=db, + password=password, + decode_responses=True, + ) + self._prefix = prefix + self._state_ttl = state_ttl + self._data_ttl = data_ttl + + def _user_state_key(self, user_id: int) -> str: + return f"{self._prefix}:user:{user_id}:state" + + def _user_data_key(self, user_id: int) -> str: + return f"{self._prefix}:user:{user_id}:data" + + def _channel_state_key(self, channel_id: int) -> str: + return f"{self._prefix}:channel:{channel_id}:state" + + def _channel_data_key(self, channel_id: int) -> str: + return f"{self._prefix}:channel:{channel_id}:data" + + async def get_user_state(self, user_id: int) -> str | None: + return await self._redis.get(self._user_state_key(user_id)) + + async def get_channel_state(self, channel_id: int) -> str | None: + return await self._redis.get(self._channel_state_key(channel_id)) + + async def set_user_state(self, user_id: int, state: str | None): + key = self._user_state_key(user_id) + if state is None: + await self._redis.delete(key) + else: + await self._redis.set(key, state, ex=self._state_ttl) + + async def set_channel_state(self, channel_id: int, state: str | None): + key = self._channel_state_key(channel_id) + if state is None: + await self._redis.delete(key) + else: + await self._redis.set(key, state, ex=self._state_ttl) + + async def delete_user_state(self, user_id: int): + await self._redis.delete(self._user_state_key(user_id)) + + async def delete_channel_state(self, channel_id: int): + await self._redis.delete(self._channel_state_key(channel_id)) + + async def get_user_data(self, user_id: int) -> UserData: + raw = await self._redis.get(self._user_data_key(user_id)) + return UserData(**json.loads(raw)) if raw else UserData() + + async def get_channel_data(self, channel_id: int) -> ChannelData: + raw = await self._redis.get(self._channel_data_key(channel_id)) + return ChannelData(**json.loads(raw)) if raw else ChannelData() + + async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): + key = self._user_data_key(user_id) + if data: + await self._redis.set(key, json.dumps(dict(data)), ex=self._data_ttl) + else: + await self._redis.delete(key) + + async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): + key = self._channel_data_key(channel_id) + if data: + await self._redis.set(key, json.dumps(dict(data)), ex=self._data_ttl) + else: + await self._redis.delete(key) + + async def delete_user_data(self, user_id: int): + await self._redis.delete(self._user_data_key(user_id)) + + async def delete_channel_data(self, channel_id: int): + await self._redis.delete(self._channel_data_key(channel_id)) + + async def clear(self): + # WARNING: This will delete all keys with the prefix! 
+ keys = [] + async for key in self._redis.scan_iter(f"{self._prefix}:*"): + keys.append(key) + if keys: + await self._redis.delete(*keys) + + async def close(self): + await self._redis.close() From d7c83b84e656e3f9271f05fd973ec60cd1f9d0be Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 22 Jul 2025 16:28:07 +0200 Subject: [PATCH 41/57] feat: Flow specific chat history --- electro/app.py | 19 +++-- electro/flow_connector.py | 1 + electro/flow_manager.py | 99 ++++++++++++----------- electro/interfaces.py | 16 ++-- electro/models.py | 1 + electro/storage.py | 166 ++++++++++++++++++++------------------ 6 files changed, 164 insertions(+), 138 deletions(-) diff --git a/electro/app.py b/electro/app.py index 46faaad..6f1b027 100644 --- a/electro/app.py +++ b/electro/app.py @@ -102,10 +102,11 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D raise HTTPException(status_code=403, detail="You are not authorized to access this user's information.") -@app.get("/api/platform/{platform}/user/{user_id}/messages") +@app.get("/api/platform/{platform}/user/{user_id}/flow/{flow_code}/messages") async def get_user_messages( platform: str, user_id: str, + flow_code: str, request_user: Optional[User] = Depends(authenticate_user), limit: int = 20, from_id: Optional[int] = None, @@ -126,7 +127,7 @@ async def get_user_messages( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - messages = Message.filter(user=user, is_temporary=False).order_by("-date_added") + messages = Message.filter(user=user, flow_code=flow_code, is_temporary=False).order_by("-date_added") return await limit_from_id_paginate_response( messages, format_historical_message, @@ -137,10 +138,11 @@ async def get_user_messages( raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") -@app.post("/api/platform/{platform}/user/{user_id}/messages") +@app.post("/api/platform/{platform}/user/{user_id}/flow/{flow_code}/messages") async def process_message( platform: str, user_id: str, + flow_code: str, data: Dict[str, Any], request_user: Optional[User] = Depends(authenticate_user), ): @@ -152,17 +154,18 @@ async def process_message( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - interface = APIInterface() - await interface.handle_incoming_action(user, platform, data) + interface = APIInterface(flow_code=flow_code) + await interface.handle_incoming_action(user, platform, flow_code, data) return interface.messages.get() raise HTTPException(status_code=403, detail="You are not authorized to send messages on behalf of this user.") -@app.websocket("/websocket/platform/{platform}/user/{user_id}") +@app.websocket("/websocket/platform/{platform}/user/{user_id}/flow/{flow_code}") async def websocket_endpoint( websocket: WebSocket, platform: str, user_id: str, + flow_code: str, request_user: Optional[User] = Depends(authenticate_user), ): """Handle the websocket connection.""" @@ -173,12 +176,12 @@ async def websocket_endpoint( raise HTTPException(status_code=404, detail="User not found.") user = await platform_id.user if request_user == user: - interface = WebSocketInterface() + interface = WebSocketInterface(flow_code=flow_code) await interface.connect(websocket) try: while websocket.application_state == WebSocketState.CONNECTED: data = await websocket.receive_json() - asyncio.create_task(interface.handle_incoming_action(user, platform, 
data)) + asyncio.create_task(interface.handle_incoming_action(user, platform, flow_code, data)) except WebSocketDisconnect: del interface else: diff --git a/electro/flow_connector.py b/electro/flow_connector.py index 41a72f9..b1d37c5 100644 --- a/electro/flow_connector.py +++ b/electro/flow_connector.py @@ -31,6 +31,7 @@ class FlowConnector(ContextInstanceMixin): flow_manager: FlowManager interface: BaseInterface event: FlowConnectorEvents + flow_code: str user_state: str | None user_data: UserData diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 7727ffc..6746fc2 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -79,13 +79,16 @@ async def get_or_create_channel( return channel @classmethod - async def save_message(cls, author: User, platform: str, message_data: schemas.ReceivedMessage) -> Message: + async def save_message( + cls, author: User, platform: str, flow_code: str, message_data: schemas.ReceivedMessage + ) -> Message: """Save the message to the database.""" if message_data.channel: channel = await cls.get_or_create_channel(platform, message_data.channel, author) else: channel = None return await Message.create( + flow_code=flow_code, is_command=message_data.content.startswith(settings.BOT_COMMAND_PREFIX), is_bot_message=False, user=author, @@ -139,63 +142,65 @@ def __init__( self.set_current(self) # region User State and Data management - async def _get_user_state(self, user: User) -> str | None: + async def _get_user_state(self, user: User, flow_code: str) -> str | None: """Get the state of the user.""" - return await self.storage.get_user_state(user.id) + return await self.storage.get_user_state(user.id, flow_code) - async def _set_user_state(self, user: User, state: str | None): + async def _set_user_state(self, user: User, flow_code: str, state: str | None): """Set the state of the user.""" # Save the state to the database - old_state = await self._get_user_state(user) + old_state = await self._get_user_state(user, flow_code) if old_state != state: await self.analytics_manager.save_user_state_changed(user, old_state, state) - await self.storage.set_user_state(user.id, state) + await self.storage.set_user_state(user.id, flow_code, state) - async def _delete_user_state(self, user: User): + async def _delete_user_state(self, user: User, flow_code: str): """Delete the state of the user.""" - old_state = await self._get_user_state(user) + old_state = await self._get_user_state(user, flow_code) if old_state: await self.analytics_manager.save_user_state_changed(user, old_state, None) - await self.storage.delete_user_state(user.id) + await self.storage.delete_user_state(user.id, flow_code) - async def _get_user_data(self, user: User) -> UserData: + async def _get_user_data(self, user: User, flow_code: str) -> UserData: """Get the data of the user.""" - return await self.storage.get_user_data(user.id) + return await self.storage.get_user_data(user.id, flow_code) - async def _set_user_data(self, user: User, data: UserData | dict[str, typing.Any] | None): + async def _set_user_data(self, user: User, flow_code: str, data: UserData | dict[str, typing.Any] | None): """Set the data of the user.""" - await self.storage.set_user_data(user.id, data) + await self.storage.set_user_data(user.id, flow_code, data) - async def _delete_user_data(self, user: User): + async def _delete_user_data(self, user: User, flow_code: str): """Delete the data of the user.""" - await self.storage.delete_user_data(user.id) + await self.storage.delete_user_data(user.id, 
flow_code) # endregion # region Channel State and Data management - async def _get_channel_state(self, channel: Channel) -> str | None: + async def _get_channel_state(self, channel: Channel, flow_code: str) -> str | None: """Get the state of the channel.""" - return await self.storage.get_channel_state(channel.id) + return await self.storage.get_channel_state(channel.id, flow_code) - async def _set_channel_state(self, channel: Channel, state: str | None): + async def _set_channel_state(self, channel: Channel, flow_code: str, state: str | None): """Set the state of the channel.""" - await self.storage.set_channel_state(channel.id, state) + await self.storage.set_channel_state(channel.id, flow_code, state) - async def _delete_channel_state(self, channel: Channel): + async def _delete_channel_state(self, channel: Channel, flow_code: str): """Delete the state of the channel.""" - await self.storage.delete_channel_state(channel.id) + await self.storage.delete_channel_state(channel.id, flow_code) - async def _get_channel_data(self, channel: Channel) -> ChannelData: + async def _get_channel_data(self, channel: Channel, flow_code: str) -> ChannelData: """Get the data of the channel.""" - return await self.storage.get_channel_data(channel.id) + return await self.storage.get_channel_data(channel.id, flow_code) - async def _set_channel_data(self, channel: Channel, data: ChannelData | dict[str, typing.Any] | None): + async def _set_channel_data( + self, channel: Channel, flow_code: str, data: ChannelData | dict[str, typing.Any] | None + ): """Set the data of the channel.""" - await self.storage.set_channel_data(channel.id, data) + await self.storage.set_channel_data(channel.id, flow_code, data) - async def _delete_channel_data(self, channel: Channel): + async def _delete_channel_data(self, channel: Channel, flow_code: str): """Delete the data of the channel.""" - await self.storage.delete_channel_data(channel.id) + await self.storage.delete_channel_data(channel.id, flow_code) # endregion @@ -221,8 +226,8 @@ def get_flow(self, flow_name: str) -> Flow | None: async def _finish_flow(self, flow_connector: FlowConnector): """Finish the flow.""" # Delete the state and data for the user - await self.storage.delete_user_state(flow_connector.user.id) - await self.storage.delete_user_data(flow_connector.user.id) + await self.storage.delete_user_state(flow_connector.user.id, flow_connector.flow_code) + await self.storage.delete_user_data(flow_connector.user.id, flow_connector.flow_code) # Run the callbacks for callback in self._on_finish_callbacks: @@ -257,7 +262,7 @@ async def _dispatch(self, flow_connector: FlowConnector): ): if scope == FlowScopes.USER: # Remove user's state, so that the user wouldn't resume any flow - await self.storage.delete_user_state(flow_connector.user.id) + await self.storage.delete_user_state(flow_connector.user.id, flow_connector.flow_code) raise EventCannotBeProcessed( f"The message is a command that is not handled by any of the flows: " @@ -325,23 +330,23 @@ async def dispatch(self, flow_connector: FlowConnector): return await self._dispatch(flow_connector) async def on_message( - self, user: User, platform: str, message_data: schemas.ReceivedMessage, interface: BaseInterface + self, user: User, platform: str, flow_code: str, message_data: schemas.ReceivedMessage, interface: BaseInterface ): """Handle the messages sent by the users.""" # Save the message to the database - message = await self.analytics_manager.save_message(user, platform, message_data) + message = await 
self.analytics_manager.save_message(user, platform, flow_code, message_data) channel = await message.channel # Get the user state and data # TODO: [20.08.2023 by Mykola] Use context manager for this - user_state = await self._get_user_state(user) - user_data = await self._get_user_data(user) + user_state = await self._get_user_state(user, flow_code) + user_data = await self._get_user_data(user, flow_code) # Get the channel state and data if channel: - channel_state = await self._get_channel_state(message.channel) - channel_data = await self._get_channel_data(message.channel) + channel_state = await self._get_channel_state(message.channel, flow_code) + channel_data = await self._get_channel_data(message.channel, flow_code) else: channel_state = None channel_data = ChannelData() @@ -349,6 +354,7 @@ async def on_message( flow_connector = FlowConnector( flow_manager=self, event=FlowConnectorEvents.MESSAGE, + flow_code=flow_code, user=user, channel=channel, message=message, @@ -362,7 +368,7 @@ async def on_message( return await self.dispatch(flow_connector) async def on_button_click( - self, user: User, platform: str, button_data: schemas.ButtonClick, interface: BaseInterface + self, user: User, platform: str, flow_code: str, button_data: schemas.ButtonClick, interface: BaseInterface ): """Handle the buttons clicked by the users.""" # Save the button click to the database @@ -373,17 +379,18 @@ async def on_button_click( return await interface.send_message("button already clicked", user, channel) # Get the user state and data - user_state = await self._get_user_state(user) - user_data = await self._get_user_data(user) + user_state = await self._get_user_state(user, flow_code) + user_data = await self._get_user_data(user, flow_code) # Get the channel state and data - channel_state = await self._get_channel_state(channel) - channel_data = await self._get_channel_data(channel) + channel_state = await self._get_channel_state(channel, flow_code) + channel_data = await self._get_channel_data(channel, flow_code) # noinspection PyTypeChecker flow_connector = FlowConnector( flow_manager=self, event=FlowConnectorEvents.BUTTON_CLICK, + flow_code=flow_code, user=user, channel=channel, button=button, @@ -462,13 +469,15 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): # After the flow step(s) is/are run, update the user state and data if flow_connector.user: - await self._set_user_state(flow_connector.user, flow_connector.user_state) - await self._set_user_data(flow_connector.user, flow_connector.user_data) + await self._set_user_state(flow_connector.user, flow_connector.flow_code, flow_connector.user_state) + await self._set_user_data(flow_connector.user, flow_connector.flow_code, flow_connector.user_data) # Also, update the channel state and data if flow_connector.channel: - await self._set_channel_state(flow_connector.channel, flow_connector.channel_state) - await self._set_channel_data(flow_connector.channel, flow_connector.channel_data) + await self._set_channel_state( + flow_connector.channel, flow_connector.flow_code, flow_connector.channel_state + ) + await self._set_channel_data(flow_connector.channel, flow_connector.flow_code, flow_connector.channel_data) # endregion diff --git a/electro/interfaces.py b/electro/interfaces.py index 0ba4a55..ba4d686 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -30,6 +30,9 @@ class BaseInterface(ABC): more incoming actions from the client. 
""" + def __init__(self, flow_code: str = ""): + self.flow_code = flow_code + async def _create_and_format_buttons( self, buttons: Optional[List["BaseButton"]] = None, message: Optional[Message] = None ) -> List[Button]: @@ -168,6 +171,7 @@ async def send_message( channel_data = await self._format_channel(channel) for i, message_chunk in enumerate(message_chunks): message = await Message.create( + flow_code=self.flow_code, is_temporary=delete_after is not None, is_bot_message=True, user=user, @@ -306,7 +310,7 @@ async def with_constant_typing(self, user: User, channel: Channel): await self.set_typing(user, channel, ResponseTypes.STOP_TYPING) async def handle_incoming_action( - self, user: User, platform: SupportedPlatforms, data: Dict[str, Any] + self, user: User, platform: SupportedPlatforms, flow_code: str, data: Dict[str, Any] ) -> Tuple[Dict[str, str], int]: """ Handle incoming actions from the client. The action data is validated and processed. @@ -319,10 +323,10 @@ async def handle_incoming_action( content = data.get("content") if action == FlowConnectorEvents.MESSAGE: content = ReceivedMessage.model_validate(content) - await global_flow_manager.on_message(user, platform, content, self) + await global_flow_manager.on_message(user, platform, flow_code, content, self) if action == FlowConnectorEvents.BUTTON_CLICK: content = ButtonClick.model_validate(content) - await global_flow_manager.on_button_click(user, platform, content, self) + await global_flow_manager.on_button_click(user, platform, flow_code, content, self) if action == FlowConnectorEvents.MEMBER_JOIN: pass if action == FlowConnectorEvents.MEMBER_UPDATE: @@ -337,7 +341,8 @@ async def send_json(self, data: Dict[str, Any]): class WebSocketInterface(BaseInterface): """WebSocket Interface for the Electro framework.""" - def __init__(self): + def __init__(self, flow_code: str = ""): + super().__init__(flow_code=flow_code) self.interface: WebSocket | None = None async def connect(self, websocket: WebSocket): @@ -355,7 +360,8 @@ async def send_json(self, data: Dict[str, Any]): class APIInterface(BaseInterface): """API Interface for the Electro framework.""" - def __init__(self): + def __init__(self, flow_code: str = ""): + super().__init__(flow_code=flow_code) self.messages = contextvars.ContextVar("messages") self.messages.set([]) diff --git a/electro/models.py b/electro/models.py index f953477..32d2551 100644 --- a/electro/models.py +++ b/electro/models.py @@ -168,6 +168,7 @@ class Message(BaseModel): """The model for Message.""" id = fields.BigIntField(pk=True) + flow_code = fields.CharField(max_length=255, null=True) is_bot_message = fields.BooleanField(default=False) is_command = fields.BooleanField(default=False) diff --git a/electro/storage.py b/electro/storage.py index d25e510..bfdede7 100644 --- a/electro/storage.py +++ b/electro/storage.py @@ -37,62 +37,62 @@ class BaseFlowStorage(ABC): """The base class for the storage.""" @abstractmethod - async def get_user_state(self, user_id: int) -> str | None: + async def get_user_state(self, user_id: int, flow_code: str) -> str | None: """Get the state for a user.""" raise NotImplementedError @abstractmethod - async def get_channel_state(self, channel_id: int) -> str | None: + async def get_channel_state(self, channel_id: int, flow_code: str) -> str | None: """Get the state for a channel.""" raise NotImplementedError @abstractmethod - async def set_user_state(self, user_id: int, state: str | None): + async def set_user_state(self, user_id: int, flow_code: str, state: str | None): 
"""Set the state for a user.""" raise NotImplementedError @abstractmethod - async def set_channel_state(self, channel_id: int, state: str | None): + async def set_channel_state(self, channel_id: int, flow_code: str, state: str | None): """Set the state for a channel.""" raise NotImplementedError @abstractmethod - async def delete_user_state(self, user_id: int): + async def delete_user_state(self, user_id: int, flow_code: str): """Delete the state for a user.""" raise NotImplementedError @abstractmethod - async def delete_channel_state(self, channel_id: int): + async def delete_channel_state(self, channel_id: int, flow_code: str): """Delete the state for a channel.""" raise NotImplementedError @abstractmethod - async def get_user_data(self, user_id: int) -> UserData: + async def get_user_data(self, user_id: int, flow_code: str) -> UserData: """Get the data for a user.""" raise NotImplementedError @abstractmethod - async def get_channel_data(self, channel_id: int) -> ChannelData: + async def get_channel_data(self, channel_id: int, flow_code: str) -> ChannelData: """Get the data for a channel.""" raise NotImplementedError @abstractmethod - async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): + async def set_user_data(self, user_id: int, flow_code: str, data: UserData | dict[str, Any] | None): """Set the data for a user.""" raise NotImplementedError @abstractmethod - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): + async def set_channel_data(self, channel_id: int, flow_code: str, data: ChannelData | dict[str, Any] | None): """Set the data for a channel.""" raise NotImplementedError @abstractmethod - async def delete_user_data(self, user_id: int): + async def delete_user_data(self, user_id: int, flow_code: str): """Delete the data for a user.""" raise NotImplementedError @abstractmethod - async def delete_channel_data(self, channel_id: int): + async def delete_channel_data(self, channel_id: int, flow_code: str): """Delete the data for a channel.""" raise NotImplementedError @@ -107,71 +107,77 @@ class FlowMemoryStorage(BaseFlowStorage): """The storage used for `Flow`. 
Stores data for all the users.""" def __init__(self): - self._user_states: dict[int, str] = {} - self._user_data: dict[int, UserData] = {} + self._user_states: dict[str, str] = {} + self._user_data: dict[str, UserData] = {} - self._channel_states: dict[int, str] = {} - self._channel_data: dict[int, ChannelData] = {} + self._channel_states: dict[str, str] = {} + self._channel_data: dict[str, ChannelData] = {} - async def get_user_state(self, user_id: int) -> str | None: + def _get_key(self, object_id: int, flow_code: str) -> str: + """Get the key for the object.""" + return f"{flow_code}:{object_id}" + + async def get_user_state(self, user_id: int, flow_code: str) -> str | None: """Get the state for a user.""" - return self._user_states.get(user_id) + return self._user_states.get(self._get_key(user_id, flow_code)) - async def get_channel_state(self, channel_id: int) -> str | None: + async def get_channel_state(self, channel_id: int, flow_code: str) -> str | None: """Get the state for a channel.""" - return self._channel_states.get(channel_id) + return self._channel_states.get(self._get_key(channel_id, flow_code)) - async def set_user_state(self, user_id: int, state: str | None): + async def set_user_state(self, user_id: int, flow_code: str, state: str | None): """Set the state for a user.""" - self._user_states[user_id] = state + self._user_states[self._get_key(user_id, flow_code)] = state - async def set_channel_state(self, channel_id: int, state: str | None): + async def set_channel_state(self, channel_id: int, flow_code: str, state: str | None): """Set the state for a channel.""" - self._channel_states[channel_id] = state + self._channel_states[self._get_key(channel_id, flow_code)] = state - async def delete_user_state(self, user_id: int): + async def delete_user_state(self, user_id: int, flow_code: str): """Delete the state for a user.""" - if user_id in self._user_states: - del self._user_states[user_id] + if self._get_key(user_id, flow_code) in self._user_states: + del self._user_states[self._get_key(user_id, flow_code)] - async def delete_channel_state(self, channel_id: int): + async def delete_channel_state(self, channel_id: int, flow_code: str): """Delete the state for a channel.""" - if channel_id in self._channel_states: - del self._channel_states[channel_id] + if self._get_key(channel_id, flow_code) in self._channel_states: + del self._channel_states[self._get_key(channel_id, flow_code)] - async def get_user_data(self, user_id: int) -> UserData: + async def get_user_data(self, user_id: int, flow_code: str) -> UserData: """Get the data for a user.""" - if user_id not in self._user_data: - self._user_data[user_id] = UserData() + if self._get_key(user_id, flow_code) not in self._user_data: + self._user_data[self._get_key(user_id, flow_code)] = UserData() - return self._user_data[user_id] + return self._user_data[self._get_key(user_id, flow_code)] - async def get_channel_data(self, channel_id: int) -> ChannelData: + async def get_channel_data(self, channel_id: int, flow_code: str) -> ChannelData: """Get the data for a channel.""" - if channel_id not in self._channel_data: - self._channel_data[channel_id] = ChannelData() + if self._get_key(channel_id, flow_code) not in self._channel_data: + self._channel_data[self._get_key(channel_id, flow_code)] = ChannelData() - return self._channel_data[channel_id] + return self._channel_data[self._get_key(channel_id, flow_code)] - async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): + async def set_user_data(self, 
user_id: int, flow_code: str, data: UserData | dict[str, Any] | None): """Set the data for a user.""" - self._user_data[user_id] = data if isinstance(data, UserData) else UserData(**data) if data else UserData() + self._user_data[self._get_key(user_id, flow_code)] = ( + data if isinstance(data, UserData) else UserData(**data) if data else UserData() + ) - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): + async def set_channel_data(self, channel_id: int, flow_code: str, data: ChannelData | dict[str, Any] | None): """Set the data for a channel.""" - self._channel_data[channel_id] = ( + self._channel_data[self._get_key(channel_id, flow_code)] = ( data if isinstance(data, ChannelData) else ChannelData(**data) if data else ChannelData() ) - async def delete_user_data(self, user_id: int): + async def delete_user_data(self, user_id: int, flow_code: str): """Delete the data for a user.""" - if user_id in self._user_data: - del self._user_data[user_id] + if self._get_key(user_id, flow_code) in self._user_data: + del self._user_data[self._get_key(user_id, flow_code)] - async def delete_channel_data(self, channel_id: int): + async def delete_channel_data(self, channel_id: int, flow_code: str): """Delete the data for a channel.""" - if channel_id in self._channel_data: - del self._channel_data[channel_id] + if self._get_key(channel_id, flow_code) in self._channel_data: + del self._channel_data[self._get_key(channel_id, flow_code)] async def clear(self): """Clear the storage.""" @@ -205,71 +211,71 @@ def __init__( self._state_ttl = state_ttl self._data_ttl = data_ttl - def _user_state_key(self, user_id: int) -> str: - return f"{self._prefix}:user:{user_id}:state" + def _user_state_key(self, user_id: int, flow_code: str) -> str: + return f"{self._prefix}:user:{user_id}:state:{flow_code}" - def _user_data_key(self, user_id: int) -> str: - return f"{self._prefix}:user:{user_id}:data" + def _user_data_key(self, user_id: int, flow_code: str) -> str: + return f"{self._prefix}:user:{user_id}:data:{flow_code}" - def _channel_state_key(self, channel_id: int) -> str: - return f"{self._prefix}:channel:{channel_id}:state" + def _channel_state_key(self, channel_id: int, flow_code: str) -> str: + return f"{self._prefix}:channel:{channel_id}:state:{flow_code}" - def _channel_data_key(self, channel_id: int) -> str: - return f"{self._prefix}:channel:{channel_id}:data" + def _channel_data_key(self, channel_id: int, flow_code: str) -> str: + return f"{self._prefix}:channel:{channel_id}:data:{flow_code}" - async def get_user_state(self, user_id: int) -> str | None: - return await self._redis.get(self._user_state_key(user_id)) + async def get_user_state(self, user_id: int, flow_code: str) -> str | None: + return await self._redis.get(self._user_state_key(user_id, flow_code)) - async def get_channel_state(self, channel_id: int) -> str | None: - return await self._redis.get(self._channel_state_key(channel_id)) + async def get_channel_state(self, channel_id: int, flow_code: str) -> str | None: + return await self._redis.get(self._channel_state_key(channel_id, flow_code)) - async def set_user_state(self, user_id: int, state: str | None): - key = self._user_state_key(user_id) + async def set_user_state(self, user_id: int, flow_code: str, state: str | None): + key = self._user_state_key(user_id, flow_code) if state is None: await self._redis.delete(key) else: await self._redis.set(key, state, ex=self._state_ttl) - async def set_channel_state(self, channel_id: int, state: str | None): 
- key = self._channel_state_key(channel_id) + async def set_channel_state(self, channel_id: int, flow_code: str, state: str | None): + key = self._channel_state_key(channel_id, flow_code) if state is None: await self._redis.delete(key) else: await self._redis.set(key, state, ex=self._state_ttl) - async def delete_user_state(self, user_id: int): - await self._redis.delete(self._user_state_key(user_id)) + async def delete_user_state(self, user_id: int, flow_code: str): + await self._redis.delete(self._user_state_key(user_id, flow_code)) - async def delete_channel_state(self, channel_id: int): - await self._redis.delete(self._channel_state_key(channel_id)) + async def delete_channel_state(self, channel_id: int, flow_code: str): + await self._redis.delete(self._channel_state_key(channel_id, flow_code)) - async def get_user_data(self, user_id: int) -> UserData: - raw = await self._redis.get(self._user_data_key(user_id)) + async def get_user_data(self, user_id: int, flow_code: str) -> UserData: + raw = await self._redis.get(self._user_data_key(user_id, flow_code)) return UserData(**json.loads(raw)) if raw else UserData() - async def get_channel_data(self, channel_id: int) -> ChannelData: - raw = await self._redis.get(self._channel_data_key(channel_id)) + async def get_channel_data(self, channel_id: int, flow_code: str) -> ChannelData: + raw = await self._redis.get(self._channel_data_key(channel_id, flow_code)) return ChannelData(**json.loads(raw)) if raw else ChannelData() - async def set_user_data(self, user_id: int, data: UserData | dict[str, Any] | None): - key = self._user_data_key(user_id) + async def set_user_data(self, user_id: int, flow_code: str, data: UserData | dict[str, Any] | None): + key = self._user_data_key(user_id, flow_code) if data: await self._redis.set(key, json.dumps(dict(data)), ex=self._data_ttl) else: await self._redis.delete(key) - async def set_channel_data(self, channel_id: int, data: ChannelData | dict[str, Any] | None): - key = self._channel_data_key(channel_id) + async def set_channel_data(self, channel_id: int, flow_code: str, data: ChannelData | dict[str, Any] | None): + key = self._channel_data_key(channel_id, flow_code) if data: await self._redis.set(key, json.dumps(dict(data)), ex=self._data_ttl) else: await self._redis.delete(key) - async def delete_user_data(self, user_id: int): - await self._redis.delete(self._user_data_key(user_id)) + async def delete_user_data(self, user_id: int, flow_code: str): + await self._redis.delete(self._user_data_key(user_id, flow_code)) - async def delete_channel_data(self, channel_id: int): - await self._redis.delete(self._channel_data_key(channel_id)) + async def delete_channel_data(self, channel_id: int, flow_code: str): + await self._redis.delete(self._channel_data_key(channel_id, flow_code)) async def clear(self): # WARNING: This will delete all keys with the prefix! 
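The storage backends above are now keyed by both the object id and the flow code, so the same user can progress through several flows independently. A minimal sketch of how the in-memory backend behaves under this scheme (the user id and flow codes below are illustrative, not part of the patch):

import asyncio

from electro.storage import FlowMemoryStorage


async def demo():
    storage = FlowMemoryStorage()

    # Same user id (42), two different flow codes: the internal keys become
    # "onboarding:42" and "feedback:42", so the states never collide.
    await storage.set_user_state(42, "onboarding", "step_2")
    await storage.set_user_state(42, "feedback", "step_5")

    assert await storage.get_user_state(42, "onboarding") == "step_2"
    assert await storage.get_user_state(42, "feedback") == "step_5"

    # Deleting one flow's state leaves the other flow untouched.
    await storage.delete_user_state(42, "onboarding")
    assert await storage.get_user_state(42, "onboarding") is None
    assert await storage.get_user_state(42, "feedback") == "step_5"


asyncio.run(demo())

FlowRedisStorage follows the same contract, with the flow code appended to each Redis key instead of prefixed to a dict key.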
From f229a8a5b94339c6753fa3a3a009ac9b71b28e2e Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 29 Jul 2025 17:52:13 +0200 Subject: [PATCH 42/57] :sparkles: Send flow finished signal --- electro/enums.py | 1 + electro/flow_manager.py | 1 + electro/interfaces.py | 13 +++++++++++++ 3 files changed, 15 insertions(+) diff --git a/electro/enums.py b/electro/enums.py index 9924860..1da98ab 100644 --- a/electro/enums.py +++ b/electro/enums.py @@ -22,3 +22,4 @@ class ResponseTypes(str, Enum): START_TYPING = "start_typing" STOP_TYPING = "stop_typing" STOP_PROCESS = "stop_process" + FINISH_FLOW = "finish_flow" diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 6746fc2..ed16c57 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -232,6 +232,7 @@ async def _finish_flow(self, flow_connector: FlowConnector): # Run the callbacks for callback in self._on_finish_callbacks: await callback(flow_connector) + await flow_connector.interface.finish_flow() return # TODO: This is too complex and should be refactored. pylint: disable=R0912 diff --git a/electro/interfaces.py b/electro/interfaces.py index ba4d686..d1bd8bc 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -302,6 +302,19 @@ async def stop_process(self): } ) + async def finish_flow(self): + """ + Finish the flow for the client. + + This is used to finish the flow for the client and close the connection. + """ + await self.send_json( + { + "action": ResponseTypes.FINISH_FLOW, + "content": {}, + } + ) + @asynccontextmanager async def with_constant_typing(self, user: User, channel: Channel): """An asynchronous context manager for typing indicators or other tasks.""" From 4fe6b1c3c0b5e07a7c8d0bbd4f1d0576f2a650f3 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 25 Aug 2025 16:58:33 +0200 Subject: [PATCH 43/57] :adhesive_bandage: Use unique name for auth cookie --- electro/app.py | 2 +- electro/authentication.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/electro/app.py b/electro/app.py index 6f1b027..5d2fb5b 100644 --- a/electro/app.py +++ b/electro/app.py @@ -191,7 +191,7 @@ async def websocket_endpoint( @app.post("/api/cookies") async def set_cookie(data: CookieToken, response: Response): cookie_value = "" if data.token is None else data.token - response.set_cookie(key="Authorization", value=cookie_value) + response.set_cookie(key="IKIGAI_AUTHORIZATION", value=cookie_value) return {"status": "ok"} diff --git a/electro/authentication.py b/electro/authentication.py index d925db7..e3b3f16 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -12,7 +12,7 @@ async def authenticate_user( platform: str, user_id: str, header: Optional[str] = Header(default=None, alias="Authorization"), - cookie: Optional[str] = Cookie(default=None, alias="Authorization"), + cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), ) -> User: """Validate the Bearer token provided in the request header or in the cookie.""" # Determine the authentication method based on the platform @@ -25,7 +25,7 @@ async def authenticate_user( # Get the authorization token from the header or cookie authorization = header or cookie if not authorization: - raise HTTPException(status_code=401, detail="Authorization header or cookie is required.") + raise HTTPException(status_code=401, detail="Authorization header or IKIGAI_AUTHORIZATION cookie is required.") if header and authentication_method == "jwt": if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): raise 
HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") From 4637e1582ef4fd45d003d59947434857b7a703a9 Mon Sep 17 00:00:00 2001 From: samonaisi Date: Tue, 2 Sep 2025 11:49:08 +0200 Subject: [PATCH 44/57] =?UTF-8?q?=F0=9F=8C=90=20Dynamic=20language=20selec?= =?UTF-8?q?tion=20(#18)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :globe_with_meridians: Dynamic language selection * fix flow steps * default user locale --- electro/app.py | 11 ++++--- electro/contrib/buttons.py | 7 +++-- electro/flow_step.py | 50 +++++++++++++---------------- electro/interfaces.py | 14 ++++++--- electro/models.py | 3 +- electro/settings.py | 3 +- electro/substitutions.py | 2 +- electro/toolkit/decorators.py | 5 ++- electro/toolkit/i18n.py | 52 +++++++++++++++++++++++++++++++ electro/toolkit/i18n_gettext.py | 10 ------ electro/toolkit/templated_i18n.py | 22 ------------- examples/test_flow.py | 2 +- 12 files changed, 101 insertions(+), 80 deletions(-) create mode 100644 electro/toolkit/i18n.py delete mode 100644 electro/toolkit/i18n_gettext.py delete mode 100644 electro/toolkit/templated_i18n.py diff --git a/electro/app.py b/electro/app.py index 5d2fb5b..84b9598 100644 --- a/electro/app.py +++ b/electro/app.py @@ -34,7 +34,7 @@ ) -@app.patch("/api/platforms/{platform}/user/{user_id}") +@app.patch("/api/platform/{platform}/user/{user_id}") async def update_user( platform: str, user_id: str, @@ -56,12 +56,14 @@ async def update_user( raise HTTPException(status_code=404, detail="User not found.") user: User = await platform_id.user if request_user == user: - if "username" in data: - user.username = data["username"] - await user.save() + for field in ["username", "locale"]: + if field in data: + setattr(user, field, data[field]) + await user.save() return { "id": user.id, "username": user.username, + "locale": user.locale, "platform_ids": [ { "platform": platform.platform, @@ -90,6 +92,7 @@ async def get_user(platform: str, user_id: str, request_user: Optional[User] = D return { "id": user.id, "username": user.username, + "locale": user.locale, "platform_ids": [ { "platform": platform.platform, diff --git a/electro/contrib/buttons.py b/electro/contrib/buttons.py index 8cf4035..92663e1 100644 --- a/electro/contrib/buttons.py +++ b/electro/contrib/buttons.py @@ -7,6 +7,7 @@ from ..flow_connector import FlowConnector from ..flow_step import BaseFlowStep, FlowStepDone +from ..toolkit.i18n import TranslatedString CALLBACK_TYPE = typing.Callable[[FlowConnector], typing.Awaitable[None]] | BaseFlowStep | None @@ -35,7 +36,7 @@ class BaseButton(ABC): def __init__( self, - label: str | None = None, + label: str | TranslatedString | None = None, style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = False, @@ -55,7 +56,7 @@ class DataButton(BaseButton): def __init__( self, - label: str | None = None, + label: str | TranslatedString | None = None, style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = False, @@ -116,7 +117,7 @@ async def trigger_action(self, flow_connector: FlowConnector): class ConfirmButton(ActionButton): def __init__( self, - label: str | None = None, + label: str | TranslatedString | None = None, style: ButtonStyle = ButtonStyle.PRIMARY, disabled: bool = False, remove_after_click: bool = True, diff --git a/electro/flow_step.py b/electro/flow_step.py index 1817d2f..ee42553 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -19,8 
+19,8 @@ from .substitutions import BaseSubstitution, GlobalAbstractChannel, resolve_channel from .toolkit.decorators import with_constant_typing from .toolkit.files_storage.universal_file_storage import universal_file_storage +from .toolkit.i18n import resolve_translation, TranslatedString from .toolkit.loguru_logging import logger -from .toolkit.templated_i18n import TemplatedString if typing.TYPE_CHECKING: from .contrib.buttons import ActionButton @@ -55,13 +55,15 @@ class MessageFormatterMixin: substitutions: dict[str, str] | None = None - async def _get_formatted_message(self, message: TemplatedString, connector: FlowConnector, **kwargs) -> str: + async def _get_formatted_message(self, message: TranslatedString | str, connector: FlowConnector, **kwargs) -> str: """Get the formatted message.""" + if not isinstance(message, TranslatedString): + return message or "" generic_substitutions: dict[str, str | int | BaseSubstitution] = ( connector.user_data | (connector.substitutions or {}) | (self.substitutions or {}) | kwargs ) - variables_used_in_message = message.get_identifiers() + variables_used_in_message = message.get_identifiers(connector.user.locale) logger.debug( f"Variables used in the message: {variables_used_in_message}, {generic_substitutions=}, {message=}" ) @@ -76,9 +78,7 @@ async def _get_formatted_message(self, message: TemplatedString, connector: Flow if key in variables_used_in_message } - return message.safe_substitute( - **substitutions, - ) + return resolve_translation(message.safe_substitute(**substitutions), connector.user.locale) @dataclass(kw_only=True) @@ -198,8 +198,8 @@ def decorator(callback: typing.Callable[[FlowConnector], typing.Awaitable[None]] class MessageFlowStep(BaseFlowStep, FilesMixin, MessageFormatterMixin): """The class for `MessageFlowStep`.""" - message: TemplatedString | None = None - response_message: TemplatedString | None = None + message: TranslatedString | None = None + response_message: TranslatedString | None = None channel_to_send_to: Channel | BaseSubstitution | GlobalAbstractChannel | None = None @@ -208,7 +208,7 @@ class MessageFlowStep(BaseFlowStep, FilesMixin, MessageFormatterMixin): buttons: typing.List[ActionButton] | None = None validator: typing.Callable[[str], bool] | None = None - validator_error_message: TemplatedString | None = None + validator_error_message: TranslatedString | None = None # TODO: [27.09.2023 by Mykola] Make this automatic, on the `Flow` level save_response_to_storage: StorageBucketElement | None = None @@ -234,14 +234,12 @@ async def _resolve_channel_to_send_to( async def send_message( self, connector: FlowConnector, - message: TemplatedString | str, + message: TranslatedString | str, channel: Channel | BaseSubstitution[Channel] | None = None, buttons: typing.Optional[typing.List[ActionButton]] = None, ): """Send the message.""" - message: str | None = ( - await self._get_formatted_message(message, connector) if isinstance(message, TemplatedString) else message - ) + message = await self._get_formatted_message(message, connector) channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) files = await self._get_files_to_send(connector) await connector.interface.send_message(message, connector.user, channel_to_send_to, files, buttons) @@ -317,21 +315,18 @@ class SendImageFlowStep(MessageFlowStep): file: File | pathlib.Path | BytesIO | str | None = None caption: str = "" - language: str | None = None - force_blocking_step: bool = False - def __post_init__(self): + 
def _select_image_language(self, locale: str): """Post-initialization.""" # If the user doesn't want to force the blocking step, set the `non_blocking` flag to `True` if not self.force_blocking_step: self.non_blocking = True # If the language is set, try to use the language-specific file - if self.language and isinstance(self.file, pathlib.Path): - language = self.language.lower() + if isinstance(self.file, pathlib.Path): file, extention = str(self.file).rsplit(".", 1) - language_specific_file = f"{file}__{language}.{extention}" + language_specific_file = f"{file}__{locale}.{extention}" try: with open(language_specific_file, "rb"): self.file = language_specific_file @@ -344,14 +339,13 @@ def __post_init__(self): async def send_message( self, connector: FlowConnector, - message: TemplatedString | str, + message: TranslatedString | str, channel: Channel | BaseSubstitution[Channel] | None = None, buttons: typing.Optional[typing.List[ActionButton]] = None, ): """Send the message.""" - message: str | None = ( - await self._get_formatted_message(message, connector) if isinstance(message, TemplatedString) else message - ) + self._select_image_language(connector.user.locale) + message = await self._get_formatted_message(message, connector) channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) await connector.interface.send_message( self.caption, connector.user, channel_to_send_to, [self.file], buttons=buttons @@ -427,15 +421,15 @@ class ChatGPTRequestMessageFlowStep(MessageFlowStep, ChatGPTMixin): openai_client: AsyncOpenAI | None = None chat_completion_model: str | None = None - message_prompt: TemplatedString | None = None - response_message_prompt: TemplatedString | None = None + message_prompt: TranslatedString | None = None + response_message_prompt: TranslatedString | None = None response_format: ChatGPTResponseFormat | str = ChatGPTResponseFormat.AUTO save_prompt_response_to_storage: StorageBucketElement | None = None parse_json_before_saving: bool | None = None - async def _get_formatted_message(self, message: TemplatedString, connector: FlowConnector, **kwargs) -> str: + async def _get_formatted_message(self, message: TranslatedString | str, connector: FlowConnector, **kwargs) -> str: """Get the formatted message.""" if not self.message_prompt: return await super()._get_formatted_message(message, connector, **kwargs) @@ -482,8 +476,8 @@ class AcceptFileStep(MessageFlowStep): storage_to_save_saved_file_id_to: BaseStorageBucketElement | None = None - file_is_required_message: TemplatedString | str = "You need to upload a file." - file_saved_confirmation_message: TemplatedString | str | None = None + file_is_required_message: TranslatedString | str = "You need to upload a file." 
+ file_saved_confirmation_message: TranslatedString | str | None = None allow_skip: bool = False diff --git a/electro/interfaces.py b/electro/interfaces.py index d1bd8bc..157521a 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -15,6 +15,7 @@ from .schemas import ButtonClick, ReceivedMessage from .settings import settings from .toolkit.files_storage.universal_file_storage import universal_file_storage +from .toolkit.i18n import resolve_translation, TranslatedString if TYPE_CHECKING: from .contrib.buttons import BaseButton @@ -34,16 +35,17 @@ def __init__(self, flow_code: str = ""): self.flow_code = flow_code async def _create_and_format_buttons( - self, buttons: Optional[List["BaseButton"]] = None, message: Optional[Message] = None + self, user: User, buttons: Optional[List["BaseButton"]] = None, message: Optional[Message] = None ) -> List[Button]: """Format the buttons to be sent to the client.""" response = [] for button in buttons or []: + label = resolve_translation(button.label, user.locale) button_object = await Button.create( message=message, custom_id=button.custom_id, style=button.style, - label=button.label, + label=label, remove_after_click=button.remove_after_click, extra_data=getattr(button, "extra_data", {}), ) @@ -146,7 +148,7 @@ async def _process_message_file( async def send_message( self, - message: str = "", + message: str | TranslatedString = "", user: Optional[User] = None, channel: Optional[Channel] = None, files: Optional[List[Union[File, str, pathlib.Path]]] = None, @@ -166,6 +168,7 @@ async def send_message( - if "next", the message will be deleted after the next message is sent. - if an integer, the message will be deleted after that many seconds. """ + message = resolve_translation(message, user.locale) message_chunks = message.split(settings.MESSAGE_BREAK) user_data = await self._format_user(user) channel_data = await self._format_channel(channel) @@ -181,7 +184,7 @@ async def send_message( # Send buttons and files only with the last message chunk if i == len(message_chunks) - 1: - buttons = await self._create_and_format_buttons(buttons, message) + buttons = await self._create_and_format_buttons(user, buttons, message) processed_files = [await self._process_message_file(file, message) for file in files or []] else: buttons = [] @@ -204,7 +207,7 @@ async def send_message( async def send_error( self, - error: str, + error: str | TranslatedString, user: Optional[User] = None, channel: Optional[Channel] = None, ): @@ -217,6 +220,7 @@ async def send_error( user: The user who will receive the error message. channel: The channel the error message is being sent to. 
""" + error = resolve_translation(error, user.locale) user_data = await self._format_user(user) channel_data = await self._format_channel(channel) await self.send_json( diff --git a/electro/models.py b/electro/models.py index 32d2551..3787c76 100644 --- a/electro/models.py +++ b/electro/models.py @@ -7,6 +7,7 @@ from tortoise import fields from tortoise.fields import ForeignKeyRelation, ManyToManyField +from .settings import settings from .toolkit.files_storage.storages_enums import StoragesIDs from .toolkit.tortoise_orm import Model @@ -73,7 +74,7 @@ class User(BaseModel): id = fields.BigIntField(pk=True) username = fields.CharField(max_length=255) - locale = fields.CharField(max_length=255, null=True) + locale = fields.CharField(max_length=255, default=settings.DEFAULT_LOCALE) is_admin = fields.BooleanField(default=False) # guilds: fields.ManyToManyRelation["Guild"] # TODO: [2024-08-30 by Mykola] Allow multiple guilds for the user. diff --git a/electro/settings.py b/electro/settings.py index f5af1c4..f455958 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -45,7 +45,6 @@ class Settings(BaseSettings): MESSAGE_SLEEP_INSTRUCTION_PATTERN: str = r"--- sleep (\d+.?\d*) seconds ---" MESSAGE_MAX_LENGTH: int = 1900 # 2000 - 100 (safe margin) BOT_COMMAND_PREFIX: str = "!" - BOT_LANGUAGE: str = "en" # Should mirror the `DEFAULT_LOCALE` setting. User in the Python code GO_BACK_COMMAND: str = "_go_back" RELOAD_COMMAND: str = "_reload" # Validate GO_BACK_COMMAND @@ -63,7 +62,7 @@ class Settings(BaseSettings): # Locales settings LOCALES_PATH: str = "locales" # Relative to the current working directory - DEFAULT_LOCALE: str = "en" # Should mirror the `BOT_LANGUAGE` setting. User in the `make upload-locales` target + DEFAULT_LOCALE: str = "en" # Database settings DATABASE_URL: PostgresDsn | None = None diff --git a/electro/substitutions.py b/electro/substitutions.py index a63fad8..90c9dcb 100644 --- a/electro/substitutions.py +++ b/electro/substitutions.py @@ -1,4 +1,4 @@ -"""The substitutions' module. Used to substitute the variables in all the `TemplatedString`s.""" +"""The substitutions' module. 
Used to substitute the variables in all the `TranslatedString`s.""" from __future__ import annotations diff --git a/electro/toolkit/decorators.py b/electro/toolkit/decorators.py index 499b5f5..acd449a 100644 --- a/electro/toolkit/decorators.py +++ b/electro/toolkit/decorators.py @@ -9,9 +9,8 @@ from electro.flow_connector import FlowConnector, FlowConnectorEvents from electro.models import Channel, Message from electro.settings import settings -from electro.toolkit.i18n_gettext import _ +from electro.toolkit.i18n import _, TranslatedString from electro.toolkit.loguru_logging import logger -from electro.toolkit.templated_i18n import TemplatedString def with_constant_typing( @@ -113,7 +112,7 @@ async def wrapper(*args, **kwargs): def forbid_concurrent_execution( keep_extra_messages=False, - extra_messages_reply: TemplatedString | None = _("forbidden_concurrent_execution.extra_messages_reply"), + extra_messages_reply: TranslatedString | None = _("forbidden_concurrent_execution.extra_messages_reply"), ) -> Callable: """Forbid concurrent execution of the function for the same User.""" diff --git a/electro/toolkit/i18n.py b/electro/toolkit/i18n.py new file mode 100644 index 0000000..b07431c --- /dev/null +++ b/electro/toolkit/i18n.py @@ -0,0 +1,52 @@ +import gettext +from string import Template + +from electro.settings import settings + +translations = { + "en": gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=["en"]), + "fr": gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=["fr"]), +} + + +class TemplatedString(str, Template): + """A string that can be used both as a string and as a template.""" + + def __repr__(self) -> str: + """Return a representation of the string.""" + return f"TemplatedString({super().__repr__()})" + + +class TranslatedString: + def __init__(self, key: str): + self.key = key + self.substitutions = {} + + def __repr__(self) -> str: + """Return a representation of the string.""" + return f"TranslatedString({self.key!r})" + + def get_identifiers(self, locale: str) -> list[str]: + """Get the identifiers used in the translation string.""" + lang = translations[locale if locale in translations else "en"] + translated_message = TemplatedString(lang.gettext(self.key)) + return translated_message.get_identifiers() + + def safe_substitute(self, **kwargs): + """Safely substitute variables in the translation string.""" + self.substitutions.update(kwargs) + return self + + def resolve(self, locale: str) -> str: + lang = translations[locale if locale in translations else "en"] + translated_message = TemplatedString(lang.gettext(self.key)) + return translated_message.safe_substitute(**self.substitutions) + + +def resolve_translation(text: str | TranslatedString, locale: str) -> str: + if isinstance(text, TranslatedString): + return text.resolve(locale) or "" + return text or "" + + +_ = TranslatedString diff --git a/electro/toolkit/i18n_gettext.py b/electro/toolkit/i18n_gettext.py deleted file mode 100644 index 2298d38..0000000 --- a/electro/toolkit/i18n_gettext.py +++ /dev/null @@ -1,10 +0,0 @@ -import gettext - -from electro.settings import settings -from electro.toolkit.templated_i18n import make_templated_gettext - -# Set up `gettext` -en = gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[settings.BOT_LANGUAGE]) -_ = make_templated_gettext(en.gettext) - -__all__ = ["_"] diff --git a/electro/toolkit/templated_i18n.py b/electro/toolkit/templated_i18n.py deleted file mode 100644 index 64da832..0000000 --- 
a/electro/toolkit/templated_i18n.py +++ /dev/null @@ -1,22 +0,0 @@ -"""A subclass of `I18nMiddleware` that allows for string templating.""" - -from string import Template -from typing import Callable - - -class TemplatedString(str, Template): - """A string that can be used both as a string and as a template.""" - - def __repr__(self) -> str: - """Return a representation of the string.""" - return f"TemplatedString({super().__repr__()})" - - -def make_templated_gettext(gettext_function: Callable[..., str]) -> Callable[..., TemplatedString]: - """Return a function that returns `TemplatedString` instead of strings.""" - - def templated_gettext(*args, **kwargs) -> TemplatedString: - """Return a `TemplatedString(str)()` instead of a string.""" - return TemplatedString(gettext_function(*args, **kwargs)) - - return templated_gettext diff --git a/examples/test_flow.py b/examples/test_flow.py index 48666fa..7fb3534 100644 --- a/examples/test_flow.py +++ b/examples/test_flow.py @@ -6,7 +6,7 @@ from electro.app import app from electro.triggers import CommandTrigger from electro.flow_manager import global_flow_manager -from electro.toolkit.i18n_gettext import _ +from toolkit.i18n import _ class TestFlow(Flow): From fdb03df4cbebe4634fa8331eb01fb16ddb848e2a Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 2 Sep 2025 13:58:13 +0200 Subject: [PATCH 45/57] :sparkles: Use class for authentication --- electro/app.py | 14 +++-- electro/authentication.py | 118 ++++++++++++++++++++++---------------- 2 files changed, 75 insertions(+), 57 deletions(-) diff --git a/electro/app.py b/electro/app.py index 84b9598..dcfe7cb 100644 --- a/electro/app.py +++ b/electro/app.py @@ -8,7 +8,7 @@ from fastapi.websockets import WebSocketState from tortoise.contrib.fastapi import register_tortoise -from .authentication import authenticate_user +from .authentication import ElectroAuthentication from .interfaces import APIInterface, WebSocketInterface from .models import Message, PlatformId, User from .schemas import CookieToken @@ -39,7 +39,7 @@ async def update_user( platform: str, user_id: str, data: Dict[str, Any], - request_user: Optional[User] = Depends(authenticate_user), + request_user: Optional[User] = Depends(ElectroAuthentication.authenticate_user), ): """ Update the user information. @@ -77,7 +77,9 @@ async def update_user( @app.get("/api/platform/{platform}/user/{user_id}") -async def get_user(platform: str, user_id: str, request_user: Optional[User] = Depends(authenticate_user)): +async def get_user( + platform: str, user_id: str, request_user: Optional[User] = Depends(ElectroAuthentication.authenticate_user) +): """ Test the API endpoint. 
""" @@ -110,7 +112,7 @@ async def get_user_messages( platform: str, user_id: str, flow_code: str, - request_user: Optional[User] = Depends(authenticate_user), + request_user: Optional[User] = Depends(ElectroAuthentication.authenticate_user), limit: int = 20, from_id: Optional[int] = None, ): @@ -147,7 +149,7 @@ async def process_message( user_id: str, flow_code: str, data: Dict[str, Any], - request_user: Optional[User] = Depends(authenticate_user), + request_user: Optional[User] = Depends(ElectroAuthentication.authenticate_user), ): """Process the message.""" platform_id = await PlatformId.get_or_none( @@ -169,7 +171,7 @@ async def websocket_endpoint( platform: str, user_id: str, flow_code: str, - request_user: Optional[User] = Depends(authenticate_user), + request_user: Optional[User] = Depends(ElectroAuthentication.authenticate_user), ): """Handle the websocket connection.""" platform_id = await PlatformId.get_or_none( diff --git a/electro/authentication.py b/electro/authentication.py index e3b3f16..954949e 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -8,64 +8,80 @@ from .settings import settings -async def authenticate_user( - platform: str, - user_id: str, - header: Optional[str] = Header(default=None, alias="Authorization"), - cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), -) -> User: - """Validate the Bearer token provided in the request header or in the cookie.""" - # Determine the authentication method based on the platform - authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") +class ElectroAuthentication: + """ + This class handles the authentication of users for the Electro API. + It supports both API key and JWT authentication methods. + """ - # Validate the platform - if platform not in SupportedPlatforms: - raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") + @classmethod + async def authenticate_user( + cls, + platform: str, + user_id: str, + header: Optional[str] = Header(default=None, alias="Authorization"), + cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), + ) -> User: + """Validate the Bearer token provided in the request header or in the cookie.""" + # Determine the authentication method based on the platform + authentication_method = {client: "jwt" for client in settings.JWT_PLATFORMS}.get(platform, "api_key") - # Get the authorization token from the header or cookie - authorization = header or cookie - if not authorization: - raise HTTPException(status_code=401, detail="Authorization header or IKIGAI_AUTHORIZATION cookie is required.") - if header and authentication_method == "jwt": - if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): - raise HTTPException(status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}") - authorization = authorization.split(" ")[1] + # Validate the platform + if platform not in SupportedPlatforms: + raise HTTPException(status_code=400, detail=f"Platform {platform} is not supported.") - if authentication_method == "api_key": - return await _api_key_authenticate_user(platform, user_id, authorization) - return await _jwt_authenticate_user(platform, authorization) + # Get the authorization token from the header or cookie + authorization = header or cookie + if not authorization: + raise HTTPException( + status_code=401, detail="Authorization header or IKIGAI_AUTHORIZATION cookie is required." 
+ ) + if header and authentication_method == "jwt": + if not authorization.startswith(f"{settings.JWT_TOKEN_TYPE} "): + raise HTTPException( + status_code=401, detail=f"Authorization header type must be {settings.JWT_TOKEN_TYPE}" + ) + authorization = authorization.split(" ")[1] + if authentication_method == "api_key": + return await cls._api_key_authenticate_user(platform, user_id, authorization) + return await cls._jwt_authenticate_user(platform, authorization) -async def _get_or_create_user(platform: str, user_id: str, username: Optional[str] = None) -> User: - """Get or create a user based on the platform and user ID.""" - platform_id, created = await PlatformId.get_or_create( - platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER - ) - if created: - user = await User.create(username=username or user_id) - platform_id.user = user - await platform_id.save() - return await platform_id.user + @classmethod + async def _get_or_create_user(cls, platform: str, user_id: str, username: Optional[str] = None) -> User: + """Get or create a user based on the platform and user ID.""" + platform_id, created = await PlatformId.get_or_create( + platform_id=user_id, platform=platform, type=PlatformId.PlatformIdTypes.USER + ) + if created: + user = await User.create(username=username or user_id) + platform_id.user = user + await platform_id.save() + return await platform_id.user + @classmethod + async def _api_key_authenticate_user(cls, platform: str, user_id: str, api_key: str) -> User: + """Validate the API key provided in the request header.""" + if api_key != settings.API_KEY: + raise HTTPException(status_code=401, detail="Invalid API key.") + return await cls._get_or_create_user(platform, user_id) -async def _api_key_authenticate_user(platform: str, user_id: str, api_key: str) -> User: - """Validate the API key provided in the request header.""" - if api_key != settings.API_KEY: - raise HTTPException(status_code=401, detail="Invalid API key.") - return await _get_or_create_user(platform, user_id) + @classmethod + def decode_token(cls, token: str) -> dict: + try: + return jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) + except jwt.ExpiredSignatureError as e: + raise HTTPException(status_code=401, detail="Token has expired") from e + except jwt.InvalidTokenError as e: + raise HTTPException(status_code=401, detail="Invalid token") from e + @classmethod + async def _jwt_authenticate_user(cls, platform: str, token: str) -> User: + validated_token = cls.decode_token(token) -async def _jwt_authenticate_user(platform: str, token: str) -> User: - try: - validated_token = jwt.decode(token, settings.JWT_KEY, algorithms=["RS256"], options={"verify_aud": False}) - except jwt.ExpiredSignatureError as e: - raise HTTPException(status_code=401, detail="Token has expired") from e - except jwt.InvalidTokenError as e: - raise HTTPException(status_code=401, detail="Invalid token") from e + user_id = validated_token.get(settings.JWT_ID_KEY) + username = validated_token.get(settings.JWT_USERNAME_KEY) + if not user_id or not username: + raise HTTPException(status_code=401, detail="Invalid token") - user_id = validated_token.get(settings.JWT_ID_KEY) - username = validated_token.get(settings.JWT_USERNAME_KEY) - if not user_id or not username: - raise HTTPException(status_code=401, detail="Invalid token") - - return await _get_or_create_user(platform, user_id, username) + return await cls._get_or_create_user(platform, user_id, username) From 
6200d1108e5fecc8261f6f572457acc05799e7d0 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Mon, 8 Sep 2025 12:07:34 +0200 Subject: [PATCH 46/57] Button already clicked message --- electro/flow_manager.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/electro/flow_manager.py b/electro/flow_manager.py index ed16c57..7481914 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -15,6 +15,7 @@ from .settings import settings from .storage import BaseFlowStorage, ChannelData, FlowRedisStorage, UserData from .toolkit.decorators import fail_safely, forbid_concurrent_execution +from .toolkit.i18n import _ from .toolkit.loguru_logging import logger from .toolkit.tortoise_orm import Model @@ -377,7 +378,7 @@ async def on_button_click( try: button = await self.analytics_manager.save_button_click(button_data.id) except DisabledButtonClick: - return await interface.send_message("button already clicked", user, channel) + return await interface.send_message(_("buttons.already_clicked"), user, channel) # Get the user state and data user_state = await self._get_user_state(user, flow_code) From 736abb8557c16183fbfb41d3c0fa2bb3e5529bcc Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 9 Sep 2025 10:42:42 +0200 Subject: [PATCH 47/57] :adhesive_bandage: Fix images language selection --- electro/flow_step.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/electro/flow_step.py b/electro/flow_step.py index ee42553..f4481f3 100644 --- a/electro/flow_step.py +++ b/electro/flow_step.py @@ -317,7 +317,7 @@ class SendImageFlowStep(MessageFlowStep): force_blocking_step: bool = False - def _select_image_language(self, locale: str): + def get_translated_image(self, locale: str) -> str | None: """Post-initialization.""" # If the user doesn't want to force the blocking step, set the `non_blocking` flag to `True` if not self.force_blocking_step: @@ -329,12 +329,13 @@ def _select_image_language(self, locale: str): language_specific_file = f"{file}__{locale}.{extention}" try: with open(language_specific_file, "rb"): - self.file = language_specific_file + return language_specific_file except FileNotFoundError: logger.warning( f"In step {self.__class__.__name__}: " f"Language-specific file {language_specific_file} does not exist. Using the default." 
) + return None async def send_message( self, @@ -344,11 +345,12 @@ async def send_message( buttons: typing.Optional[typing.List[ActionButton]] = None, ): """Send the message.""" - self._select_image_language(connector.user.locale) + translated_image = self.get_translated_image(connector.user.locale) + file = translated_image or self.file message = await self._get_formatted_message(message, connector) channel_to_send_to = await self._resolve_channel_to_send_to(channel or self.channel_to_send_to, connector) await connector.interface.send_message( - self.caption, connector.user, channel_to_send_to, [self.file], buttons=buttons + self.caption, connector.user, channel_to_send_to, [file], buttons=buttons ) From 9ddc19a64e46ede2af631cc7e5f9bf9d7a95be4c Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 16 Sep 2025 10:04:49 +0200 Subject: [PATCH 48/57] :adhesive_bandage: Fix older messages url in messages history --- electro/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/electro/app.py b/electro/app.py index dcfe7cb..218dffc 100644 --- a/electro/app.py +++ b/electro/app.py @@ -138,7 +138,7 @@ async def get_user_messages( format_historical_message, limit=limit, from_id=from_id, - url=f"/api/platform/{platform}/user/{user_id}/messages", + url=f"/api/platform/{platform}/user/{user_id}/flow/{flow_code}/messages", ) raise HTTPException(status_code=403, detail="You are not authorized to access this user's message history.") From ffe0c94298a845e3313079af9675a8aba8e96364 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 14 Oct 2025 11:25:27 +0200 Subject: [PATCH 49/57] :wrench: Remove Poeditor dependencies --- electro/settings.py | 2 +- examples/.env.example | 5 -- examples/Makefile | 29 +++----- examples/locales/en/LC_MESSAGES/messages.po | 19 +++-- examples/locales/fr/LC_MESSAGES/messages.po | 20 +++-- .../download_translations_from_poeditor.sh | 64 ---------------- .../upload_translations_to_poeditor.sh | 74 ------------------- 7 files changed, 38 insertions(+), 175 deletions(-) delete mode 100755 examples/scripts/download_translations_from_poeditor.sh delete mode 100755 examples/scripts/upload_translations_to_poeditor.sh diff --git a/electro/settings.py b/electro/settings.py index f455958..bdac102 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -75,7 +75,7 @@ class Settings(BaseSettings): ENABLE_DATABASE_SSL: bool = True # Redis settings - REDIS_URL: RedisDsn | None + REDIS_URL: RedisDsn | None = None # if the `REDIS_URL` is not set, then use the following credentials: REDIS_HOST: str | None = "redis" REDIS_PORT: int | None = 6379 diff --git a/examples/.env.example b/examples/.env.example index ab81af1..5ee8278 100644 --- a/examples/.env.example +++ b/examples/.env.example @@ -31,8 +31,3 @@ ELECTRO__AZURE_STORAGE_ACCOUNT_NAME= ELECTRO__AZURE_CONTAINER_NAME=images # endregion - - -# Optional, used in `Makefile` -POEDITOR__API_TOKEN= -POEDITOR__PROJECT_ID= diff --git a/examples/Makefile b/examples/Makefile index b1a3c25..942cd23 100644 --- a/examples/Makefile +++ b/examples/Makefile @@ -29,37 +29,28 @@ migrate: poetry run aerich upgrade -# region Locales management with Poeditor # TODO: [2024-07-20 by Mykola] Use `$(SOURCES_DIR)` instead of `.` in the commands below. + .PHONY: extract-locales extract-locales: - poetry run pybabel extract --input-dirs . --output ./locales/messages.pot + poetry run pybabel --quiet extract --input-dirs . 
--output ./locales/messages.pot + +.PHONY: locales +locales: + $(MAKE) extract-locales + poetry run pybabel update --input-file ./locales/messages.pot --output-dir ./locales/ .PHONY: compile-locales compile-locales: - poetry run pybabel compile --directory ./locales/ - -.PHONY: download-translations -download-translations: - @./scripts/download_translations_from_poeditor.sh $(POEDITOR__API_TOKEN) $(POEDITOR__PROJECT_ID) $(SOURCES_DIR) - -.PHONY: upload-translations -upload-translations: - @./scripts/upload_translations_to_poeditor.sh $(POEDITOR__API_TOKEN) $(POEDITOR__PROJECT_ID) $(SOURCES_DIR) - -.PHONY: upload-locales -upload-locales: extract-locales upload-translations - -.PHONY: update-locales -update-locales: download-translations compile-locales + poetry run pybabel compile -f --directory ./locales/ # endregion .PHONY: pre-start pre-start: - make migrate - make update-locales + ${MAKE} migrate + ${MAKE} compile-locales .PHONY: start start: diff --git a/examples/locales/en/LC_MESSAGES/messages.po b/examples/locales/en/LC_MESSAGES/messages.po index 5777374..e59348a 100644 --- a/examples/locales/en/LC_MESSAGES/messages.po +++ b/examples/locales/en/LC_MESSAGES/messages.po @@ -1,17 +1,24 @@ + msgid "" msgstr "" +"Project-Id-Version: electro\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2025-10-14 11:23+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language: en\n" +"Language-Team: en \n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" "MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" +"Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"X-Generator: POEditor.com\n" -"Project-Id-Version: electro\n" -"Language: en\n" +"Generated-By: Babel 2.17.0\n" -#: test_flow.py:21 +#: test_flow.py:19 msgid "test_flow_message" msgstr "Testing this `electro` Flow step 1. Everything works!" -#: test_flow.py:22 +#: test_flow.py:20 msgid "test_flow_message_2" msgstr "Testing this `electro` Flow step 2. Everything works!" diff --git a/examples/locales/fr/LC_MESSAGES/messages.po b/examples/locales/fr/LC_MESSAGES/messages.po index 59ead81..fd31879 100644 --- a/examples/locales/fr/LC_MESSAGES/messages.po +++ b/examples/locales/fr/LC_MESSAGES/messages.po @@ -1,16 +1,24 @@ + msgid "" msgstr "" +"Project-Id-Version: electro\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2025-10-14 11:23+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language: fr\n" +"Language-Team: fr \n" +"Plural-Forms: nplurals=2; plural=(n > 1);\n" "MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" +"Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"X-Generator: POEditor.com\n" -"Project-Id-Version: electro\n" -"Language: fr\n" +"Generated-By: Babel 2.17.0\n" -#: test_flow.py:21 +#: test_flow.py:19 msgid "test_flow_message" msgstr "Test du Flow `electro` étape 1. Tout fonctionne!" -#: test_flow.py:22 +#: test_flow.py:20 msgid "test_flow_message_2" msgstr "Test du Flow `electro` étape 2. Tout fonctionne!" + diff --git a/examples/scripts/download_translations_from_poeditor.sh b/examples/scripts/download_translations_from_poeditor.sh deleted file mode 100755 index 2536b02..0000000 --- a/examples/scripts/download_translations_from_poeditor.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -echo "Fetching all languages from the project..." 
- -# Set the API token and project ID from parameters or default to environment variables -POEDITOR__API_TOKEN="${1:-$POEDITOR__API_TOKEN}" -POEDITOR__PROJECT_ID="${2:-$POEDITOR__PROJECT_ID}" - -# Set the sources directory. The `locales` directory should be in it. -SOURCES_DIR="${3:-$SOURCES_DIR}" - -if [ -z "$POEDITOR__API_TOKEN" ]; then - echo "Error: POEDITOR__API_TOKEN is not set." - exit 1 -fi - -if [ -z "$POEDITOR__PROJECT_ID" ]; then - echo "Error: POEDITOR__PROJECT_ID is not set." - exit 1 -fi - -if [ -z "$SOURCES_DIR" ]; then - SOURCES_DIR="." -fi - -# Get all languages from the project -LANGUAGES=$(curl -s -X POST https://api.poeditor.com/v2/languages/list \ - -d api_token="$POEDITOR__API_TOKEN" \ - -d id="$POEDITOR__PROJECT_ID" | jq -r '.result.languages[].code') - -echo "Languages fetched. Starting to export translations..." - -# For each language, export the translations -for LANGUAGE in $LANGUAGES -do - echo "Processing language: $LANGUAGE" - - echo "$POEDITOR__PROJECT_ID" - - # Request export - EXPORT=$(curl -s -X POST https://api.poeditor.com/v2/projects/export \ - -d api_token="$POEDITOR__API_TOKEN" \ - -d id="$POEDITOR__PROJECT_ID" \ - -d language="$LANGUAGE" \ - -d type="po") - - # Get export URL - URL=$(echo "$EXPORT" | jq -r '.result.url') - - # Create directory if not exists - echo "Creating directory for $LANGUAGE if not exists..." - mkdir -p "./$SOURCES_DIR/locales/$LANGUAGE/LC_MESSAGES/" - - echo "Downloading translations file for $LANGUAGE..." - - # Download the file - curl -s -o "./$SOURCES_DIR/locales/$LANGUAGE/LC_MESSAGES/messages.po" "$URL" - - echo "Translations for $LANGUAGE have been downloaded and saved!" -done - -echo "All translations have been exported successfully." diff --git a/examples/scripts/upload_translations_to_poeditor.sh b/examples/scripts/upload_translations_to_poeditor.sh deleted file mode 100755 index 0eb4453..0000000 --- a/examples/scripts/upload_translations_to_poeditor.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -echo "Uploading terms to POEditor..." - - -# Set the API token and project ID from parameters or default to environment variables -POEDITOR__API_TOKEN="${1:-$POEDITOR__API_TOKEN}" -POEDITOR__PROJECT_ID="${2:-$POEDITOR__PROJECT_ID}" - -if [ -z "$POEDITOR__API_TOKEN" ]; then - echo "Error: POEDITOR__API_TOKEN is not set." - exit 1 -fi - -if [ -z "$POEDITOR__PROJECT_ID" ]; then - echo "Error: POEDITOR__PROJECT_ID is not set." - exit 1 -fi - -# Set the sources directory. The `locales` directory should be in it. -SOURCES_DIR="${3:-$SOURCES_DIR}" - -# File to be uploaded -FILE_PATH="./$SOURCES_DIR/locales/messages.pot" - -# Check if the file exists -if [ ! -f "$FILE_PATH" ]; then - echo "Error: File $FILE_PATH does not exist." - exit 1 -fi - -# Default locale -DEFAULT_LOCALE="${DEFAULT_LOCALE:-en}" - -# Function to get language value -get_language_value() { - local locale="$1" - case "$locale" in - "en") - echo "189" - ;; - "fr") - echo "50" - ;; - *) - echo "Unknown locale $locale. Cannot get its ID for POEditor." - exit 1 - esac -} - - -echo "Starting upload of terms to POEditor from $FILE_PATH..." - -# Updating terms using the API -RESPONSE=$(curl -s -X POST https://api.poeditor.com/v2/projects/upload \ - -F api_token="$POEDITOR__API_TOKEN" \ - -F id="$POEDITOR__PROJECT_ID" \ - -F updating=terms \ - -F file=@"$FILE_PATH") - -# Checking if the request was successful -SUCCESS=$(echo "$RESPONSE" | jq -r '.response.status') - -if [[ "$SUCCESS" == "success" ]]; then - echo "Upload successful!" 
- - # Open the web page with untranslated terms - echo "Opening web page with untranslated terms for $DEFAULT_LOCALE..." - open "https://poeditor.com/projects/po_edit?id=$POEDITOR__PROJECT_ID&per_page=100&id_language=$(get_language_value "$DEFAULT_LOCALE")&filter=ut" -else - echo "Upload failed. Response from POEditor API: $RESPONSE" -fi From fcd7703a93f8287e1cccd9ded07e48a0d912731d Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 21 Oct 2025 16:15:15 +0200 Subject: [PATCH 50/57] handle filters for storage buckets --- electro/contrib/storage_buckets.py | 36 +++++++++--------------------- electro/models.py | 29 ------------------------ 2 files changed, 11 insertions(+), 54 deletions(-) diff --git a/electro/contrib/storage_buckets.py b/electro/contrib/storage_buckets.py index 3bd25e2..2efabd2 100644 --- a/electro/contrib/storage_buckets.py +++ b/electro/contrib/storage_buckets.py @@ -305,7 +305,7 @@ async def _delete_channel_data(self): # endregion - async def _get_current_model_instance(self, create_if_not_exists: bool = False) -> tortoise.Model | None: + async def _get_current_model_instance(self, create_if_not_exists: bool = False, **filters) -> tortoise.Model | None: """Get the current model instance.""" if self._scope == FlowScopes.USER: param_name = "user_id" @@ -316,32 +316,32 @@ async def _get_current_model_instance(self, create_if_not_exists: bool = False) else: raise NotImplementedError(f"Unknown scope: {self._scope}") - model_instance = await self.model.get_or_none(**{param_name: param_value}) + model_instance = await self.model.get_or_none(**{param_name: param_value, **filters}) if model_instance is None and create_if_not_exists: - model_instance = await self.model.create(**{param_name: param_value}) + model_instance = await self.model.create(**{param_name: param_value, **filters}) return model_instance - async def get_data(self, default: VALUE | None = None) -> VALUE | None: + async def get_data(self, default: VALUE | None = None, **filters) -> VALUE | None: """Get the data for the storage element.""" - model_instance = await self._get_current_model_instance() + model_instance = await self._get_current_model_instance(**filters) if model_instance is None: return default return getattr(model_instance, self.field_name, default) or default - async def set_data(self, data: VALUE): + async def set_data(self, data: VALUE, **filters): """Set the data for the storage element.""" - model_instance = await self._get_current_model_instance(create_if_not_exists=True) + model_instance = await self._get_current_model_instance(create_if_not_exists=True, **filters) setattr(model_instance, self.field_name, data) await model_instance.save() - async def delete_data(self): + async def delete_data(self, **filters): """Delete the data for the storage element.""" - model_instance = await self._get_current_model_instance() + model_instance = await self._get_current_model_instance(**filters) if model_instance is not None: setattr(model_instance, self.field_name, None) @@ -485,10 +485,10 @@ class BasePostgresStorageBucket(BaseStorageBucket, metaclass=PostgresStorageBuck _model: tortoise.Model @classmethod - async def empty(cls): + async def empty(cls, **filters): flow_connector = FlowConnector.get_current() - await cls._model.filter(user_id=flow_connector.user.id).delete() + await cls._model.filter(user_id=flow_connector.user.id, **filters).delete() # _tortoise_meta: tortoise.models.ModelMeta @@ -538,18 +538,4 @@ async def empty(cls): # } -class BaseAssistantsStorageBucket(BaseStorageBucket, ABC): - 
"""Base storage bucket for the `GPTAssistantStep`s.""" - - __abstract = True # pylint: disable=W0238 - - thread_id: StorageBucketElement[str] - - -class BasePostgresAssistantsStorageBucket(BasePostgresStorageBucket, BaseAssistantsStorageBucket): - """Base storage bucket for the `GPTAssistantStep`s.""" - - __abstract = True # pylint: disable=W0238 - - # endregion diff --git a/electro/models.py b/electro/models.py index 3787c76..645cf0f 100644 --- a/electro/models.py +++ b/electro/models.py @@ -245,33 +245,4 @@ class Meta: abstract = True -class BaseImagesStepStorageModel(BaseStorageModel): - """The base model for images step storage models.""" - - buttons_sent_to_images = fields.JSONField(default=dict, null=True) - images_sent_in_this_step = fields.JSONField(default=list, null=True) - image_chosen = fields.CharField(max_length=255, null=True) - # TODO: [2024-11-08 by Mykola] Add this later to maintain compatibility with the old data - # TODO: [2024-11-08 by Mykola] Remove this from this model. It should be downstream - # metaphors = fields.JSONField(default=list, null=True) - - load_more_button_custom_id = fields.CharField(max_length=255, null=True) - - class Meta: - """The metaclass for the model.""" - - abstract = True - - -class BaseAssistantsStorageModel(BaseStorageModel): - """The base model for OpenAI Assistants storage models.""" - - thread_id = fields.CharField(max_length=255, null=True) - - class Meta: - """The metaclass for the model.""" - - abstract = True - - # endregion From 65e9905470494f072c099e5b48da5984a9f48e45 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Fri, 24 Oct 2025 15:39:49 +0200 Subject: [PATCH 51/57] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Remove=20last=20disc?= =?UTF-8?q?ord=20legacy=20models?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- electro/enums.py | 2 -- electro/flow.py | 4 --- electro/flow_manager.py | 76 +---------------------------------------- electro/interfaces.py | 71 +++++++++++--------------------------- electro/models.py | 48 ++++---------------------- electro/schemas.py | 6 ---- electro/scopes.py | 2 -- 7 files changed, 27 insertions(+), 182 deletions(-) diff --git a/electro/enums.py b/electro/enums.py index 1da98ab..fffea8e 100644 --- a/electro/enums.py +++ b/electro/enums.py @@ -17,8 +17,6 @@ class ResponseTypes(str, Enum): ERROR = "error" MESSAGE = "message" IMAGE = "image" - ADD_ROLE = "add_role" - REMOVE_ROLE = "remove_role" START_TYPING = "start_typing" STOP_TYPING = "stop_typing" STOP_PROCESS = "stop_process" diff --git a/electro/flow.py b/electro/flow.py index 869d542..897078e 100644 --- a/electro/flow.py +++ b/electro/flow.py @@ -93,8 +93,6 @@ def _get_connector_state(self, connector: FlowConnector) -> str: return connector.user_state case FlowScopes.CHANNEL: return connector.channel_state - # case FlowScopes.GUILD: - # return connector.guild_state case _: raise ValueError(f"Unknown scope: {self._scope}. Supported scopes: {FlowScopes.__members__}") @@ -105,8 +103,6 @@ def _set_connector_state(self, connector: FlowConnector, state: str): connector.user_state = state case FlowScopes.CHANNEL: connector.channel_state = state - # case FlowScopes.GUILD: - # connector.guild_state = state case _: raise ValueError(f"Unknown scope: {self._scope}. 
Supported scopes: {FlowScopes.__members__}") diff --git a/electro/flow_manager.py b/electro/flow_manager.py index 7481914..37c8d2f 100644 --- a/electro/flow_manager.py +++ b/electro/flow_manager.py @@ -10,7 +10,7 @@ from .exceptions import DisabledButtonClick, EventCannotBeProcessed from .flow import Flow, FlowConnector, FlowFinished from .flow_connector import FlowConnectorEvents -from .models import Button, Channel, Guild, Message, PlatformId, User, UserStateChanged +from .models import Button, Channel, Message, PlatformId, User, UserStateChanged from .scopes import FlowScopes from .settings import settings from .storage import BaseFlowStorage, ChannelData, FlowRedisStorage, UserData @@ -32,19 +32,6 @@ def __init__(self, flow_manager: FlowManager): # Set the current analytics manager self.set_current(self) - @classmethod - async def get_or_create_guild(cls, platform: str, guild_data: schemas.Guild) -> Guild: - """Save the guild to the database.""" - platform_id, created = await PlatformId.get_or_create( - platform_id=guild_data.platform_id.id, platform=platform, type=PlatformId.PlatformIdTypes.GUILD - ) - if created: - guild = await Guild.create(name=guild_data.name) - platform_id.guild = guild - logger.info(f"Created the Guild record for {guild.id=}, {guild.name=}") - await platform_id.save() - return await platform_id.guild - @classmethod async def get_or_create_channel( cls, platform: str, channel_data: schemas.Channel, user: typing.Optional[User] = None @@ -61,11 +48,6 @@ async def get_or_create_channel( platform_id.channel = channel logger.info(f"Created the Channel record for {channel.id=}, {channel.name=}") await platform_id.save() - if channel_data.guild: - logger.error(f"{channel_data=}, {channel_data.guild=}") - guild = await cls.get_or_create_guild(platform, channel_data.guild) - channel.guild = guild - await channel.save() channel = await platform_id.channel if user and channel.type == Channel.ChannelTypes.DM: if not user.dm_channel: @@ -247,7 +229,6 @@ async def _dispatch(self, flow_connector: FlowConnector): scope = FlowScopes.CHANNEL else: scope = FlowScopes.USER - # TODO: [17.05.2024 by Mykola] Allow for `FlowScopes.GUILD` flows # Check whether this event has triggered any of the flows for flow in self.flows: @@ -405,61 +386,6 @@ async def on_button_click( return await self.dispatch(flow_connector) - # async def on_member_join(self, member: types.Member): - # """Handle the `member_join` event.""" - # # Save the user to the database - # await self.analytics_manager.save_new_member(member) - - # # Get the user state and data - # logger.info(f"Getting the user state and data for {member.id}") - # # TODO: [22.08.2023 by Mykola] Use correct types here - # user_state = await self._get_user_state(member) - # user_data = await self._get_user_data(member) - - # # noinspection PyProtectedMember - # flow_connector = FlowConnector( - # flow_manager=self, - # event=FlowConnectorEvents.MEMBER_JOIN, - # user=member._user, - # member=member, - # # TODO: [28.08.2023 by Mykola] Use the correct channel here - # channel=member.guild.system_channel, - # message=None, - # user_state=user_state, - # user_data=user_data, - # channel_state=None, - # channel_data=ChannelData(), - # ) - - # return await self.dispatch(flow_connector) - - # async def on_member_update(self, before: types.Member, after: types.Member): - # """Handle the `member_update` event.""" - # # Save the member update record to the database - # await self.analytics_manager.save_updated_member(before, after) - - # # Get the user 
state and data - # logger.info(f"Getting the user state and data for {after.id}") - # user_state = await self._get_user_state(after) - # user_data = await self._get_user_data(after) - - # # noinspection PyProtectedMember - # flow_connector = FlowConnector( - # flow_manager=self, - # event=FlowConnectorEvents.MEMBER_UPDATE, - # user=after._user, - # member=after, - # channel=after.guild.system_channel, - # message=None, - # user_state=user_state, - # user_data=user_data, - # extra_data={"old_member": before}, - # channel_state=None, - # channel_data=ChannelData(), - # ) - - # return await self.dispatch(flow_connector) - # region Context Manager async def __aenter__(self): """Enter the context manager.""" diff --git a/electro/interfaces.py b/electro/interfaces.py index 157521a..560cca3 100644 --- a/electro/interfaces.py +++ b/electro/interfaces.py @@ -11,7 +11,7 @@ from .enums import ResponseTypes, SupportedPlatforms from .flow_connector import FlowConnectorEvents from .flow_manager import global_flow_manager -from .models import Button, Channel, File, Guild, Message, Role, User +from .models import Button, Channel, File, Message, User from .schemas import ButtonClick, ReceivedMessage from .settings import settings from .toolkit.files_storage.universal_file_storage import universal_file_storage @@ -86,18 +86,6 @@ async def _format_channel(self, channel: Optional[Channel]) -> Dict[str, Any]: }, } - async def _format_guild(self, guild: Optional[Guild]) -> Dict[str, Any]: - """Format the guild to be sent to the client.""" - if not guild: - return None - return { - "id": guild.id, - "name": guild.name, - "platform_ids": { - identifier.platform: identifier.platform_id for identifier in await guild.platform_ids.all() - }, - } - async def _process_message_file( self, file: File | str | pathlib.Path, @@ -234,44 +222,6 @@ async def send_error( } ) - async def add_role(self, user: User, role: Role): - """ - Assign a role to a user. - - Arguments: - user: The user to whom the role will be assigned. - role: The role to be assigned to the user. - """ - await self.send_json( - { - "action": ResponseTypes.ADD_ROLE, - "content": { - "role": role.name, - "guild": await self._format_guild(role.guild), - "user": await self._format_user(user), - }, - } - ) - - async def remove_role(self, user: User, role: Role): - """ - Remove a role from a user. - - Arguments: - user: The user from whom the role will be removed. - role: The role to be removed from the user. - """ - await self.send_json( - { - "action": ResponseTypes.REMOVE_ROLE, - "content": { - "role": role.name, - "guild": await self._format_guild(role.guild), - "user": await self._format_user(user), - }, - } - ) - async def set_typing(self, user: User, channel: Channel, action: ResponseTypes): """ Set the typing indicator for a user or a channel. @@ -319,6 +269,25 @@ async def finish_flow(self): } ) + async def custom_action(self, user: User, channel: Channel, action: str, content: Dict[str, Any]): + """ + Send a custom action to the client. + + Arguments: + action: The action to be sent. + content: The content of the action. 
+ """ + await self.send_json( + { + "action": action, + "content": { + "user": await self._format_user(user), + "channel": await self._format_channel(channel), + "data": content, + }, + } + ) + @asynccontextmanager async def with_constant_typing(self, user: User, channel: Channel): """An asynchronous context manager for typing indicators or other tasks.""" diff --git a/electro/models.py b/electro/models.py index 645cf0f..ee75f96 100644 --- a/electro/models.py +++ b/electro/models.py @@ -35,19 +35,18 @@ class Meta: class PlatformId(Model): """ - This model is used to store the IDs of the users, channels, and guilds on different platforms. + This model is used to store the IDs of the users and channels on different platforms. - It is used to link the users, channels, and guilds on different platforms to the same user, channel, - or guild in the database. + It is used to link the users and channels on different platforms to the same user or channel + in the database. Attributes: id (int): The ID of the platform ID. - type (str): The type of the platform ID. Can be "user", "channel", or "guild". - platform_id (str): The ID of the user, channel, or guild on the platform. + type (str): The type of the platform ID. Can be "user" or "channel". + platform_id (str): The ID of the user or channel on the platform. platform (str): The name of the platform. user (User): The user associated with the platform ID. channel (Channel): The channel associated with the platform ID. - guild (Guild): The guild associated with the platform ID. """ class PlatformIdTypes(str, Enum): @@ -55,7 +54,6 @@ class PlatformIdTypes(str, Enum): USER = "user" CHANNEL = "channel" - GUILD = "guild" id = fields.IntField(pk=True) type = fields.CharField(max_length=255) @@ -63,7 +61,6 @@ class PlatformIdTypes(str, Enum): platform = fields.CharField(max_length=255) user = fields.ForeignKeyField("electro.User", related_name="platform_ids", null=True) channel = fields.ForeignKeyField("electro.Channel", related_name="platform_ids", null=True) - guild = fields.ForeignKeyField("electro.Guild", related_name="platform_ids", null=True) class Meta: unique_together = (("type", "platform_id", "platform"),) @@ -77,14 +74,9 @@ class User(BaseModel): locale = fields.CharField(max_length=255, default=settings.DEFAULT_LOCALE) is_admin = fields.BooleanField(default=False) - # guilds: fields.ManyToManyRelation["Guild"] # TODO: [2024-08-30 by Mykola] Allow multiple guilds for the user. 
dm_channel: fields.ForeignKeyRelation[Channel] | Channel = fields.ForeignKeyField( "electro.Channel", related_name="dm_users", null=True ) - guild: fields.ForeignKeyRelation[Guild] | Guild = fields.ForeignKeyField( - "electro.Guild", related_name="users", null=True - ) - roles: fields.ManyToManyRelation[Role] = fields.ManyToManyField("electro.Role", related_name="users") platform_ids: fields.ReverseRelation[PlatformId] messages: fields.ReverseRelation[Message] @@ -108,19 +100,6 @@ class File(BaseModel): file_name = fields.TextField(null=True) -class Guild(BaseModel): - """The model for Guild.""" - - id = fields.BigIntField(pk=True) - name = fields.CharField(max_length=255) - - platform_ids: fields.ReverseRelation[PlatformId] - - def __str__(self) -> str: - """Return the string representation of the model.""" - return self.name - - class Channel(BaseModel): """The model for Channel.""" @@ -138,7 +117,6 @@ class ChannelUsedFor(str, Enum): PROFESSION_CARDS = "profession_cards" id = fields.BigIntField(pk=True) - guild: Guild = fields.ForeignKeyField("electro.Guild", related_name="channels", null=True) name = fields.CharField(max_length=255, null=True) type = fields.CharField(max_length=255) used_for = fields.CharEnumField(ChannelUsedFor, max_length=255, null=True) @@ -148,21 +126,7 @@ class ChannelUsedFor(str, Enum): def __str__(self) -> str: """Return the string representation of the model.""" - return f"Channel `{self.name}` in {self.guild}" - - -class Role(BaseModel): - """The model for Role.""" - - id = fields.BigIntField(pk=True) - guild: Guild = fields.ForeignKeyField("electro.Guild", related_name="roles") - name = fields.CharField(max_length=255) - - users: fields.ManyToManyRelation[User] - - def __str__(self) -> str: - """Return the string representation of the model.""" - return f"{self.name} in {self.guild}" + return self.name class Message(BaseModel): diff --git a/electro/schemas.py b/electro/schemas.py index f1a48c7..5e47dac 100644 --- a/electro/schemas.py +++ b/electro/schemas.py @@ -5,15 +5,9 @@ class PlatformId(BaseModel): id: str -class Guild(BaseModel): - platform_id: PlatformId - name: str - - class Channel(BaseModel): platform_id: PlatformId name: str - guild: Guild | None type: str diff --git a/electro/scopes.py b/electro/scopes.py index 8138165..f7d1f6f 100644 --- a/electro/scopes.py +++ b/electro/scopes.py @@ -6,5 +6,3 @@ class FlowScopes(str, Enum): USER = "user" CHANNEL = "channel" - # TODO: [23.10.2023 by Mykola] Allow having guild storage buckets - # GUILD = "guild" From c1b502f971bf99e57a2dbd5303191940361fff02 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Thu, 4 Dec 2025 16:00:54 +0100 Subject: [PATCH 52/57] use settings for i18n --- electro/settings.py | 1 + electro/toolkit/i18n.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/electro/settings.py b/electro/settings.py index bdac102..585c53e 100644 --- a/electro/settings.py +++ b/electro/settings.py @@ -63,6 +63,7 @@ class Settings(BaseSettings): # Locales settings LOCALES_PATH: str = "locales" # Relative to the current working directory DEFAULT_LOCALE: str = "en" + SUPPORTED_LOCALES: list[str] = ["en", "fr"] # Database settings DATABASE_URL: PostgresDsn | None = None diff --git a/electro/toolkit/i18n.py b/electro/toolkit/i18n.py index b07431c..09cc808 100644 --- a/electro/toolkit/i18n.py +++ b/electro/toolkit/i18n.py @@ -4,8 +4,8 @@ from electro.settings import settings translations = { - "en": gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=["en"]), - "fr": 
gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=["fr"]), + locale: gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[locale]) + for locale in settings.SUPPORTED_LOCALES } From 66f891f08a3a29af652845f9c772b284e9260c14 Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Tue, 9 Dec 2025 14:29:22 +0100 Subject: [PATCH 53/57] add admin auth --- electro/authentication.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/electro/authentication.py b/electro/authentication.py index 954949e..43e0866 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -47,6 +47,20 @@ async def authenticate_user( return await cls._api_key_authenticate_user(platform, user_id, authorization) return await cls._jwt_authenticate_user(platform, authorization) + @classmethod + async def authenticate_admin( + cls, + platform: str, + user_id: str, + header: Optional[str] = Header(default=None, alias="Authorization"), + cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), + ) -> User: + """Authenticate an admin user.""" + user = await cls.authenticate_user(platform, user_id, header, cookie) + if not user.is_admin: + raise HTTPException(status_code=403, detail="User does not have admin privileges.") + return user + @classmethod async def _get_or_create_user(cls, platform: str, user_id: str, username: Optional[str] = None) -> User: """Get or create a user based on the platform and user ID.""" From b428ad23a45c6255bbc822d9af70dd4c56dfe60e Mon Sep 17 00:00:00 2001 From: Sam Onaisi Date: Wed, 17 Dec 2025 17:36:11 +0100 Subject: [PATCH 54/57] fix: Add fallback for locales --- electro/toolkit/i18n.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/electro/toolkit/i18n.py b/electro/toolkit/i18n.py index 09cc808..e1e7cdd 100644 --- a/electro/toolkit/i18n.py +++ b/electro/toolkit/i18n.py @@ -4,7 +4,7 @@ from electro.settings import settings translations = { - locale: gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[locale]) + locale: gettext.translation("messages", localedir=settings.LOCALES_PATH, languages=[locale], fallback=True) for locale in settings.SUPPORTED_LOCALES } From 5a643509e40afd036715c4ac4eb6108d57e8f520 Mon Sep 17 00:00:00 2001 From: Mykola Solodukha Date: Mon, 5 Jan 2026 11:11:54 +0200 Subject: [PATCH 55/57] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20Bump=20FastAPI=20to?= =?UTF-8?q?=20fix=20starlette=20DoS=20vulnerabilities?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Allow starlette >=0.49.1 which patches: - High: DoS via Range header in FileResponse - Medium: DoS via multipart form parsing --- electro/toolkit/whisper_client.py | 131 +++++ poetry.lock | 765 ++++++++++++++++++++++++------ pyproject.toml | 2 +- 3 files changed, 758 insertions(+), 140 deletions(-) create mode 100644 electro/toolkit/whisper_client.py diff --git a/electro/toolkit/whisper_client.py b/electro/toolkit/whisper_client.py new file mode 100644 index 0000000..83d7a8c --- /dev/null +++ b/electro/toolkit/whisper_client.py @@ -0,0 +1,131 @@ +"""Whisper client utility for audio transcription.""" + +import logging +import tempfile +from pathlib import Path +from typing import Optional + +from fastapi import HTTPException, UploadFile +from openai import AsyncOpenAI + +from settings import settings + +from electro.toolkit.loguru_logging import logger + + +class WhisperTranscriptionError(Exception): + """Custom exception for Whisper transcription errors.""" + + pass + + 
+async def validate_audio_file(file: UploadFile) -> None: + """ + Validate the uploaded audio file. + + Args: + file: The uploaded file to validate + + Raises: + HTTPException: If file validation fails + """ + if not file.filename: + raise HTTPException(status_code=400, detail="No file uploaded") + + file_extension = Path(file.filename).suffix.lower().lstrip('.') + if file_extension not in settings.SUPPORTED_AUDIO_FORMATS: + raise HTTPException( + status_code=400, + detail=f"Unsupported file format. Supported formats: {', '.join(settings.SUPPORTED_AUDIO_FORMATS)}" + ) + + if file.size and file.size > settings.MAX_AUDIO_FILE_SIZE: + raise HTTPException( + status_code=413, + detail=f"File too large. Maximum size: {settings.MAX_AUDIO_FILE_SIZE // (1024 * 1024)}MB" + ) + + +async def transcribe_audio( + file: UploadFile, + language: Optional[str] = None, + response_format: str = "json", + temperature: float = 0.0 +) -> dict: + """ + Transcribe audio file using OpenAI Whisper. + + Args: + file: Audio file to transcribe + language: Optional language code (e.g., 'en', 'es', 'fr') + response_format: Response format ('json', 'text', 'srt', 'verbose_json', 'vtt') + temperature: Sampling temperature between 0 and 1 + + Returns: + Dictionary containing transcription result + + Raises: + WhisperTranscriptionError: If transcription fails + """ + await validate_audio_file(file) + + client = AsyncOpenAI( + base_url=settings.OPENAI_API_BASE_URL, + api_key=settings.OPENAI_API_KEY + ) + + with tempfile.NamedTemporaryFile(delete=False, suffix=f".{Path(file.filename).suffix}") as temp_file: + try: + # Write uploaded file to temporary file + content = await file.read() + temp_file.write(content) + temp_file.flush() + + # Transcribe using OpenAI Whisper + with open(temp_file.name, 'rb') as audio_file: + transcription_params = { + "file": audio_file, + "model": settings.OPENAI_WHISPER_MODEL, + "response_format": response_format, + "temperature": temperature + } + + if language: + transcription_params["language"] = language + + logger.info(f"Starting transcription for file: {file.filename}") + transcript = await client.audio.transcriptions.create(**transcription_params) + logger.info(f"Transcription completed for file: {file.filename}") + + # Handle different response formats + if response_format == "json": + return { + "text": transcript.text, + "language": getattr(transcript, 'language', None), + "duration": getattr(transcript, 'duration', None), + "filename": file.filename + } + elif response_format == "verbose_json": + return { + "text": transcript.text, + "language": getattr(transcript, 'language', None), + "duration": getattr(transcript, 'duration', None), + "segments": getattr(transcript, 'segments', []), + "filename": file.filename + } + else: + return { + "text": str(transcript), + "filename": file.filename + } + + except Exception as e: + logger.error(f"Transcription failed for file {file.filename}: {str(e)}") + raise WhisperTranscriptionError(f"Transcription failed: {str(e)}") + + finally: + # Clean up temporary file + try: + Path(temp_file.name).unlink(missing_ok=True) + except Exception as cleanup_error: + logger.warning(f"Failed to cleanup temporary file: {cleanup_error}") \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index f4d5ef6..816f84d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "aerich" @@ -195,7 +195,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aioitertools" @@ -259,6 +259,18 @@ files = [ {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, ] +[[package]] +name = "annotated-doc" +version = "0.0.4" +description = "Document parameters, class attributes, return types, and variables inline, with Annotated." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, + {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -290,7 +302,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -366,8 +378,8 @@ files = [ [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] [[package]] name = "attrs" @@ -382,12 +394,12 @@ files = [ ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = 
["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "autodoc-pydantic" @@ -411,7 +423,7 @@ erdantic = ["erdantic (<2.0)"] linting = ["ruff (>=0.4.0,<0.5.0)"] security = ["pip-audit (>=2.7.2,<3.0.0)"] test = ["coverage (>=7,<8)", "defusedxml (>=0.7.1)", "pytest (>=8.0.0,<9.0.0)", "pytest-sugar (>=1.0.0,<2.0.0)"] -type-checking = ["mypy (>=1.9,<2.0)", "types-docutils (>=0.20,<0.21)", "typing-extensions (>=4.11,<5.0)"] +type-checking = ["mypy (>=1.9,<2.0)", "types-docutils (>=0.20,<0.21)", "typing-extensions (>=4.11,<5.0) ; python_version <= \"3.9\""] [[package]] name = "azure-core" @@ -486,7 +498,7 @@ files = [ ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "black" @@ -841,10 +853,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -863,10 +875,10 @@ files = [ ] [package.extras] -all = ["Sphinx 
(>=3)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "tox (>=3.7.0)"] +all = ["Sphinx (>=3)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "numpy (>=1.13.0) ; python_version < \"3.7\"", "numpy (>=1.15.0) ; python_version < \"3.8\"", "numpy (>=1.18.0) ; python_version < \"3.9\"", "numpy (>=1.20.0) ; python_version >= \"3.9\"", "pytest (==5.4.3) ; python_version <= \"3.5\"", "pytest (>=6) ; python_version > \"3.5\"", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2) ; python_version <= \"3.5\"", "pytest-pycodestyle (>=2.2.0) ; python_version > \"3.5\"", "pytest-pydocstyle (>=2) ; python_version <= \"3.5\"", "pytest-pydocstyle (>=2.2.0) ; python_version > \"3.5\"", "sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "tox (>=3.7.0)"] docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"] -numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"] -tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3)", "pytest (>=6)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2)", "pytest-pydocstyle (>=2.2.0)", "sphinx (>=3)", "tox (>=3.7.0)"] +numpy = ["numpy (>=1.13.0) ; python_version < \"3.7\"", "numpy (>=1.15.0) ; python_version < \"3.8\"", "numpy (>=1.18.0) ; python_version < \"3.9\"", "numpy (>=1.20.0) ; python_version >= \"3.9\""] +tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3) ; python_version <= \"3.5\"", "pytest (>=6) ; python_version > \"3.5\"", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "pytest-pycodestyle (>=2) ; python_version <= \"3.5\"", "pytest-pycodestyle (>=2.2.0) ; python_version > \"3.5\"", "pytest-pydocstyle (>=2) ; python_version <= \"3.5\"", "pytest-pydocstyle (>=2.2.0) ; python_version > \"3.5\"", "sphinx (>=3)", "tox (>=3.7.0)"] [[package]] name = "dill" @@ -961,51 +973,247 @@ idna = ">=2.0.0" [[package]] name = "fastapi" -version = "0.115.8" +version = "0.128.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"}, - {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"}, + {file = "fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d"}, + {file = "fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a"}, ] [package.dependencies] +annotated-doc = ">=0.0.2" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"standard\""} -fastapi-cli = {version = ">=0.0.5", extras = ["standard"], optional = true, markers = "extra == \"standard\""} -httpx = {version = ">=0.23.0", optional = true, markers = "extra == \"standard\""} +fastapi-cli = {version = ">=0.0.8", extras = ["standard"], optional = true, markers = "extra == \"standard\""} +httpx = {version = ">=0.23.0,<1.0.0", optional = true, markers = "extra == 
\"standard\""} jinja2 = {version = ">=3.1.5", optional = true, markers = "extra == \"standard\""} -pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +pydantic = ">=2.7.0" +pydantic-extra-types = {version = ">=2.0.0", optional = true, markers = "extra == \"standard\""} +pydantic-settings = {version = ">=2.0.0", optional = true, markers = "extra == \"standard\""} python-multipart = {version = ">=0.0.18", optional = true, markers = "extra == \"standard\""} -starlette = ">=0.40.0,<0.46.0" +starlette = ">=0.40.0,<0.51.0" typing-extensions = ">=4.8.0" uvicorn = {version = ">=0.12.0", extras = ["standard"], optional = true, markers = "extra == \"standard\""} [package.extras] -all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] -standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] +standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] name = "fastapi-cli" -version = "0.0.7" +version = "0.0.20" description = "Run and manage FastAPI apps from the command line with FastAPI CLI. 
🚀" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fastapi_cli-0.0.20-py3-none-any.whl", hash = "sha256:e58b6a0038c0b1532b7a0af690656093dee666201b6b19d3c87175b358e9f783"}, + {file = "fastapi_cli-0.0.20.tar.gz", hash = "sha256:d17c2634f7b96b6b560bc16b0035ed047d523c912011395f49f00a421692bc3a"}, +] + +[package.dependencies] +fastapi-cloud-cli = {version = ">=0.1.1", optional = true, markers = "extra == \"standard\""} +rich-toolkit = ">=0.14.8" +typer = ">=0.15.1" +uvicorn = {version = ">=0.15.0", extras = ["standard"]} + +[package.extras] +new = ["fastapi-new (>=0.0.2) ; python_version >= \"3.10\""] +standard = ["fastapi-cloud-cli (>=0.1.1)", "uvicorn[standard] (>=0.15.0)"] +standard-no-fastapi-cloud-cli = ["uvicorn[standard] (>=0.15.0)"] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.8.0" +description = "Deploy and manage FastAPI Cloud apps from the command line 🚀" +optional = false +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4"}, - {file = "fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e"}, + {file = "fastapi_cloud_cli-0.8.0-py3-none-any.whl", hash = "sha256:e9f40bee671d985fd25d7a5409b56d4f103777bf8a0c6d746ea5fbf97a8186d9"}, + {file = "fastapi_cloud_cli-0.8.0.tar.gz", hash = "sha256:cf07c502528bfd9e6b184776659f05d9212811d76bbec9fbb6bf34bed4c7456f"}, ] [package.dependencies] -rich-toolkit = ">=0.11.1" +fastar = ">=0.8.0" +httpx = ">=0.27.0" +pydantic = {version = ">=2.0", extras = ["email"]} +rich-toolkit = ">=0.14.5" +rignore = ">=0.5.1" +sentry-sdk = ">=2.20.0" typer = ">=0.12.3" uvicorn = {version = ">=0.15.0", extras = ["standard"]} [package.extras] standard = ["uvicorn[standard] (>=0.15.0)"] +[[package]] +name = "fastar" +version = "0.8.0" +description = "High-level bindings for the Rust tar crate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastar-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c9f930cff014cf79d396d0541bd9f3a3f170c9b5e45d10d634d98f9ed08788c3"}, + {file = "fastar-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07b70f712d20622346531a4b46bb332569bea621f61314c0b7e80903a16d14cf"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:330639db3bfba4c6d132421a2a4aeb81e7bea8ce9159cdb6e247fbc5fae97686"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ea7ceb6231e48d7bb0d7dc13e946baa29c7f6873eaf4afb69725d6da349033"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a90695a601a78bbca910fdf2efcdf3103c55d0de5a5c6e93556d707bf886250b"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d0bf655ff4c9320b0ca8a5b128063d5093c0c8c1645a2b5f7167143fd8531aa"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8df22cdd8d58e7689aa89b2e4a07e8e5fa4f88d2d9c2621f0e88a49be97ccea"}, + {file = "fastar-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5e6ad722685128521c8fb44cf25bd38669650ba3a4b466b8903e5aa28e1a0"}, + {file = "fastar-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:31cd541231a2456e32104da891cf9962c3b40234d0465cbf9322a6bc8a1b05d5"}, + {file = 
"fastar-0.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:175db2a98d67ced106468e8987975484f8bbbd5ad99201da823b38bafb565ed5"}, + {file = "fastar-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada877ab1c65197d772ce1b1c2e244d4799680d8b3f136a4308360f3d8661b23"}, + {file = "fastar-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:01084cb75f13ca6a8e80bd41584322523189f8e81b472053743d6e6c3062b5a6"}, + {file = "fastar-0.8.0-cp310-cp310-win32.whl", hash = "sha256:ca639b9909805e44364ea13cca2682b487e74826e4ad75957115ec693228d6b6"}, + {file = "fastar-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:fbc0f2ed0f4add7fb58034c576584d44d7eaaf93dee721dfb26dbed6e222dbac"}, + {file = "fastar-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cd9c0d3ebf7a0a6f642f771cf41b79f7c98d40a3072a8abe1174fbd9bd615bd3"}, + {file = "fastar-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2875a077340fe4f8099bd3ed8fa90d9595e1ac3cd62ae19ab690d5bf550eeb35"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a999263d9f87184bf2801833b2ecf105e03c0dd91cac78685673b70da564fd64"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c41111da56430f638cbfc498ebdcc7d30f63416e904b27b7695c29bd4889cb8"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3719541a12bb09ab1eae91d2c987a9b2b7d7149c52e7109ba6e15b74aabc49b1"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9b0fff8079b18acdface7ef1b7f522fd9a589f65ca4a1a0dd7c92a0886c2a2"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac073576c1931959191cb20df38bab21dd152f66c940aa3ca8b22e39f753b2f3"}, + {file = "fastar-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:003b59a7c3e405b6a7bff8fab17d31e0ccbc7f06730a8f8ca1694eeea75f3c76"}, + {file = "fastar-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a7b96748425efd9fc155cd920d65088a1b0d754421962418ea73413d02ff515a"}, + {file = "fastar-0.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:90957a30e64418b02df5b4d525bea50403d98a4b1f29143ce5914ddfa7e54ee4"}, + {file = "fastar-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f6e784a8015623fbb7ccca1af372fd82cb511b408ddd2348dc929fc6e415df73"}, + {file = "fastar-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a03eaf287bbc93064688a1220580ce261e7557c8898f687f4d0b281c85b28d3c"}, + {file = "fastar-0.8.0-cp311-cp311-win32.whl", hash = "sha256:661a47ed90762f419406c47e802f46af63a08254ba96abd1c8191e4ce967b665"}, + {file = "fastar-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b48abd6056fef7bc3d414aafb453c5b07fdf06d2df5a2841d650288a3aa1e9d3"}, + {file = "fastar-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:50c18788b3c6ffb85e176dcb8548bb8e54616a0519dcdbbfba66f6bbc4316933"}, + {file = "fastar-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f10d2adfe40f47ff228f4efaa32d409d732ded98580e03ed37c9535b5fc923d"}, + {file = "fastar-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b930da9d598e3bc69513d131f397e6d6be4643926ef3de5d33d1e826631eb036"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9d210da2de733ca801de83e931012349d209f38b92d9630ccaa94bd445bdc9b8"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa02270721517078a5bd61a38719070ac2537a4aa6b6c48cf369cf2abc59174a"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83c391e5b789a720e4d0029b9559f5d6dee3226693c5b39c0eab8eaece997e0f"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3258d7a78a72793cdd081545da61cabe85b1f37634a1d0b97ffee0ff11d105ef"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6eab95dd985cdb6a50666cbeb9e4814676e59cfe52039c880b69d67cfd44767"}, + {file = "fastar-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:829b1854166141860887273c116c94e31357213fa8e9fe8baeb18bd6c38aa8d9"}, + {file = "fastar-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1667eae13f9457a3c737f4376d68e8c3e548353538b28f7e4273a30cb3965cd"}, + {file = "fastar-0.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b864a95229a7db0814cd9ef7987cb713fd43dce1b0d809dd17d9cd6f02fdde3e"}, + {file = "fastar-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c05fbc5618ce17675a42576fa49858d79734627f0a0c74c0875ab45ee8de340c"}, + {file = "fastar-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7f41c51ee96f338662ee3c3df4840511ba3f9969606840f1b10b7cb633a3c716"}, + {file = "fastar-0.8.0-cp312-cp312-win32.whl", hash = "sha256:d949a1a2ea7968b734632c009df0571c94636a5e1622c87a6e2bf712a7334f47"}, + {file = "fastar-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:fc645994d5b927d769121094e8a649b09923b3c13a8b0b98696d8f853f23c532"}, + {file = "fastar-0.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:d81ee82e8dc78a0adb81728383bd39611177d642a8fa2d601d4ad5ad59e5f3bd"}, + {file = "fastar-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a3253a06845462ca2196024c7a18f5c0ba4de1532ab1c4bad23a40b332a06a6a"}, + {file = "fastar-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5cbeb3ebfa0980c68ff8b126295cc6b208ccd81b638aebc5a723d810a7a0e5d2"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1c0d5956b917daac77d333d48b3f0f3ff927b8039d5b32d8125462782369f761"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27b404db2b786b65912927ce7f3790964a4bcbde42cdd13091b82a89cd655e1c"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0902fc89dcf1e7f07b8563032a4159fe2b835e4c16942c76fd63451d0e5f76a3"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:069347e2f0f7a8b99bbac8cd1bc0e06c7b4a31dc964fc60d84b95eab3d869dc1"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd135306f6bfe9a835918280e0eb440b70ab303e0187d90ab51ca86e143f70d"}, + {file = "fastar-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d06d6897f43c27154b5f2d0eb930a43a81b7eec73f6f0b0114814d4a10ab38"}, + {file = "fastar-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a922f8439231fa0c32b15e8d70ff6d415619b9d40492029dabbc14a0c53b5f18"}, + {file = "fastar-0.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a739abd51eb766384b4caff83050888e80cd75bbcfec61e6d1e64875f94e4a40"}, + {file = "fastar-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5a65f419d808b23ac89d5cd1b13a2f340f15bc5d1d9af79f39fdb77bba48ff1b"}, + {file = "fastar-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:7bb2ae6c0cce58f0db1c9f20495e7557cca2c1ee9c69bbd90eafd54f139171c5"}, + {file = "fastar-0.8.0-cp313-cp313-win32.whl", hash = "sha256:b28753e0d18a643272597cb16d39f1053842aa43131ad3e260c03a2417d38401"}, + {file = "fastar-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:620e5d737dce8321d49a5ebb7997f1fd0047cde3512082c27dc66d6ac8c1927a"}, + {file = "fastar-0.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:c4c4bd08df563120cd33e854fe0a93b81579e8571b11f9b7da9e84c37da2d6b6"}, + {file = "fastar-0.8.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:50b36ce654ba44b0e13fae607ae17ee6e1597b69f71df1bee64bb8328d881dfc"}, + {file = "fastar-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:63a892762683d7ab00df0227d5ea9677c62ff2cde9b875e666c0be569ed940f3"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4ae6a145c1bff592644bde13f2115e0239f4b7babaf506d14e7d208483cf01a5"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ae0ff7c0a1c7e1428404b81faee8aebef466bfd0be25bfe4dabf5d535c68741"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbfd87dbd217b45c898b2dbcd0169aae534b2c1c5cbe3119510881f6a5ac8ef5"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5abd99fcba83ef28c8fe6ae2927edc79053db43a0457a962ed85c9bf150d37"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91d4c685620c3a9d6b5ae091dbabab4f98b20049b7ecc7976e19cc9016c0d5d6"}, + {file = "fastar-0.8.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f77c2f2cad76e9dc7b6701297adb1eba87d0485944b416fc2ccf5516c01219a3"}, + {file = "fastar-0.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e7f07c4a3dada7757a8fc430a5b4a29e6ef696d2212747213f57086ffd970316"}, + {file = "fastar-0.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:90c0c3fe55105c0aed8a83135dbdeb31e683455dbd326a1c48fa44c378b85616"}, + {file = "fastar-0.8.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fb9ee51e5bffe0dab3d3126d3a4fac8d8f7235cedcb4b8e74936087ce1c157f3"}, + {file = "fastar-0.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e380b1e8d30317f52406c43b11e98d11e1d68723bbd031e18049ea3497b59a6d"}, + {file = "fastar-0.8.0-cp314-cp314-win32.whl", hash = "sha256:1c4ffc06e9c4a8ca498c07e094670d8d8c0d25b17ca6465b9774da44ea997ab1"}, + {file = "fastar-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:5517a8ad4726267c57a3e0e2a44430b782e00b230bf51c55b5728e758bb3a692"}, + {file = "fastar-0.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:58030551046ff4a8616931e52a36c83545ff05996db5beb6e0cd2b7e748aa309"}, + {file = "fastar-0.8.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:1e7d29b6bfecb29db126a08baf3c04a5ab667f6cea2b7067d3e623a67729c4a6"}, + {file = "fastar-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05eb7b96940f9526b485f1d0b02393839f0f61cac4b1f60024984f8b326d2640"}, + {file = "fastar-0.8.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:619352d8ac011794e2345c462189dc02ba634750d23cd9d86a9267dd71b1f278"}, + {file = "fastar-0.8.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74ebfecef3fe6d7a90355fac1402fd30636988332a1d33f3e80019a10782bb24"}, + {file = "fastar-0.8.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2975aca5a639e26a3ab0d23b4b0628d6dd6d521146c3c11486d782be621a35aa"}, + 
{file = "fastar-0.8.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afc438eaed8ff0dcdd9308268be5cb38c1db7e94c3ccca7c498ca13a4a4535a3"}, + {file = "fastar-0.8.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ced0a5399cc0a84a858ef0a31ca2d0c24d3bbec4bcda506a9192d8119f3590a"}, + {file = "fastar-0.8.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec9b23da8c4c039da3fe2e358973c66976a0c8508aa06d6626b4403cb5666c19"}, + {file = "fastar-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:dfba078fcd53478032fd0ceed56960ec6b7ff0511cfc013a8a3a4307e3a7bac4"}, + {file = "fastar-0.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:ade56c94c14be356d295fecb47a3fcd473dd43a8803ead2e2b5b9e58feb6dcfa"}, + {file = "fastar-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e48d938f9366db5e59441728f70b7f6c1ccfab7eff84f96f9b7e689b07786c52"}, + {file = "fastar-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:79c441dc1482ff51a54fb3f57ae6f7bb3d2cff88fa2cc5d196c519f8aab64a56"}, + {file = "fastar-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:187f61dc739afe45ac8e47ed7fd1adc45d52eac110cf27d579155720507d6fbe"}, + {file = "fastar-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:40e9d763cf8bf85ce2fa256e010aa795c0fe3d3bd1326d5c3084e6ce7857127e"}, + {file = "fastar-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e59673307b6a08210987059a2bdea2614fe26e3335d0e5d1a3d95f49a05b1418"}, + {file = "fastar-0.8.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5793b5db86ff0d588057b9089bf904a9ac288de0323a9973452a011a48ec23eb"}, + {file = "fastar-0.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cb073ab1905127ab6e052a5c7ccd409557ef086571f27de938764d3eaadfe07"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2716309f7326224b9f1341077d8c65ebb26335e5c93c409e1a23be03f1a01c50"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5ea223170ee6eb1eaf25ff8193df66a939c891f85a9a33def3add9df2ee1232"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5f24c6c3628faa3ee51df54d77dbf47c4f77a1951ea4ea14e4ccb855babced5"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d98894e4c3a2178f33f695940a615376728f6109f1a3431ac0a9fe98fe84ec7"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:77d7016f446678d44f1823f40a947db741643fa328142dac6f181046ba205b01"}, + {file = "fastar-0.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e88a80b40b7f929a7719a13d7332b4cb1344c5a1ac497044bd24f2adadf04c4"}, + {file = "fastar-0.8.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:7b8eb42f346024df3800d078fc0763275b1964d5d0762aa831bb0b539b5f1ee3"}, + {file = "fastar-0.8.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:41527617a8b592a29fa874e4dba305874b150601e2bf2e17a9f8099a9d179f28"}, + {file = "fastar-0.8.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:feb8f73ad25ad84f986dc53e7c6561b281ee2087500f6e400899c3bf1a3f6dc0"}, + {file = "fastar-0.8.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:afbdc2e87b7e56e11ad330859fe17d7a93a76cd637d7f33d1c9edd566d2f58d9"}, + {file = "fastar-0.8.0-cp38-cp38-win32.whl", hash = "sha256:1ccc1610c05183c0ff82fe93cdbc4eb0ea8b11f2f6d94f6d31ae342164fc6033"}, + {file = "fastar-0.8.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:c96abf34135cffb9652360cd827bda19855b803038d932dcd2a686b3d4e7e1ce"}, + {file = "fastar-0.8.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:923afc2db5192e56e71952a88e3fe5965c7c9c910d385d2db7573136f064f2fa"}, + {file = "fastar-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4fbe775356930f3aab0ce709fdf8ecf90c10882f5bbdcea215c89a3b14090c50"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ff516154e77f4bf78c31a0c11aa78a8a80e11b6964ec6f28982e42ffcbb543c"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2fdd1c987ff2300bdf39baed556f8e155f8577018775e794a268ecf1707610"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d80e4dad8ee2362a71870b1e735800bb5e97f12ebbee4bd0cf15a81ad2428b5a"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a17abee1febf5363ed2633f5e13de4be481ba1ab5f77860d39470eccdc4b65af"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64cbde8e0ece3d799090a4727f936f66c5990d3ac59416f3de76a2c676e8e568"}, + {file = "fastar-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63d98b26590d293a9d9a379bae88367a8f3a6137c28819ed6dd6e11aca4a5c6e"}, + {file = "fastar-0.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf440983d4d64582bddf2f0bd3c43ea1db93a8c31cf7c20e473bffaf6d9c0b6d"}, + {file = "fastar-0.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1d90cbf984a39afda27afe08e40c2d8eddc49c5e80590af641610c7b6dc20161"}, + {file = "fastar-0.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ca0db5e563d84b639fe15385eeca940777b6d2f0a1f3bb7cd5b55ab7124f0554"}, + {file = "fastar-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:42ff3052d74684a636423d4f040db88eebd4caf20842fa5f06020e0130c01f69"}, + {file = "fastar-0.8.0-cp39-cp39-win32.whl", hash = "sha256:15e3dfaa769d2117ef707e5f47c62126d1b63f8e9c85133112f33f1fbdf8942f"}, + {file = "fastar-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:5153aa1c194316d0f67b6884a62d122d51fce4196263e92e4bca2a6c47cd44c0"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2127cf2e80ffd49744a160201e0e2f55198af6c028a7b3f750026e0b1f1caa4e"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ff85094f10003801339ac4fa9b20a3410c2d8f284d4cba2dc99de6e98c877812"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3dbca235f0bd804cca6602fe055d3892bebf95fb802e6c6c7d872fb10f7abc6c"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e54bfdee6c81a0005e147319e93d8797f442308032c92fa28d03ef8fda076"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a78e5221b94a80800930b7fd0d0e797ae73aadf7044c05ed46cb9bdf870f022"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997092d31ff451de8d0568f6773f3517cb87dcd0bc76184edb65d7154390a6f8"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:558e8fcf8fe574541df5db14a46cd98bfbed14a811b7014a54f2b714c0cfac42"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1d2a54f87e2908cc19e1a6ee249620174fbefc54a219aba1eaa6f31657683c3"}, + {file = 
"fastar-0.8.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef94901537be277f9ec59db939eb817960496c6351afede5b102699b5098604d"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:0afbb92f78bf29d5e9db76fb46cbabc429e49015cddf72ab9e761afbe88ac100"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:fb59c7925e7710ad178d9e1a3e65edf295d9a042a0cdcb673b4040949eb8ad0a"}, + {file = "fastar-0.8.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e6c4d6329da568ec36b1347b0c09c4d27f9dfdeddf9f438ddb16799ecf170098"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:998e3fa4b555b63eb134e6758437ed739ad1652fdd2a61dfe1dacbfddc35fe66"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5f83e60d845091f3a12bc37f412774264d161576eaf810ed8b43567eb934b7e5"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:299672e1c74d8b73c61684fac9159cfc063d35f4b165996a88facb0e26862cb5"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3d3a27066b84d015deab5faee78565509bb33b137896443e4144cb1be1a5f90"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef0bcf4385bbdd3c1acecce2d9ea7dab7cc9b8ee0581bbccb7ab11908a7ce288"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f10ef62b6eda6cb6fd9ba8e1fe08a07d7b2bdcc8eaa00eb91566143b92ed7eee"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4f6c82a8ee98c17aa48585ee73b51c89c1b010e5c951af83e07c3436180e3fc"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6129067fcb86276635b5857010f4e9b9c7d5d15dd571bb03c6c1ed73c40fd92"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4cc9e77019e489f1ddac446b6a5b9dfb5c3d9abd142652c22a1d9415dbcc0e47"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:382bfe82c026086487cb17fee12f4c1e2b4e67ce230f2e04487d3e7ddfd69031"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:908d2b9a1ff3d549cc304b32f95706a536da8f0bcb0bc0f9e4c1cce39b80e218"}, + {file = "fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1aa7dbde2d2d73eb5b6203d0f74875cb66350f0f1b4325b4839fc8fbbf5d074e"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:284036bae786a520456ad3f58e72aaf1bd5d74e309132e568343564daa4ae383"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5aba0942b4f56acdb8fa8aa7cb506f70c1a17bf13dcab318a17ffb467cb2e7ec"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:52eda6230799db7bbd44461c622161e9bcd43603399da19b0daab2782e0030b0"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f860566b9f3cb1900980f46a4c3f003990c0009c11730f988f758542c17a2364"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:78f3fe5f45437c66d1dbece5f31aa487e48ef46d76b2082b873d5fa18013ebe1"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:82bc445202bbc53f067bb15e3b8639f01fd54d3096a0f9601240690cfd7c9684"}, + 
{file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b1208b5453cfe7192e54765f73844b80d684bd8dc6d6acbbb60ead42590b13e"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8922754c66699e27d4f1ce07c9c256228054cdc9bb36363e8bb5b503453a6da"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:92cad46dfbb9969359823c9f61165ec32d5d675d86e863889416e9b64efea95c"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:f4eb9560a447ff6a4b377f08b6e5d3a31909a612b028f2c57810ffaf570eceb8"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:52455794e6cc2b6a6dbf141a1c4312a1a1215d75e8849a35fcff694454da880f"}, + {file = "fastar-0.8.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:8de5decfa18a03807ae26ba5af095c2c04ac31ae915e9a849363a4495463171f"}, + {file = "fastar-0.8.0.tar.gz", hash = "sha256:f4d4d68dbf1c4c2808f0e730fac5843493fc849f70fe3ad3af60dfbaf68b9a12"}, +] + [[package]] name = "frozenlist" version = "1.5.0" @@ -1217,7 +1425,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1422,7 +1630,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==8.1.3)", "build (==1.2.2)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.5.0)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.13.0)", "mypy (==v1.4.1)", "myst-parser (==4.0.0)", "pre-commit (==4.0.1)", "pytest (==6.1.2)", "pytest (==8.3.2)", "pytest-cov (==2.12.1)", "pytest-cov (==5.0.0)", "pytest-cov (==6.0.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.1.0)", "sphinx-rtd-theme (==3.0.2)", "tox (==3.27.1)", "tox (==4.23.2)", "twine (==6.0.1)"] +dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] [[package]] name = "markdown-it-py" @@ -1562,7 +1770,7 @@ 
PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} requests = ">=2.0.0,<3" [package.extras] -broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] +broker = ["pymsalruntime (>=0.14,<0.18) ; python_version >= \"3.6\" and platform_system == \"Windows\"", "pymsalruntime (>=0.17,<0.18) ; python_version >= \"3.8\" and platform_system == \"Darwin\""] [[package]] name = "msal-extensions" @@ -1746,102 +1954,127 @@ files = [ [[package]] name = "pillow" -version = "11.2.1" +version = "11.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pillow-11.2.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047"}, - {file = "pillow-11.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c"}, - {file = "pillow-11.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97"}, - {file = "pillow-11.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579"}, - {file = "pillow-11.2.1-cp310-cp310-win32.whl", hash = "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d"}, - {file = "pillow-11.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad"}, - {file = "pillow-11.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70"}, - {file = "pillow-11.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600"}, - {file = "pillow-11.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e"}, - {file = "pillow-11.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e"}, - {file = "pillow-11.2.1-cp311-cp311-win32.whl", hash = 
"sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6"}, - {file = "pillow-11.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193"}, - {file = "pillow-11.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7"}, - {file = "pillow-11.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f"}, - {file = "pillow-11.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d"}, - {file = "pillow-11.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443"}, - {file = "pillow-11.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c"}, - {file = "pillow-11.2.1-cp312-cp312-win32.whl", hash = "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3"}, - {file = "pillow-11.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941"}, - {file = "pillow-11.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb"}, - {file = "pillow-11.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28"}, - {file = "pillow-11.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f"}, - {file = "pillow-11.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14"}, - {file = "pillow-11.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b"}, - {file = "pillow-11.2.1-cp313-cp313-win32.whl", hash = "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2"}, - {file = "pillow-11.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691"}, - {file = "pillow-11.2.1-cp313-cp313-win_arm64.whl", hash = 
"sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22"}, - {file = "pillow-11.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406"}, - {file = "pillow-11.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751"}, - {file = "pillow-11.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9"}, - {file = "pillow-11.2.1-cp313-cp313t-win32.whl", hash = "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd"}, - {file = "pillow-11.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e"}, - {file = "pillow-11.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8"}, - {file = "pillow-11.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35"}, - {file = "pillow-11.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb"}, - {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a"}, - {file = "pillow-11.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36"}, - {file = "pillow-11.2.1-cp39-cp39-win32.whl", hash = "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67"}, - {file = "pillow-11.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1"}, - {file = "pillow-11.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193"}, - {file = "pillow-11.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f"}, - {file = "pillow-11.2.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044"}, - {file = "pillow-11.2.1.tar.gz", hash = "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] + {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"}, + {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"}, + {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"}, + {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"}, + {file = 
"pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"}, + {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"}, + {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"}, + {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"}, + {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"}, + {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"}, + {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"}, + {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"}, + {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"}, + {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"}, + {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"}, + {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"}, + {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"}, + {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"}, + {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"}, + {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"}, + {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"}, + {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"}, + {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"}, + {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"}, + {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"}, + {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"}, + {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"}, + {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"}, + {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"}, + {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"}, + {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"}, + {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"}, + {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"}, + {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"}, + {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"}, + {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"}, + {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"}, + {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"}, + {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"}, + {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"}, + {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"}, + {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = 
"sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"}, + {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"}, + {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"}, + {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"}, + {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"}, + {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"}, + {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"}, + {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"}, + {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"}, + {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", 
hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"}, + {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"}, + {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] test-arrow = ["pyarrow"] -tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"] +typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] [[package]] @@ -2016,12 +2249,13 @@ files = [ [package.dependencies] annotated-types = ">=0.6.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} pydantic-core = "2.27.2" typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -2136,6 +2370,31 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pydantic-extra-types" +version = "2.11.0" +description = "Extra Pydantic types." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6"}, + {file = "pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e"}, +] + +[package.dependencies] +pydantic = ">=2.5.2" +typing-extensions = "*" + +[package.extras] +all = ["cron-converter (>=1.2.2)", "pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<10)", "pycountry (>=23)", "pymongo (>=4.0.0,<5.0.0)", "python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\"", "pytz (>=2024.1)", "semver (>=3.0.2)", "semver (>=3.0.2,<3.1.0)", "tzdata (>=2024.1)"] +cron = ["cron-converter (>=1.2.2)"] +pendulum = ["pendulum (>=3.0.0,<4.0.0)"] +phonenumbers = ["phonenumbers (>=8,<10)"] +pycountry = ["pycountry (>=23)"] +python-ulid = ["python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\""] +semver = ["semver (>=3.0.2)"] + [[package]] name = "pydantic-settings" version = "2.8.1" @@ -2435,14 +2694,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rich-toolkit" -version = "0.13.2" +version = "0.17.1" description = "Rich toolkit for building command-line applications" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61"}, - {file = "rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3"}, + {file = "rich_toolkit-0.17.1-py3-none-any.whl", hash = "sha256:96d24bb921ecd225ffce7c526a9149e74006410c05e6d405bd74ffd54d5631ed"}, + {file = "rich_toolkit-0.17.1.tar.gz", hash = "sha256:5af54df8d1dd9c8530e462e1bdcaed625c9b49f5a55b035aa0ba1c17bdb87c9a"}, ] [package.dependencies] @@ -2450,6 +2709,170 @@ click = ">=8.1.7" rich = ">=13.7.1" typing-extensions = ">=4.12.2" +[[package]] +name = "rignore" +version = "0.7.6" +description = "Python Bindings for the ignore crate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "rignore-0.7.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f3c74a7e5ee77aea669c95fdb3933f2a6c7549893700082e759128a29cf67e45"}, + {file = "rignore-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7202404958f5fe3474bac91f65350f0b1dde1a5e05089f2946549b7e91e79ec"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bde7c5835fa3905bfb7e329a4f1d7eccb676de63da7a3f934ddd5c06df20597"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:626c3d4ba03af266694d25101bc1d8d16eda49c5feb86cedfec31c614fceca7d"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a43841e651e7a05a4274b9026cc408d1912e64016ede8cd4c145dae5d0635be"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7978c498dbf7f74d30cdb8859fe612167d8247f0acd377ae85180e34490725da"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d22f72ab695c07d2d96d2a645208daff17084441b5d58c07378c9dd6f9c4c87"}, + {file = "rignore-0.7.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5bd8e1a91ed1a789b2cbe39eeea9204a6719d4f2cf443a9544b521a285a295f"}, + {file = 
"rignore-0.7.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fc03efad5789365018e94ac4079f851a999bc154d1551c45179f7fcf45322"}, + {file = "rignore-0.7.6-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:ce2617fe28c51367fd8abfd4eeea9e61664af63c17d4ea00353d8ef56dfb95fa"}, + {file = "rignore-0.7.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7c4ad2cee85068408e7819a38243043214e2c3047e9bd4c506f8de01c302709e"}, + {file = "rignore-0.7.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:02cd240bfd59ecc3907766f4839cbba20530a2e470abca09eaa82225e4d946fb"}, + {file = "rignore-0.7.6-cp310-cp310-win32.whl", hash = "sha256:fe2bd8fa1ff555259df54c376abc73855cb02628a474a40d51b358c3a1ddc55b"}, + {file = "rignore-0.7.6-cp310-cp310-win_amd64.whl", hash = "sha256:d80afd6071c78baf3765ec698841071b19e41c326f994cfa69b5a1df676f5d39"}, + {file = "rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c"}, + {file = "rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c"}, + {file = "rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049"}, + {file = "rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce"}, + {file = "rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb"}, + {file = "rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e"}, + {file = "rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345"}, + {file = "rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5"}, + {file = "rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a"}, + {file = "rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581"}, + {file = "rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8"}, + {file = "rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5fd5ab3840b8c16851d327ed06e9b8be6459702a53e5ab1fc4073b684b3789e"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ced2a248352636a5c77504cb755dc02c2eef9a820a44d3f33061ce1bb8a7f2d2"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04a3b73b75ddc12c9c9b21efcdaab33ca3832941d6f1d67bffd860941cd448a"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24321efac92140b7ec910ac7c53ab0f0c86a41133d2bb4b0e6a7c94967f44dd"}, + {file = "rignore-0.7.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c7aa109d41e593785c55fdaa89ad80b10330affa9f9d3e3a51fa695f739b20"}, + {file = "rignore-0.7.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1734dc49d1e9501b07852ef44421f84d9f378da9fbeda729e77db71f49cac28b"}, + {file = "rignore-0.7.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5719ea14ea2b652c0c0894be5dfde954e1853a80dea27dd2fbaa749618d837f5"}, + {file = "rignore-0.7.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e23424fc7ce35726854f639cb7968151a792c0c3d9d082f7f67e0c362cfecca"}, + {file = "rignore-0.7.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3efdcf1dd84d45f3e2bd2f93303d9be103888f56dfa7c3349b5bf4f0657ec696"}, + {file = "rignore-0.7.6-cp312-cp312-win32.whl", hash = "sha256:ccca9d1a8b5234c76b71546fc3c134533b013f40495f394a65614a81f7387046"}, + {file = "rignore-0.7.6-cp312-cp312-win_amd64.whl", hash = "sha256:c96a285e4a8bfec0652e0bfcf42b1aabcdda1e7625f5006d188e3b1c87fdb543"}, + {file = "rignore-0.7.6-cp312-cp312-win_arm64.whl", hash = "sha256:a64a750e7a8277a323f01ca50b7784a764845f6cce2fe38831cb93f0508d0051"}, + {file = "rignore-0.7.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2bdab1d31ec9b4fb1331980ee49ea051c0d7f7bb6baa28b3125ef03cdc48fdaf"}, + {file = "rignore-0.7.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90f0a00ce0c866c275bf888271f1dc0d2140f29b82fcf33cdbda1e1a6af01010"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ad295537041dc2ed4b540fb1a3906bd9ede6ccdad3fe79770cd89e04e3c73c"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f782dbd3a65a5ac85adfff69e5c6b101285ef3f845c3a3cae56a54bebf9fe116"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65cece3b36e5b0826d946494734c0e6aaf5a0337e18ff55b071438efe13d559e"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7e4bb66c13cd7602dc8931822c02dfbbd5252015c750ac5d6152b186f0a8be0"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297e500c15766e196f68aaaa70e8b6db85fa23fdc075b880d8231fdfba738cd7"}, + {file = "rignore-0.7.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a07084211a8d35e1a5b1d32b9661a5ed20669970b369df0cf77da3adea3405de"}, + {file = "rignore-0.7.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:181eb2a975a22256a1441a9d2f15eb1292839ea3f05606620bd9e1938302cf79"}, + {file = "rignore-0.7.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:7bbcdc52b5bf9f054b34ce4af5269df5d863d9c2456243338bc193c28022bd7b"}, + {file = 
"rignore-0.7.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f2e027a6da21a7c8c0d87553c24ca5cc4364def18d146057862c23a96546238e"}, + {file = "rignore-0.7.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee4a18b82cbbc648e4aac1510066682fe62beb5dc88e2c67c53a83954e541360"}, + {file = "rignore-0.7.6-cp313-cp313-win32.whl", hash = "sha256:a7d7148b6e5e95035d4390396895adc384d37ff4e06781a36fe573bba7c283e5"}, + {file = "rignore-0.7.6-cp313-cp313-win_amd64.whl", hash = "sha256:b037c4b15a64dced08fc12310ee844ec2284c4c5c1ca77bc37d0a04f7bff386e"}, + {file = "rignore-0.7.6-cp313-cp313-win_arm64.whl", hash = "sha256:e47443de9b12fe569889bdbe020abe0e0b667516ee2ab435443f6d0869bd2804"}, + {file = "rignore-0.7.6-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8e41be9fa8f2f47239ded8920cc283699a052ac4c371f77f5ac017ebeed75732"}, + {file = "rignore-0.7.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6dc1e171e52cefa6c20e60c05394a71165663b48bca6c7666dee4f778f2a7d90"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ce2268837c3600f82ab8db58f5834009dc638ee17103582960da668963bebc5"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:690a3e1b54bfe77e89c4bacb13f046e642f8baadafc61d68f5a726f324a76ab6"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09d12ac7a0b6210c07bcd145007117ebd8abe99c8eeb383e9e4673910c2754b2"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a2b2b74a8c60203b08452479b90e5ce3dbe96a916214bc9eb2e5af0b6a9beb0"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc5a531ef02131e44359419a366bfac57f773ea58f5278c2cdd915f7d10ea94"}, + {file = "rignore-0.7.6-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7a1f77d9c4cd7e76229e252614d963442686bfe12c787a49f4fe481df49e7a9"}, + {file = "rignore-0.7.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ead81f728682ba72b5b1c3d5846b011d3e0174da978de87c61645f2ed36659a7"}, + {file = "rignore-0.7.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:12ffd50f520c22ffdabed8cd8bfb567d9ac165b2b854d3e679f4bcaef11a9441"}, + {file = "rignore-0.7.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e5a16890fbe3c894f8ca34b0fcacc2c200398d4d46ae654e03bc9b3dbf2a0a72"}, + {file = "rignore-0.7.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3abab3bf99e8a77488ef6c7c9a799fac22224c28fe9f25cc21aa7cc2b72bfc0b"}, + {file = "rignore-0.7.6-cp314-cp314-win32.whl", hash = "sha256:eeef421c1782953c4375aa32f06ecae470c1285c6381eee2a30d2e02a5633001"}, + {file = "rignore-0.7.6-cp314-cp314-win_amd64.whl", hash = "sha256:6aeed503b3b3d5af939b21d72a82521701a4bd3b89cd761da1e7dc78621af304"}, + {file = "rignore-0.7.6-cp314-cp314-win_arm64.whl", hash = "sha256:104f215b60b3c984c386c3e747d6ab4376d5656478694e22c7bd2f788ddd8304"}, + {file = "rignore-0.7.6-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bb24a5b947656dd94cb9e41c4bc8b23cec0c435b58be0d74a874f63c259549e8"}, + {file = "rignore-0.7.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b1e33c9501cefe24b70a1eafd9821acfd0ebf0b35c3a379430a14df089993e3"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bec3994665a44454df86deb762061e05cd4b61e3772f5b07d1882a8a0d2748d5"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:26cba2edfe3cff1dfa72bddf65d316ddebf182f011f2f61538705d6dbaf54986"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffa86694fec604c613696cb91e43892aa22e1fec5f9870e48f111c603e5ec4e9"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48efe2ed95aa8104145004afb15cdfa02bea5cdde8b0344afeb0434f0d989aa2"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dcae43eb44b7f2457fef7cc87f103f9a0013017a6f4e62182c565e924948f21"}, + {file = "rignore-0.7.6-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2cd649a7091c0dad2f11ef65630d30c698d505cbe8660dd395268e7c099cc99f"}, + {file = "rignore-0.7.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42de84b0289d478d30ceb7ae59023f7b0527786a9a5b490830e080f0e4ea5aeb"}, + {file = "rignore-0.7.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:875a617e57b53b4acbc5a91de418233849711c02e29cc1f4f9febb2f928af013"}, + {file = "rignore-0.7.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8703998902771e96e49968105207719f22926e4431b108450f3f430b4e268b7c"}, + {file = "rignore-0.7.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:602ef33f3e1b04c1e9a10a3c03f8bc3cef2d2383dcc250d309be42b49923cabc"}, + {file = "rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9"}, + {file = "rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96"}, + {file = "rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896"}, + {file = "rignore-0.7.6-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:1bd0bf3f4e57f3d50a91dd4eff6a22ddc9b999dbab2b20fb0473332a5551a0be"}, + {file = "rignore-0.7.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:afb5157cd217af4f47a13ad7cbfc35de0aa1740331ba662fa02fea94269d5894"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca877c5a7b78fe74d97b34b735ea8f320f97c49083f7bf8fe9b61a02cf677e67"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5fde2bdfd6b3afee19db5efe01e4165437329f9300441c1b25d5b2aa6752c0cc"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef2183285a49653517a100f28d8c1a3e037a5e8cefe79cffe205ecc4b98f5095"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87554ae12f813d3a287a0f2aad957c11e5c4ace17bfed15d471e5be13e95d9fb"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3111040f77ec6b543a501a194c48d5260898e618712472deb91bf48026f1606c"}, + {file = "rignore-0.7.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8dfa178ead3abeeaf6b8c4fe9c6c9b333d2d66c88735566f919169d18e728fa5"}, + {file = "rignore-0.7.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:112527b824eaa93c99c2c7eb11e7df83eab46a63d527bcd71a92151bba5d0435"}, + {file = "rignore-0.7.6-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:435c0c0f38f15d9bef2a97b039b5157bbc32791510670b89504e644de1d27a5e"}, + {file = "rignore-0.7.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:96e899cd34b422c2d3ad7bef279e16387f217d53ec5f9a25dbc3fcad19470381"}, + {file = "rignore-0.7.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:2ba1b9c80df4ea126ef303c7646021f44486342d43b7153f3454e15cd55eaa87"}, + {file = "rignore-0.7.6-cp38-cp38-win32.whl", hash = "sha256:1a1dffbfd930b27aef1962098710344297d52368b362f918eaf1464b0d8d052c"}, + {file = "rignore-0.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7f41cecc799005a029407893071b15082d504f9115a57db9ea893b35f3f70604"}, + {file = "rignore-0.7.6-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b3746bda73f2fe6a9c3ab2f20b792e7d810b30acbdba044313fbd2d0174802e7"}, + {file = "rignore-0.7.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:67a99cf19a5137cc12f14b78dc1bb3f48500f1d5580702c623297d5297bf2752"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9e851cfa87033c0c3fd9d35dd8b102aff2981db8bc6e0cab27b460bfe38bf3f"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e9b0def154665036516114437a5d603274e5451c0dc9694f622cc3b7e94603e7"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b81274a47e8121224f7f637392b5dfcd9558e32a53e67ba7d04007d8b5281da9"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d75d0b0696fb476664bea1169c8e67b13197750b91eceb4f10b3c7f379c7a204"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ad3aa4dca77cef9168d0c142f72376f5bd27d1d4b8a81561bd01276d3ad9fe1"}, + {file = "rignore-0.7.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00f8a59e19d219f44a93af7173de197e0d0e61c386364da20ebe98a303cbe38c"}, + {file = "rignore-0.7.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dd6c682f3cdd741e7a30af2581f6a382ac910080977cd1f97c651467b6268352"}, + {file = "rignore-0.7.6-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ae4e93193f75ebf6b820241594a78f347785cfd5a5fbbac94634052589418352"}, + {file = "rignore-0.7.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1163d8b5d3a320d4d7cc8635213328850dc41f60e438c7869d540061adf66c98"}, + {file = "rignore-0.7.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3e685f47b4c58b2df7dee81ebc1ec9dbb7f798b9455c3f22be6d75ac6bddee30"}, + {file = "rignore-0.7.6-cp39-cp39-win32.whl", hash = "sha256:2af6a0a76575220863cd838693c808a94e750640e0c8a3e9f707e93c2f131fdf"}, + {file = "rignore-0.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:a326eab6db9ab85b4afb5e6eb28736a9f2b885a9246d9e8c1989bc693dd059a0"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3d3a523af1cd4ed2c0cba8d277a32d329b0c96ef9901fb7ca45c8cfaccf31a5"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:990853566e65184a506e1e2af2d15045afad3ebaebb8859cb85b882081915110"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cab9ff2e436ce7240d7ee301c8ef806ed77c1fd6b8a8239ff65f9bbbcb5b8a3"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1a6671b2082c13bfd9a5cf4ce64670f832a6d41470556112c4ab0b6519b2fc4"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2468729b4c5295c199d084ab88a40afcb7c8b974276805105239c07855bbacee"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:775710777fd71e5fdf54df69cdc249996a1d6f447a2b5bfb86dbf033fddd9cf9"}, + {file = 
"rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4565407f4a77f72cf9d91469e75d15d375f755f0a01236bb8aaa176278cc7085"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc44c33f8fb2d5c9da748de7a6e6653a78aa740655e7409895e94a247ffa97c8"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8f32478f05540513c11923e8838afab9efef0131d66dca7f67f0e1bbd118af6a"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:1b63a3dd76225ea35b01dd6596aa90b275b5d0f71d6dc28fce6dd295d98614aa"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:fe6c41175c36554a4ef0994cd1b4dbd6d73156fca779066456b781707402048e"}, + {file = "rignore-0.7.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a0c6792406ae36f4e7664dc772da909451d46432ff8485774526232d4885063"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79"}, + {file = "rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e6ba1511c0ab8cd1ed8d6055bb0a6e629f48bfe04854293e0cd2dd88bd7153f8"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:50586d90be15f9aa8a2e2ee5a042ee6c51e28848812a35f0c95d4bfc0533d469"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b129873dd0ade248e67f25a09b5b72288cbef76ba1a9aae6bac193ee1d8be72"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d9d6dd947556ddebfd62753005104986ee14a4e0663818aed19cdf2c33a6b5d5"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91b95faa532efba888b196331e9af69e693635d469185ac52c796e435e2484e5"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1016f430fb56f7e400838bbc56fdf43adddb6fcb7bf2a14731dfd725c2fae6c"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f00c519861926dc703ecbb7bbeb884be67099f96f98b175671fa0a54718f55d1"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e34d172bf50e881b7c02e530ae8b1ea96093f0b16634c344f637227b39707b41"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:101d3143619898db1e7bede2e3e647daf19bb867c4fb25978016d67978d14868"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c9f3b420f54199a2b2b3b532d8c7e0860be3fa51f67501113cca6c7bfc392840"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:1c6795e3694d750ae5ef172eab7d68a52aefbd9168d2e06647df691db2b03a50"}, + {file = "rignore-0.7.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:750a83a254b020e1193bfa7219dc7edca26bd8888a94cdc59720cbe386ab0c72"}, + {file = "rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671"}, +] + [[package]] name = "roman-numerals-py" version = "3.1.0" @@ -2484,6 +2907,69 @@ botocore = ">=1.36.0,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.36.0,<2.0a.0)"] +[[package]] +name = "sentry-sdk" +version = "2.48.0" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.48.0-py2.py3-none-any.whl", hash = "sha256:6b12ac256769d41825d9b7518444e57fa35b5642df4c7c5e322af4d2c8721172"}, + {file = "sentry_sdk-2.48.0.tar.gz", hash = "sha256:5213190977ff7fdff8a58b722fb807f8d5524a80488626ebeda1b5676c0c1473"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +google-genai = ["google-genai (>=1.29.0)"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +langgraph = ["langgraph (>=0.6.6)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litellm = ["litellm (>=1.77.5)"] +litestar = ["litestar (>=2.0.0)"] +loguru = ["loguru (>=0.5)"] +mcp = ["mcp (>=1.15.0)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +opentelemetry-otlp = ["opentelemetry-distro[otlp] (>=0.35b0)"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pydantic-ai = ["pydantic-ai (>=1.0.0)"] 
+pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + [[package]] name = "shellingham" version = "1.5.4" @@ -2724,18 +3210,19 @@ test = ["pytest"] [[package]] name = "starlette" -version = "0.45.3" +version = "0.50.0" description = "The little ASGI library that shines." optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" groups = ["main"] files = [ - {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"}, - {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"}, + {file = "starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca"}, + {file = "starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca"}, ] [package.dependencies] anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""} [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] @@ -2800,7 +3287,7 @@ pypika-tortoise = ">=0.2.1,<0.3.0" pytz = "*" [package.extras] -accel = ["ciso8601", "orjson", "uvloop"] +accel = ["ciso8601 ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "orjson", "uvloop ; sys_platform != \"win32\" and implementation_name == \"cpython\""] aiomysql = ["aiomysql"] asyncmy = ["asyncmy (>=0.2.8,<0.3.0)"] asyncodbc = ["asyncodbc (>=0.1.1,<0.2.0)"] @@ -2872,7 +3359,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2896,12 +3383,12 @@ h11 = ">=0.8" httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} [package.extras] -standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != 
\"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "uvloop" @@ -2910,7 +3397,7 @@ description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" groups = ["main"] -markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\"" +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -3133,7 +3620,7 @@ files = [ ] [package.extras] -dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] +dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] [[package]] name = "wrapt" @@ -3324,4 +3811,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.1" python-versions = "^3.12" -content-hash = "fb6235544488a8832516f436ae689b7f1f58c6e9ab89cb973bdfe45b9fc17c2c" +content-hash = "abe1e1908f4dd6820823789af76063314a0d94bf9b5953308af8c4f4d5b57edb" diff --git a/pyproject.toml b/pyproject.toml index a0dd966..eaa33be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ tenacity = "^8.5.0" aioboto3 = "^13.1.1" azure-storage-blob = "^12.23.1" azure-identity = "^1.18.0" -fastapi = {extras = ["standard"], version = "^0.115.5"} +fastapi = {extras = ["standard"], version = ">=0.115.5,<1.0.0"} pillow = "^11.2.1" From 041edc95ea5428de67d4c51593cce7cf53c01693 Mon Sep 17 00:00:00 2001 From: Mykola Solodukha Date: Mon, 2 Feb 2026 11:45:00 +0200 Subject: [PATCH 56/57] =?UTF-8?q?=E2=9C=A8=20Add=20`make=5Fpublic`=20param?= =?UTF-8?q?eter=20to=20file=20storage=20`upload=5Ffile`?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../storage_services/_base_storage_service.py | 6 ++++-- .../storage_services/azure_blob_storage_service.py | 9 +++++++-- .../files_storage/storage_services/s3_service.py | 12 +++++++++--- .../toolkit/files_storage/universal_file_storage.py | 4 ++-- 4 files changed, 22 insertions(+), 9 deletions(-) diff --git a/electro/toolkit/files_storage/storage_services/_base_storage_service.py b/electro/toolkit/files_storage/storage_services/_base_storage_service.py index 1faa45c..e1c32d3 100644 --- a/electro/toolkit/files_storage/storage_services/_base_storage_service.py +++ b/electro/toolkit/files_storage/storage_services/_base_storage_service.py @@ -8,10 +8,12 @@ class BaseStorageService(ABC): """Base class for storage services.""" @abstractmethod - async def upload_file(self, file_io: BytesIO, content_type: str) -> str: - """Uploads an file to the storage and returns the object key. + async def upload_file(self, file_io: BytesIO, content_type: str, *, make_public: bool = False) -> str: + """Uploads a file to the storage and returns the object key. 
:param file_io: BytesIO object of the file to upload +        :param content_type: MIME type of the file +        :param make_public: If True, make the file publicly accessible (S3: sets ACL to public-read) :return: object key of the uploaded file """ diff --git a/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py b/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py index b0339da..49a4471 100644 --- a/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py +++ b/electro/toolkit/files_storage/storage_services/azure_blob_storage_service.py @@ -44,8 +44,13 @@ async def _ensure_container_exists(self): except ResourceNotFoundError: await container_client.create_container() -    async def upload_file(self, file_io: BytesIO, content_type: str) -> str: -        """Upload an file to the Azure Blob Storage.""" +    async def upload_file(self, file_io: BytesIO, content_type: str, *, make_public: bool = False) -> str: +        """Upload a file to the Azure Blob Storage. + +        Note: make_public is accepted for API consistency, but Azure blob public access +        is controlled at the container level. Use SAS tokens via get_file_url() for access. +        """ +        _ = make_public  # Azure public access is container-level, not per-blob blob_name = f"file_{os.urandom(8).hex()}.png" async with await self.blob_service_client as client: await self._ensure_container_exists() diff --git a/electro/toolkit/files_storage/storage_services/s3_service.py b/electro/toolkit/files_storage/storage_services/s3_service.py index 7e9c77b..187fdbb 100644 --- a/electro/toolkit/files_storage/storage_services/s3_service.py +++ b/electro/toolkit/files_storage/storage_services/s3_service.py @@ -63,17 +63,23 @@ async def _download_file(self, object_key: str, destination: str | BytesIO | Non return destination -    async def upload_file(self, file_io: BytesIO, content_type: str) -> str: +    async def upload_file(self, file_io: BytesIO, content_type: str, *, make_public: bool = False) -> str: """Uploads a file to the S3 bucket and returns the object key. 
:param file_io: BytesIO object of the file to upload +        :param content_type: MIME type of the file +        :param make_public: If True, set ACL to public-read for public access :return: object key of the uploaded file """ object_key = str(uuid4()) +        extra_args = {"ContentType": content_type} + +        if make_public: +            extra_args["ACL"] = "public-read" + try: -            # TODO: [2024-10-05 by Mykola] IT'S NOT ALWAYS JPEG -            await self._upload_file(file_io, object_key, extra_args={"ContentType": content_type}) +            await self._upload_file(file_io, object_key, extra_args=extra_args) logger.info(f"File uploaded successfully: {object_key}") return object_key except Exception as e: diff --git a/electro/toolkit/files_storage/universal_file_storage.py b/electro/toolkit/files_storage/universal_file_storage.py index c95ba47..0def93d 100644 --- a/electro/toolkit/files_storage/universal_file_storage.py +++ b/electro/toolkit/files_storage/universal_file_storage.py @@ -17,9 +17,9 @@ def __init__(self, storage_service: BaseStorageService): """Initialize the UniversalFileStorage class.""" self.storage_service = storage_service -    async def upload_file(self, file_io: BytesIO, content_type: str) -> str: +    async def upload_file(self, file_io: BytesIO, content_type: str, *, make_public: bool = False) -> str: """Upload a file to the storage service.""" -        return await self.storage_service.upload_file(file_io, content_type) +        return await self.storage_service.upload_file(file_io, content_type, make_public=make_public) async def download_file(self, object_key: str) -> BytesIO: """Download a file from the storage service.""" From 01e7e0192dcccb645000ec688fffeeca6c2bd895 Mon Sep 17 00:00:00 2001 From: Sam Onaisi  Date: Tue, 3 Feb 2026 17:39:37 +0100 Subject: [PATCH 57/57] fix: platform default value in auth --- electro/authentication.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/electro/authentication.py b/electro/authentication.py index 43e0866..fdb2acc 100644 --- a/electro/authentication.py +++ b/electro/authentication.py @@ -17,8 +17,8 @@ class ElectroAuthentication: @classmethod async def authenticate_user( cls, -        platform: str, -        user_id: str, +        platform: str = SupportedPlatforms.CUSTOM.value, +        user_id: Optional[str] = None, header: Optional[str] = Header(default=None, alias="Authorization"), cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), ) -> User: @@ -44,14 +44,16 @@ async def authenticate_user( authorization = authorization.split(" ")[1] if authentication_method == "api_key": +            if not user_id: +                raise HTTPException(status_code=400, detail="user_id is required for API key authentication.") return await cls._api_key_authenticate_user(platform, user_id, authorization) return await cls._jwt_authenticate_user(platform, authorization) @classmethod async def authenticate_admin( cls, -        platform: str, -        user_id: str, +        platform: str = SupportedPlatforms.CUSTOM.value, +        user_id: Optional[str] = None, header: Optional[str] = Header(default=None, alias="Authorization"), cookie: Optional[str] = Cookie(default=None, alias="IKIGAI_AUTHORIZATION"), ) -> User:
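To make the behavioural change in this last patch concrete: `platform` now falls back to `SupportedPlatforms.CUSTOM.value` and `user_id` becomes optional, but API-key authentication still needs an explicit `user_id`. The sketch below is not part of the patch; `check_api_key_preconditions` is a hypothetical helper that only mirrors the guard added to `ElectroAuthentication.authenticate_user`.

```python
from typing import Optional

from fastapi import HTTPException


def check_api_key_preconditions(authentication_method: str, user_id: Optional[str]) -> None:
    """Hypothetical helper mirroring the guard added to ElectroAuthentication.authenticate_user."""
    if authentication_method == "api_key" and not user_id:
        raise HTTPException(status_code=400, detail="user_id is required for API key authentication.")


check_api_key_preconditions("jwt", None)          # JWT flow: no explicit user_id required
check_api_key_preconditions("api_key", "user-1")  # API-key flow with an explicit user_id

try:
    check_api_key_preconditions("api_key", None)  # API-key flow without user_id -> 400
except HTTPException as exc:
    assert exc.status_code == 400
```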
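A similar sketch for the `make_public` flag introduced earlier in this series: the flag travels as a keyword-only argument from `UniversalFileStorage.upload_file` down to the concrete storage service, and on S3 its only effect is on the extra arguments passed to the upload call. `build_s3_upload_extra_args` is a hypothetical helper; the dictionary it builds mirrors the lines added to `s3_service.py`.

```python
def build_s3_upload_extra_args(content_type: str, *, make_public: bool = False) -> dict[str, str]:
    """Hypothetical helper mirroring the extra_args logic added to the S3 upload_file."""
    extra_args = {"ContentType": content_type}
    if make_public:
        # Only opt in to a public-read ACL when the caller explicitly asks for it.
        extra_args["ACL"] = "public-read"
    return extra_args


assert build_s3_upload_extra_args("image/png") == {"ContentType": "image/png"}
assert build_s3_upload_extra_args("image/png", make_public=True) == {
    "ContentType": "image/png",
    "ACL": "public-read",
}
```

Making the flag keyword-only keeps existing positional calls to `upload_file` working unchanged, and backends that cannot honour it per object (Azure Blob Storage, where public access is container-level) can simply ignore it.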