diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..70347413
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,15 @@
+venv/
+.venv/
+__pycache__/
+*.pyc
+*.pyo
+*.pyd
+
+.vscode/
+.idea/
+
+.pytest_cache/
+.mypy_cache/
+
+.env
+*.log
\ No newline at end of file
diff --git a/README.md b/README.md
index 163d41b9..a9fb2c06 100644
--- a/README.md
+++ b/README.md
@@ -1,58 +1,14 @@
-# Безопасность веб-приложений. Лабораторка №2
+# Web Application Security
-## Схема сдачи
+## Lab 2
-1. Получить задание
-2. Сделать форк данного репозитория
-3. Выполнить задание согласно полученному варианту
-4. Сделать PR (pull request) в данный репозиторий
-6. Исправить замечания после code review
-7. Получить approve
-8. Прийти на занятие и защитить работу
-
-Что нужно проявить в работе:
-- умение разработать завершенное целое веб-приложение, с клиентской и серверной частями (допустимы открытые АПИ)
-- навыки верстки на html в объеме 200-300 тегов
-- навыки применения css для лейаута и стилизации, желательно с адаптацией к мобилке
-- использование jQuery или аналогичных JS-фреймворков
-- динамическая подгрузка контента
-- динамическое изменение DOM и CSSOM
-
-Если у вас своя идея по заданию, то расскажите, обсудим и подкорректирую.
-
-## Вариант 1. Расписания
-
-Сделать аналог раздела https://ssau.ru/rasp?groupId=531030143
-
-Какие нужны возможности:
-- справочники групп, табличные данные по расписаниям добывать с настоящего сайта на серверной стороне приложения
-- в клиентскую часть подгружать эти сведения динамически по JSON-API
-- обеспечить возможность смотреть расписания в разрезе группы или препода
-- обеспечить возможность выбора учебной недели (по умолчанию выбирается автоматически)
-
-## Вариант 2. Аналог Прибывалки для электричек
-
-Сделать веб-версию Прибывалки, только для электричек
-
-Какие нужны возможности:
-- находить желаемую ЖД-станцию поиском по названию и по карте
-- отображать расписания всех проходящих поездов через выбранную станцию
-- отображать расписания для поездов между двумя станциями
-- работа через АПИ Яндекс.Расписаний https://yandex.ru/dev/rasp/doc/ru/ (доступ получите сами)
-- хорошая работа в условиях экрана смартфона
-- бонус: функция "любимых остановок"
-
-## Вариант 3. Прогноз погоды
-
-Сделать одностраничный сайт с картой, на которой можно выбрать населенный пункт и получить прогноз погоды на несколько дней по нему.
-
-Какие нужны возможности:
- - увидеть на карте точки с населенными пунктами. Координаты населенных пунктов взять из https://tochno.st/datasets/allsettlements - но все 150 тысяч не нужно, выберите 1 тысячу с самым большим населением.
- - при нажатии на точку получить всплывающее окошко с графиками изменения температуры, осадков, силы ветра. API для прогнозов возьмите с https://projecteol.ru/ru/ с соблюдением правил.
- - графики рисовать каким-нибудь приличным компонентом, например, https://www.chartjs.org/
- - находить населенный пункт по названию
- - можете реализовать с собственным серверным компонентом или придумать, как обойтись без него
+
+Variant 1: a web application for viewing SSAU timetables.
+
+Implemented:
+- fetching institute, course, and group reference data from the SSAU site
+- viewing the timetable for a group
+- searching for a teacher and viewing the teacher's timetable
+- selecting the academic week
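For orientation, here is a minimal client-side walkthrough of the JSON API added below. This is a sketch, not part of the change; it assumes a local uvicorn instance on port 8000, and the paths come from `backend/api/routes.py` plus the `/api` prefix set in `backend/main.py`:

```python
import httpx

BASE = "http://127.0.0.1:8000/api"  # assumption: app served locally on port 8000

with httpx.Client(base_url=BASE, timeout=30.0) as client:
    # 1. Reference data: institutes, then available courses and groups.
    institutes = client.get("/institutes").json()["institutes"]
    first_id = institutes[0]["id"]

    # Without "course" the endpoint returns only the available course list.
    meta = client.get("/groups", params={"institute_id": first_id}).json()
    course = meta["available_courses"][0]
    groups = client.get(
        "/groups", params={"institute_id": first_id, "course": course}
    ).json()["groups"]

    # 2. Schedule for the first group; omitting "week" selects the current week.
    schedule = client.get(
        "/schedule/group", params={"group_id": groups[0]["id"]}
    ).json()
    print(schedule["week_label"], len(schedule["days"]), "days")
```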
diff --git a/backend/api/routes.py b/backend/api/routes.py
new file mode 100644
index 00000000..aec49e5e
--- /dev/null
+++ b/backend/api/routes.py
@@ -0,0 +1,351 @@
+from __future__ import annotations
+
+import logging
+from typing import Optional
+
+import httpx
+from fastapi import APIRouter, Query
+from fastapi.responses import JSONResponse
+
+from backend.core.cache import AsyncTTLCache
+from backend.services.parser import ScheduleParser
+from backend.services.ssau_api import SsauAPI
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
+
+api = SsauAPI(timeout_seconds=25.0)
+parser = ScheduleParser()
+
+institutes_cache = AsyncTTLCache(ttl_seconds=12 * 60 * 60, max_items=8)
+courses_cache = AsyncTTLCache(ttl_seconds=12 * 60 * 60, max_items=256)
+groups_cache = AsyncTTLCache(ttl_seconds=6 * 60 * 60, max_items=1024)
+teachers_cache = AsyncTTLCache(ttl_seconds=10 * 60, max_items=2048)
+schedule_cache = AsyncTTLCache(ttl_seconds=5 * 60, max_items=2048)
+
+
+@router.get("/institutes")
+async def get_institutes():
+    async def _fetch():
+        logger.info("Загрузка списка институтов с SSAU")
+        items = await api.get_institutes()
+        logger.info("Получено институтов: %s", len(items))
+        return {
+            "institutes": [{"id": i, "name": n, "url": u} for (i, n, u) in items]
+        }
+
+    try:
+        return await institutes_cache.get_or_set("institutes", _fetch)
+    except Exception as e:
+        logger.exception("Ошибка при получении списка институтов")
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "INSTITUTES_ERROR",
+                "detail": f"Не удалось загрузить список институтов: {str(e)}",
+            },
+        )
+
+
+@router.get("/groups")
+async def get_groups(
+    institute_id: int = Query(...),
+    course: Optional[int] = Query(None),
+):
+    async def _fetch_courses():
+        logger.info("Загрузка курсов для institute_id=%s", institute_id)
+        return await api.get_faculty_available_courses(institute_id)
+
+    try:
+        available_courses = await courses_cache.get_or_set(
+            f"courses:{institute_id}",
+            _fetch_courses,
+        )
+
+        if course is None:
+            return {
+                "institute_id": institute_id,
+                "course": None,
+                "available_courses": available_courses,
+                "groups": [],
+            }
+
+        async def _fetch_groups():
+            logger.info(
+                "Загрузка групп для institute_id=%s, course=%s",
+                institute_id,
+                course,
+            )
+            items = await api.get_groups_for_faculty(institute_id, course)
+            logger.info(
+                "Получено групп для institute_id=%s, course=%s: %s",
+                institute_id,
+                course,
+                len(items),
+            )
+            return [{"id": gid, "name": name, "url": url} for (gid, name, url) in items]
+
+        groups = await groups_cache.get_or_set(
+            f"groups:{institute_id}:{course}",
+            _fetch_groups,
+        )
+
+        return {
+            "institute_id": institute_id,
+            "course": course,
+            "available_courses": available_courses,
+            "groups": groups,
+        }
+
+    except httpx.HTTPStatusError as e:
+        logger.exception(
+            "SSAU вернул HTTP %s при загрузке групп/курсов: institute_id=%s, course=%s",
+            e.response.status_code,
+            institute_id,
+            course,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "GROUPS_ERROR",
+                "detail": f"SSAU вернул HTTP {e.response.status_code} при загрузке групп или курсов",
+            },
+        )
+    except Exception as e:
+        logger.exception(
+            "Ошибка при получении групп/курсов: institute_id=%s, course=%s",
+            institute_id,
+            course,
+        )
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "GROUPS_ERROR",
+                "detail": f"Не удалось загрузить группы или курсы: {str(e)}",
+            },
+        )
+
+
+@router.get("/teachers")
+async def search_teachers(
+    q: str = Query(..., min_length=2, max_length=64),
+):
+    q_norm = q.strip()
+
+    async def _fetch():
+        logger.info("Поиск преподавателей по запросу: %r", q_norm)
+        items = await api.search_groups_and_teachers(q_norm)
+        teachers = [
+            {"id": item.id, "name": item.name, "url": item.url}
+            for item in items
+            if item.type == "teacher"
+        ]
+        logger.info("Найдено преподавателей по запросу %r: %s", q_norm, len(teachers))
+        return {"q": q_norm, "teachers": teachers}
+
+    try:
+        return await teachers_cache.get_or_set(
+            f"teachers:{q_norm.lower()}",
+            _fetch,
+        )
+    except httpx.HTTPStatusError as e:
+        logger.exception(
+            "SSAU вернул HTTP %s при поиске преподавателей по запросу %r",
+            e.response.status_code,
+            q_norm,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "TEACHERS_SEARCH_ERROR",
+                "detail": f"SSAU вернул HTTP {e.response.status_code} при поиске преподавателей",
+            },
+        )
+    except Exception as e:
+        logger.exception("Ошибка при поиске преподавателей по запросу %r", q_norm)
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "TEACHERS_SEARCH_ERROR",
+                "detail": f"Не удалось выполнить поиск преподавателей: {str(e)}",
+            },
+        )
+
+
+@router.get("/schedule/group")
+async def get_group_schedule(
+    group_id: int = Query(...),
+    week: Optional[int] = Query(None),
+):
+    week_key = "current" if week is None else str(week)
+    cache_key = f"group:{group_id}:{week_key}"
+
+    async def _fetch():
+        logger.info("Загрузка расписания группы: group_id=%s, week=%s", group_id, week_key)
+        html = await api.fetch_group_schedule_html(group_id, week)
+
+        if not html or not html.strip():
+            raise ValueError("SSAU вернул пустой HTML для расписания группы")
+
+        parsed = parser.parse(html)
+        parsed["mode"] = "group"
+        parsed["group_id"] = group_id
+        parsed["requested_week"] = week
+        logger.info(
+            "Расписание группы успешно разобрано: group_id=%s, week=%s, days=%s, slots=%s",
+            group_id,
+            week_key,
+            len(parsed.get("days", [])),
+            len(parsed.get("time_slots", [])),
+        )
+        return parsed
+
+    try:
+        return await schedule_cache.get_or_set(cache_key, _fetch)
+
+    except httpx.HTTPStatusError as e:
+        logger.exception(
+            "SSAU вернул HTTP %s при загрузке расписания группы: group_id=%s, week=%s",
+            e.response.status_code,
+            group_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "GROUP_SCHEDULE_ERROR",
+                "detail": f"SSAU вернул HTTP {e.response.status_code} при загрузке расписания группы",
+            },
+        )
+
+    except httpx.RequestError as e:
+        logger.exception(
+            "Сетевая ошибка при загрузке расписания группы: group_id=%s, week=%s",
+            group_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "GROUP_SCHEDULE_NETWORK_ERROR",
+                "detail": f"Сетевая ошибка при обращении к SSAU: {str(e)}",
+            },
+        )
+
+    except ValueError as e:
+        logger.exception(
+            "Ошибка разбора расписания группы: group_id=%s, week=%s",
+            group_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "GROUP_SCHEDULE_PARSE_ERROR",
+                "detail": f"Не удалось разобрать HTML расписания группы: {str(e)}",
+            },
+        )
+
+    except Exception as e:
+        logger.exception(
+            "Неожиданная ошибка при загрузке расписания группы: group_id=%s, week=%s",
+            group_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "GROUP_SCHEDULE_ERROR",
+                "detail": f"Не удалось загрузить расписание группы: {str(e)}",
+            },
+        )
+
+
+@router.get("/schedule/teacher")
+async def get_teacher_schedule(
+    staff_id: int = Query(...),
+    week: Optional[int] = Query(None),
+):
+    week_key = "current" if week is None else str(week)
+    cache_key = f"teacher:{staff_id}:{week_key}"
+
+    async def _fetch():
+        logger.info("Загрузка расписания преподавателя: staff_id=%s, week=%s", staff_id, week_key)
+        html = await api.fetch_teacher_schedule_html(staff_id, week)
+
+        if not html or not html.strip():
+            raise ValueError("SSAU вернул пустой HTML для расписания преподавателя")
+
+        parsed = parser.parse(html)
+        parsed["mode"] = "teacher"
+        parsed["staff_id"] = staff_id
+        parsed["requested_week"] = week
+        logger.info(
+            "Расписание преподавателя успешно разобрано: staff_id=%s, week=%s, days=%s, slots=%s",
+            staff_id,
+            week_key,
+            len(parsed.get("days", [])),
+            len(parsed.get("time_slots", [])),
+        )
+        return parsed
+
+    try:
+        return await schedule_cache.get_or_set(cache_key, _fetch)
+
+    except httpx.HTTPStatusError as e:
+        logger.exception(
+            "SSAU вернул HTTP %s при загрузке расписания преподавателя: staff_id=%s, week=%s",
+            e.response.status_code,
+            staff_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "TEACHER_SCHEDULE_ERROR",
+                "detail": f"SSAU вернул HTTP {e.response.status_code} при загрузке расписания преподавателя",
+            },
+        )
+
+    except httpx.RequestError as e:
+        logger.exception(
+            "Сетевая ошибка при загрузке расписания преподавателя: staff_id=%s, week=%s",
+            staff_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=502,
+            content={
+                "error": "TEACHER_SCHEDULE_NETWORK_ERROR",
+                "detail": f"Сетевая ошибка при обращении к SSAU: {str(e)}",
+            },
+        )
+
+    except ValueError as e:
+        logger.exception(
+            "Ошибка разбора расписания преподавателя: staff_id=%s, week=%s",
+            staff_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "TEACHER_SCHEDULE_PARSE_ERROR",
+                "detail": f"Не удалось разобрать HTML расписания преподавателя: {str(e)}",
+            },
+        )
+
+    except Exception as e:
+        logger.exception(
+            "Неожиданная ошибка при загрузке расписания преподавателя: staff_id=%s, week=%s",
+            staff_id,
+            week_key,
+        )
+        return JSONResponse(
+            status_code=500,
+            content={
+                "error": "TEACHER_SCHEDULE_ERROR",
+                "detail": f"Не удалось загрузить расписание преподавателя: {str(e)}",
+            },
+        )
\ No newline at end of file
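All endpoints above share one error convention: failures come back as a JSON envelope `{"error": CODE, "detail": message}`, with 502 for SSAU/network failures and 500 for local parsing or unexpected errors. A hedged sketch of how a client might surface that envelope (illustrative, not part of the diff):

```python
import httpx

def fetch_or_raise(client: httpx.Client, path: str, **params) -> dict:
    """Call the dashboard API and raise on its error envelope (sketch)."""
    response = client.get(path, params=params)
    payload = response.json()
    if response.status_code in (500, 502):
        # 502 means SSAU itself failed or was unreachable; 500 is a local error.
        raise RuntimeError(f"{payload['error']}: {payload['detail']}")
    return payload
```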
diff --git a/backend/core/cache.py b/backend/core/cache.py
new file mode 100644
index 00000000..a2e2147d
--- /dev/null
+++ b/backend/core/cache.py
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+import asyncio
+import time
+from typing import Any, Awaitable, Callable, Dict, Generic, Optional, Tuple, TypeVar
+
+T = TypeVar("T")
+
+
+class AsyncTTLCache(Generic[T]):
+    def __init__(self, ttl_seconds: int, max_items: int = 512) -> None:
+        self.ttl_seconds = int(ttl_seconds)
+        self.max_items = int(max_items)
+
+        self._data: Dict[Any, Tuple[float, T]] = {}
+        self._inflight: Dict[Any, asyncio.Task[T]] = {}
+        self._lock = asyncio.Lock()
+
+    def _now(self) -> float:
+        return time.monotonic()
+
+    def _is_expired(self, expires_at: float) -> bool:
+        return expires_at <= self._now()
+
+    async def get(self, key: Any) -> Optional[T]:
+        async with self._lock:
+            item = self._data.get(key)
+            if not item:
+                return None
+            expires_at, value = item
+            if self._is_expired(expires_at):
+                self._data.pop(key, None)
+                return None
+            return value
+
+    async def set(self, key: Any, value: T) -> None:
+        async with self._lock:
+            if len(self._data) >= self.max_items:
+                self._data.pop(next(iter(self._data.keys())), None)
+            self._data[key] = (self._now() + self.ttl_seconds, value)
+
+    async def get_or_set(self, key: Any, factory: Callable[[], Awaitable[T]]) -> T:
+        cached = await self.get(key)
+        if cached is not None:
+            return cached
+
+        async with self._lock:
+            item = self._data.get(key)
+            if item and not self._is_expired(item[0]):
+                return item[1]
+
+            inflight = self._inflight.get(key)
+            if inflight is None:
+                task: asyncio.Task[T] = asyncio.create_task(factory())
+                self._inflight[key] = task
+            else:
+                task = inflight
+
+        try:
+            value = await task
+        finally:
+            async with self._lock:
+                self._inflight.pop(key, None)
+
+        await self.set(key, value)
+        return value
\ No newline at end of file
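A usage sketch for `AsyncTTLCache` (illustrative, not part of the diff): concurrent callers of `get_or_set` with the same key share a single in-flight factory task, so a burst of identical requests produces one upstream fetch.

```python
import asyncio
from backend.core.cache import AsyncTTLCache

async def main() -> None:
    cache: AsyncTTLCache[str] = AsyncTTLCache(ttl_seconds=60, max_items=16)
    calls = 0

    async def factory() -> str:
        nonlocal calls
        calls += 1
        await asyncio.sleep(0.1)  # stand-in for a slow SSAU request
        return "payload"

    # Five concurrent callers coalesce onto one in-flight task.
    results = await asyncio.gather(
        *(cache.get_or_set("key", factory) for _ in range(5))
    )
    assert results == ["payload"] * 5
    assert calls == 1  # the factory ran only once

asyncio.run(main())
```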
diff --git a/backend/main.py b/backend/main.py
new file mode 100644
index 00000000..c1def422
--- /dev/null
+++ b/backend/main.py
@@ -0,0 +1,22 @@
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+from fastapi.staticfiles import StaticFiles
+
+from backend.api.routes import api, router
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    yield
+    await api.aclose()
+
+
+app = FastAPI(
+    title="SSAU Schedule Dashboard",
+    version="1.0.0",
+    lifespan=lifespan,
+)
+
+app.include_router(router, prefix="/api")
+app.mount("/", StaticFiles(directory="frontend", html=True), name="frontend")
\ No newline at end of file
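A possible local launcher (an assumption, not part of the diff: uvicorn is installed and the process starts from the repository root, so `StaticFiles` can find the `frontend/` directory mounted above; the lifespan hook closes the shared httpx client on shutdown):

```python
# run.py — hypothetical entry point for local development.
import uvicorn

if __name__ == "__main__":
    # The import string is required for --reload-style restarts.
    uvicorn.run("backend.main:app", host="127.0.0.1", port=8000, reload=True)
```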
diff --git a/backend/services/parser.py b/backend/services/parser.py
new file mode 100644
index 00000000..7a6aaab6
--- /dev/null
+++ b/backend/services/parser.py
@@ -0,0 +1,320 @@
+from __future__ import annotations
+
+import re
+from datetime import datetime
+from typing import List, Optional, Tuple
+from urllib.parse import parse_qs, urlparse
+
+from bs4 import BeautifulSoup
+
+
+def _text(el) -> str:
+    if not el:
+        return ""
+    return el.get_text(" ", strip=True)
+
+
+def _parse_ddmmyyyy(s: str) -> Optional[datetime]:
+    s = (s or "").strip()
+    try:
+        return datetime.strptime(s, "%d.%m.%Y")
+    except Exception:
+        return None
+
+
+def _to_iso(dt: Optional[datetime]) -> Optional[str]:
+    return dt.date().isoformat() if dt else None
+
+
+def _extract_week_from_href(href: str) -> Optional[int]:
+    if not href:
+        return None
+    try:
+        query = parse_qs(urlparse(href).query)
+        if "selectedWeek" in query:
+            return int(query["selectedWeek"][0])
+    except Exception:
+        return None
+    return None
+
+
+def _parse_time_block(time_block) -> Tuple[str, str]:
+    times = [t.get_text(" ", strip=True) for t in time_block.select(".schedule__time-item")]
+    times = [t for t in times if re.search(r"\b\d{1,2}:\d{2}\b", t)]
+    if len(times) >= 2:
+        return times[0], times[1]
+
+    text = _text(time_block)
+    found = re.findall(r"\b\d{1,2}:\d{2}\b", text)
+    found = (found + ["", ""])[:2]
+    return found[0], found[1]
+
+
+def _parse_teachers(lesson_div) -> List[dict]:
+    teacher_block = lesson_div.select_one(".schedule__teacher")
+    if not teacher_block:
+        return []
+
+    teachers: List[dict] = []
+    links = teacher_block.select('a[href*="staffId="]')
+    for a in links:
+        href = a.get("href") or ""
+        name = _text(a)
+        staff_id = None
+        try:
+            query = parse_qs(urlparse(href).query)
+            if "staffId" in query:
+                staff_id = int(query["staffId"][0])
+        except Exception:
+            staff_id = None
+        if name:
+            teachers.append({"staff_id": staff_id, "name": name})
+
+    if not teachers:
+        name = _text(teacher_block)
+        if name:
+            teachers.append({"staff_id": None, "name": name})
+
+    uniq = {}
+    for teacher in teachers:
+        key = (teacher.get("staff_id"), teacher.get("name"))
+        uniq[key] = teacher
+    return list(uniq.values())
+
+
+def _parse_groups(lesson_div) -> List[dict]:
+    groups_block = lesson_div.select_one(".schedule__groups")
+    if not groups_block:
+        return []
+
+    groups: List[dict] = []
+    links = groups_block.select('a[href*="groupId="]')
+    for a in links:
+        href = a.get("href") or ""
+        name = _text(a)
+        group_id = None
+        try:
+            query = parse_qs(urlparse(href).query)
+            if "groupId" in query:
+                group_id = int(query["groupId"][0])
+        except Exception:
+            group_id = None
+        if name:
+            groups.append({"group_id": group_id, "name": name})
+
+    if not groups:
+        text = _text(groups_block)
+        if text:
+            for part in re.split(r"\s{2,}|\n|,|;", text):
+                part = part.strip()
+                if part:
+                    groups.append({"group_id": None, "name": part})
+
+    uniq = {}
+    for group in groups:
+        key = (group.get("group_id"), group.get("name"))
+        uniq[key] = group
+    return list(uniq.values())
+
+
+def _parse_subgroup(lesson_div) -> Optional[str]:
+    for span in lesson_div.select("span.caption-text"):
+        text = _text(span)
+        if not text:
+            continue
+        if re.search(r"Подгрупп", text, flags=re.IGNORECASE):
+            match = re.search(r":\s*(.+)$", text)
+            if match:
+                return match.group(1).strip()
+            parts = text.split()
+            return parts[-1].strip() if parts else None
+    return None
+
+
+def _parse_cell_lessons(cell, time_start: str, time_end: str) -> List[dict]:
+    lesson_divs = cell.find_all("div", class_="schedule__lesson", recursive=False)
+    if not lesson_divs:
+        return []
+
+    lessons: List[dict] = []
+    for lesson_div in lesson_divs:
+        lesson_type = _text(lesson_div.select_one(".schedule__lesson-type-chip")) or _text(
+            lesson_div.select_one(".schedule__lesson-type")
+        )
+        subject = _text(lesson_div.select_one(".schedule__discipline")) or _text(
+            lesson_div.select_one(".schedule__discipline-name")
+        )
+        room = _text(lesson_div.select_one(".schedule__place")) or None
+
+        teachers = _parse_teachers(lesson_div)
+        groups = _parse_groups(lesson_div)
+        subgroup = _parse_subgroup(lesson_div)
+        comment = _text(lesson_div.select_one(".schedule__comment")) or None
+
+        if comment and re.search(r"Подгрупп", comment, flags=re.IGNORECASE):
+            comment = None
+
+        if not subject:
+            body_text = lesson_div.select_one(".body-text")
+            subject = _text(body_text)
+
+        if not lesson_type:
+            lesson_type = "—"
+
+        teacher_text = ", ".join(t["name"] for t in teachers) if teachers else "—"
+
+        lessons.append(
+            {
+                "time_start": time_start,
+                "time_end": time_end,
+                "subject": subject or "—",
+                "lesson_type": lesson_type,
+                "room": room,
+                "teachers": teachers,
+                "teacher": teacher_text,
+                "groups": groups,
+                "subgroup": subgroup,
+                "comment": comment,
+            }
+        )
+
+    return lessons
+
+
+class ScheduleParser:
+    def parse(self, html: str) -> dict:
+        soup = BeautifulSoup(html, "lxml")
+
+        container = soup.select_one("div.container.timetable") or soup.find(
+            "div", class_=re.compile(r"\btimetable\b")
+        )
+        if not container:
+            raise ValueError("SSAU: не найден контейнер timetable")
+
+        entity_name = ""
+        h2 = container.find("h2")
+        if h2:
+            entity_name = _text(h2)
+        if not entity_name:
+            h1 = container.find("h1")
+            if h1:
+                title = _text(h1)
+                entity_name = title.split(",", 1)[-1].strip() if "," in title else title.strip()
+        if not entity_name:
+            entity_name = "—"
+
+        week_label = _text(container.select_one(".week-nav-current_week"))
+        week_number = None
+        if week_label:
+            match = re.search(r"(\d+)", week_label)
+            if match:
+                week_number = int(match.group(1))
+
+        prev_link = container.select_one(".week-nav-prev")
+        next_link = container.select_one(".week-nav-next")
+        prev_week = _extract_week_from_href(prev_link.get("href") if prev_link else "")
+        next_week = _extract_week_from_href(next_link.get("href") if next_link else "")
+
+        schedule_items = container.select_one(".schedule .schedule__items") or container.select_one(".schedule__items")
+        if not schedule_items:
+            raise ValueError("SSAU: не найден блок расписания")
+
+        children = schedule_items.find_all(recursive=False)
+        if not children:
+            raise ValueError("SSAU: пустой блок расписания")
+
+        first_time_idx = None
+        for idx, child in enumerate(children):
+            if "schedule__time" in (child.get("class") or []):
+                first_time_idx = idx
+                break
+
+        header_cells = []
+        i = 0
+        while i < len(children):
+            classes = children[i].get("class") or []
+            if "schedule__head" in classes:
+                header_cells.append(children[i])
+                i += 1
+            else:
+                break
+
+        if len(header_cells) < 2:
+            if first_time_idx is None or first_time_idx < 2:
+                raise ValueError("SSAU: не удалось извлечь заголовки дней недели")
+            header_cells = children[:first_time_idx]
+            i = first_time_idx
+
+        if first_time_idx is not None:
+            i = first_time_idx
+
+        day_headers = header_cells[1:]
+        days: List[dict] = []
+
+        for day_header in day_headers:
+            header_text = _text(day_header)
+            date_match = re.search(r"(\d{2}\.\d{2}\.\d{4})", header_text)
+            date_str = date_match.group(1) if date_match else None
+            dt = _parse_ddmmyyyy(date_str or "") if date_str else None
+
+            weekday = header_text.replace(date_str, "").strip() if date_str else header_text.strip()
+            weekday = re.sub(r"\s+", " ", weekday)
+
+            days.append(
+                {
+                    "weekday": weekday or "—",
+                    "date": date_str,
+                    "date_iso": _to_iso(dt),
+                    "lessons": [],
+                }
+            )
+
+        time_slots: List[dict] = []
+
+        while i < len(children):
+            time_block = children[i]
+            i += 1
+
+            if "schedule__time" not in (time_block.get("class") or []):
+                continue
+
+            time_start, time_end = _parse_time_block(time_block)
+            if time_start and time_end:
+                if not time_slots or time_slots[-1] != {"time_start": time_start, "time_end": time_end}:
+                    time_slots.append({"time_start": time_start, "time_end": time_end})
+
+            for day_idx in range(len(days)):
+                if i >= len(children):
+                    break
+                cell = children[i]
+                i += 1
+
+                lessons = _parse_cell_lessons(cell, time_start=time_start, time_end=time_end)
+                if lessons:
+                    days[day_idx]["lessons"].extend(lessons)
+
+        parsed_dates = [_parse_ddmmyyyy(d["date"] or "") for d in days if d["date"]]
+        parsed_dates = [d for d in parsed_dates if d]
+
+        week_dates = None
+        week_start = None
+        week_end = None
+        if parsed_dates:
+            week_start_dt = min(parsed_dates)
+            week_end_dt = max(parsed_dates)
+            week_start = _to_iso(week_start_dt)
+            week_end = _to_iso(week_end_dt)
+            week_dates = f"{week_start_dt.strftime('%d.%m.%Y')} - {week_end_dt.strftime('%d.%m.%Y')}"
+
+        return {
+            "week_number": week_number,
+            "week_label": week_label or (f"{week_number} неделя" if week_number is not None else None),
+            "week_dates": week_dates,
+            "week_start_date": week_start,
+            "week_end_date": week_end,
+            "prev_week": prev_week,
+            "next_week": next_week,
+            "entity_name": entity_name,
+            "time_slots": time_slots,
+            "days": days,
+        }
\ No newline at end of file
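For reference, this is the shape of the dict that `ScheduleParser.parse` returns and that the endpoints serialize to JSON. The field names mirror the return statement above; the concrete values below are invented purely for illustration:

```python
# Illustrative only: field names match ScheduleParser.parse(), values are made up.
example_parsed = {
    "week_number": 7,
    "week_label": "7 неделя",
    "week_dates": "13.10.2025 - 18.10.2025",
    "week_start_date": "2025-10-13",
    "week_end_date": "2025-10-18",
    "prev_week": 6,
    "next_week": 8,
    "entity_name": "6101-020302D",
    "time_slots": [{"time_start": "08:00", "time_end": "09:35"}],
    "days": [
        {
            "weekday": "пн",
            "date": "13.10.2025",
            "date_iso": "2025-10-13",
            "lessons": [
                {
                    "time_start": "08:00",
                    "time_end": "09:35",
                    "subject": "Математический анализ",
                    "lesson_type": "Лекция",
                    "room": "310",
                    "teachers": [{"staff_id": 123, "name": "Иванов И.И."}],
                    "teacher": "Иванов И.И.",
                    "groups": [],
                    "subgroup": None,
                    "comment": None,
                }
            ],
        }
    ],
}
```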
diff --git a/backend/services/ssau_api.py b/backend/services/ssau_api.py
new file mode 100644
index 00000000..786a3f41
--- /dev/null
+++ b/backend/services/ssau_api.py
@@ -0,0 +1,204 @@
+from __future__ import annotations
+
+import re
+from dataclasses import dataclass
+from typing import Dict, List, Literal, Optional, Tuple
+from urllib.parse import parse_qs, urljoin, urlparse
+
+import httpx
+from bs4 import BeautifulSoup
+
+SSAU_BASE = "https://ssau.ru"
+
+
+@dataclass(frozen=True)
+class SearchItem:
+    id: int
+    name: str
+    type: Literal["group", "teacher"]
+    url: str
+
+
+class SsauAPI:
+    def __init__(self, timeout_seconds: float = 25.0) -> None:
+        self._client = httpx.AsyncClient(
+            headers={
+                "User-Agent": "ssau-schedule-dashboard/1.0",
+                "Accept": "text/html,application/json;q=0.9,*/*;q=0.8",
+                "Accept-Language": "ru-RU,ru;q=0.9,en;q=0.5",
+            },
+            timeout=httpx.Timeout(timeout_seconds),
+            follow_redirects=True,
+            trust_env=False,
+        )
+        self._cached_csrf: Optional[str] = None
+
+    async def aclose(self) -> None:
+        await self._client.aclose()
+
+    async def fetch_html(self, url: str) -> str:
+        response = await self._client.get(url)
+        response.raise_for_status()
+        return response.text
+
+    async def _post_form_json(self, url: str, form: Dict[str, str], headers: Dict[str, str]):
+        response = await self._client.post(url, data=form, headers=headers)
+        response.raise_for_status()
+        return response.json()
+
+    async def _ensure_rasp_csrf(self) -> str:
+        if self._cached_csrf:
+            return self._cached_csrf
+
+        html = await self.fetch_html(urljoin(SSAU_BASE, "/rasp"))
+        match = re.search(r'name="csrf-token"\s+content="([^"]+)"', html, flags=re.IGNORECASE)
+        if not match:
+            match = re.search(r"name='csrf-token'\s+content='([^']+)'", html, flags=re.IGNORECASE)
+        if not match:
+            raise RuntimeError("Не удалось найти csrf-token на странице SSAU /rasp")
+
+        self._cached_csrf = match.group(1).strip()
+        return self._cached_csrf
+
+    async def search_groups_and_teachers(self, text: str) -> List[SearchItem]:
+        text = (text or "").strip()
+        if not text:
+            return []
+
+        for attempt in range(2):
+            csrf = await self._ensure_rasp_csrf()
+            try:
+                data = await self._post_form_json(
+                    urljoin(SSAU_BASE, "/rasp/search"),
+                    form={"text": text},
+                    headers={
+                        "X-CSRF-TOKEN": csrf,
+                        "Content-Type": "application/x-www-form-urlencoded",
+                    },
+                )
+                return _parse_search_items(data)
+            except httpx.HTTPStatusError as e:
+                if e.response.status_code in (403, 419) and attempt == 0:
+                    self._cached_csrf = None
+                    try:
+                        self._client.cookies.clear()
+                    except Exception:
+                        pass
+                    continue
+                raise
+
+        return []
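A possible driver for the search flow above (a sketch, not part of the diff; it would perform live requests to ssau.ru, which is why the call is left commented out). Note the built-in single retry: on HTTP 403/419 the cached CSRF token and session cookies are dropped and the search is attempted once more. The reference-data methods of the class continue below.

```python
import asyncio
from backend.services.ssau_api import SsauAPI

async def demo() -> None:
    api = SsauAPI(timeout_seconds=25.0)
    try:
        # Returns SearchItem entries typed as "group" or "teacher"
        # based on the groupId=/staffId= markers in each result URL.
        items = await api.search_groups_and_teachers("6101")
        for item in items[:5]:
            print(item.type, item.id, item.name)
    finally:
        await api.aclose()  # close the shared httpx.AsyncClient

# asyncio.run(demo())  # disabled: hits the live SSAU site
```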
+
+    async def get_institutes(self) -> List[Tuple[int, str, str]]:
+        html = await self.fetch_html(urljoin(SSAU_BASE, "/rasp"))
+        soup = BeautifulSoup(html, "lxml")
+
+        links = soup.select('a[href*="/rasp/faculty/"]')
+        out: Dict[int, Tuple[int, str, str]] = {}
+
+        for a in links:
+            href = a.get("href")
+            if not href:
+                continue
+            abs_url = urljoin(SSAU_BASE, href)
+            match = re.search(r"/rasp/faculty/(\d+)", abs_url)
+            if not match:
+                continue
+            fid = int(match.group(1))
+            name = a.get_text(" ", strip=True)
+            if not name:
+                continue
+            out[fid] = (fid, name, abs_url)
+
+        return sorted(out.values(), key=lambda x: x[1].lower())
+
+    async def get_faculty_available_courses(self, faculty_id: int) -> List[int]:
+        url = urljoin(SSAU_BASE, f"/rasp/faculty/{faculty_id}?course=1")
+        html = await self.fetch_html(url)
+        soup = BeautifulSoup(html, "lxml")
+
+        courses: set[int] = set()
+        for a in soup.select('a[href*="course="]'):
+            href = a.get("href") or ""
+            try:
+                query = parse_qs(urlparse(urljoin(SSAU_BASE, href)).query)
+                if "course" in query:
+                    course = int(query["course"][0])
+                    if 0 <= course <= 12:
+                        courses.add(course)
+            except Exception:
+                continue
+
+        if not courses:
+            courses.add(1)
+
+        return sorted(courses)
+
+    async def get_groups_for_faculty(self, faculty_id: int, course: int) -> List[Tuple[int, str, str]]:
+        url = urljoin(SSAU_BASE, f"/rasp/faculty/{faculty_id}?course={course}")
+        html = await self.fetch_html(url)
+        soup = BeautifulSoup(html, "lxml")
+
+        out: Dict[int, Tuple[int, str, str]] = {}
+        for a in soup.select('a[href*="groupId="]'):
+            href = a.get("href")
+            if not href:
+                continue
+            abs_url = urljoin(SSAU_BASE, href)
+            query = parse_qs(urlparse(abs_url).query)
+            if "groupId" not in query:
+                continue
+            try:
+                gid = int(query["groupId"][0])
+            except Exception:
+                continue
+            name = a.get_text(" ", strip=True)
+            if not name:
+                continue
+            out[gid] = (gid, name, abs_url)
+
+        return sorted(out.values(), key=lambda x: x[1].lower())
+
+    async def fetch_group_schedule_html(self, group_id: int, week: Optional[int] = None) -> str:
+        if week is None:
+            url = f"{SSAU_BASE}/rasp?groupId={group_id}"
+        else:
+            url = f"{SSAU_BASE}/rasp?groupId={group_id}&selectedWeek={week}&selectedWeekday=1"
+        return await self.fetch_html(url)
+
+    async def fetch_teacher_schedule_html(self, staff_id: int, week: Optional[int] = None) -> str:
+        if week is None:
+            url = f"{SSAU_BASE}/rasp?staffId={staff_id}"
+        else:
+            url = f"{SSAU_BASE}/rasp?staffId={staff_id}&selectedWeek={week}&selectedWeekday=1"
+        return await self.fetch_html(url)
+
+
+def _parse_search_items(data) -> List[SearchItem]:
+    items: List[SearchItem] = []
+    if not isinstance(data, list):
+        return items
+
+    for obj in data:
+        if not isinstance(obj, dict):
+            continue
+        try:
+            item_id = int(obj.get("id"))
+        except Exception:
+            continue
+
+        url = str(obj.get("url") or "").strip()
+        name = str(obj.get("text") or "").strip()
+        if not name:
+            continue
+
+        if "groupId=" in url:
+            item_type: Literal["group", "teacher"] = "group"
+        elif "staffId=" in url:
+            item_type = "teacher"
+        else:
+            item_type = "teacher" if "staff" in url.lower() else "group"
+
+        items.append(SearchItem(id=item_id, name=name, type=item_type, url=url))
+
+    return items
\ No newline at end of file
diff --git a/frontend/assets/css/main.css b/frontend/assets/css/main.css
new file mode 100644
index 00000000..6acac877
--- /dev/null
+++ b/frontend/assets/css/main.css
@@ -0,0 +1,482 @@
+:root {
+  --bg: #07101f;
+  --bg2: #0b1730;
+  --panel: rgba(10, 19, 40, 0.94);
+  --panel2: rgba(18, 31, 61, 0.92);
+  --card: #101d39;
+  --card2: #14264a;
+  --text: #eef4ff;
+  --muted: #96a6c7;
+  --border: rgba(125, 148, 194, 0.22);
+  --accent: #7c3aed;
+  --accent2: #22c55e;
+  --danger: #ef4444;
+  --shadow: 0 14px 40px rgba(0, 0, 0, 0.3);
+  --radius: 18px;
+  --font: Inter, system-ui, -apple-system, Segoe UI, Roboto, sans-serif;
+}
+
+* {
+  box-sizing: border-box;
+}
+
+html, body {
+  min-height: 100%;
+}
+
+body {
+  margin: 0;
+  font-family: var(--font);
+  color: var(--text);
+  background:
+    radial-gradient(circle at top left, rgba(124, 58, 237, 0.16), transparent 28%),
+    radial-gradient(circle at top right, rgba(34, 197, 94, 0.12), transparent 22%),
+    linear-gradient(180deg, var(--bg), var(--bg2));
+}
+
+.shell {
+  display: grid;
+  grid-template-columns: 320px 1fr;
+  min-height: 100vh;
+}
+
+.sidebar {
+  border-right: 1px solid var(--border);
+  background: rgba(4, 10, 22, 0.9);
+  backdrop-filter: blur(12px);
+  padding: 24px 20px;
+}
+
+.brand {
+  display: flex;
+  align-items: center;
+  gap: 14px;
+  margin-bottom: 24px;
+}
+
+.brand__logo
{ + width: 48px; + height: 48px; + border-radius: 14px; + display: grid; + place-items: center; + background: linear-gradient(135deg, var(--accent), #3b82f6); + box-shadow: var(--shadow); + font-size: 24px; +} + +.brand__title { + font-weight: 800; + font-size: 20px; +} + +.brand__subtitle { + font-size: 12px; + color: var(--muted); + margin-top: 4px; +} + +.mode-switch { + display: grid; + gap: 10px; + margin-bottom: 18px; +} + +.mode-switch__item { + display: flex; + align-items: center; + gap: 10px; + background: var(--panel2); + border: 1px solid var(--border); + border-radius: 14px; + padding: 12px 14px; + cursor: pointer; +} + +.mode-switch__item input { + accent-color: var(--accent); +} + +.panel-block { + background: var(--panel); + border: 1px solid var(--border); + border-radius: var(--radius); + padding: 16px; + box-shadow: var(--shadow); + margin-bottom: 16px; +} + +.field-label { + display: block; + font-size: 12px; + color: var(--muted); + margin-bottom: 8px; +} + +.field-control { + width: 100%; + height: 42px; + border-radius: 12px; + border: 1px solid var(--border); + background: rgba(255,255,255,0.04); + color: var(--text); + padding: 0 12px; + outline: none; + margin-bottom: 14px; +} + +.field-control:focus { + border-color: rgba(124, 58, 237, 0.7); + box-shadow: 0 0 0 3px rgba(124, 58, 237, 0.18); +} + +.field-control option { + color: #111827; +} + +.action-btn, +.ghost-btn { + height: 42px; + border: none; + border-radius: 12px; + font-weight: 700; + cursor: pointer; +} + +.action-btn { + width: 100%; + background: linear-gradient(135deg, var(--accent), #3b82f6); + color: white; +} + +.action-btn:disabled { + opacity: 0.5; + cursor: not-allowed; +} + +.ghost-btn { + width: 42px; + background: rgba(255,255,255,0.06); + color: var(--text); + border: 1px solid var(--border); +} + +.week-box { + display: grid; + grid-template-columns: 42px 1fr 42px; + gap: 10px; + align-items: center; +} + +.suggest-wrap { + position: relative; +} + +.suggest-box { + position: absolute; + top: 46px; + left: 0; + right: 0; + background: #0e1a35; + border: 1px solid var(--border); + border-radius: 14px; + overflow: hidden; + box-shadow: var(--shadow); + z-index: 30; + max-height: 280px; + overflow-y: auto; +} + +.suggest-item { + padding: 12px; + border-bottom: 1px solid rgba(255,255,255,0.05); + cursor: pointer; +} + +.suggest-item:hover { + background: rgba(255,255,255,0.04); +} + +.suggest-item__meta { + color: var(--muted); + font-size: 12px; + margin-top: 4px; +} + +.content { + padding: 28px; +} + +.hero { + padding: 24px 28px; + border: 1px solid var(--border); + border-radius: 26px; + background: + linear-gradient(135deg, rgba(124, 58, 237, 0.22), rgba(59, 130, 246, 0.14)), + rgba(10, 19, 40, 0.88); + box-shadow: var(--shadow); + margin-bottom: 18px; +} + +.hero__caption { + color: var(--muted); + font-size: 13px; + margin-bottom: 8px; +} + +.hero__title { + margin: 0; + font-size: 48px; + line-height: 1; +} + +.hero__entity { + margin-top: 14px; + font-size: 18px; + font-weight: 700; +} + +.hero__meta { + margin-top: 8px; + color: var(--muted); + display: flex; + gap: 12px; + flex-wrap: wrap; +} + +.status-box { + background: rgba(255,255,255,0.04); + border: 1px solid var(--border); + border-radius: 16px; + padding: 14px 16px; + margin-bottom: 18px; + color: var(--muted); +} + +.mobile-day-wrap { + margin-bottom: 18px; + max-width: 240px; +} + +.schedule-grid { + border: 1px solid var(--border); + border-radius: 22px; + overflow: hidden; + background: rgba(7, 16, 31, 0.68); + 
box-shadow: var(--shadow); +} + +.schedule-grid__header, +.schedule-grid__row { + display: flex; + min-width: 860px; +} + +.schedule-grid__header { + background: rgba(255,255,255,0.04); + border-bottom: 1px solid var(--border); +} + +.schedule-grid__time-head, +.schedule-grid__time-cell { + width: 92px; + min-width: 92px; + padding: 14px 10px; + border-right: 1px solid var(--border); +} + +.schedule-grid__time-head { + color: var(--muted); + font-size: 11px; + font-weight: 800; + letter-spacing: .08em; + text-transform: uppercase; +} + +.schedule-grid__day-head, +.schedule-grid__cell { + flex: 1; + min-width: 130px; + border-right: 1px solid var(--border); +} + +.schedule-grid__day-head:last-child, +.schedule-grid__cell:last-child { + border-right: none; +} + +.schedule-grid__day-head { + padding: 14px 12px; +} + +.schedule-grid__row { + border-bottom: 1px solid rgba(255,255,255,0.05); +} + +.schedule-grid__row:last-child { + border-bottom: none; +} + +.schedule-grid__time-cell { + background: rgba(255,255,255,0.03); +} + +.schedule-grid__cell { + padding: 8px; +} + +.day-name { + font-weight: 800; +} + +.day-date { + color: var(--muted); + font-size: 12px; + margin-top: 4px; +} + +.time-start { + font-weight: 800; + font-size: 14px; +} + +.time-end { + color: var(--muted); + font-size: 12px; + margin-top: 3px; +} + +.cell-stack { + display: grid; + gap: 8px; +} + +.lesson-card { + border-radius: 16px; + padding: 12px; + background: linear-gradient(180deg, var(--card), var(--card2)); + border: 1px solid rgba(255,255,255,0.06); + box-shadow: 0 10px 22px rgba(0,0,0,0.2); +} + +.lesson-card__top { + display: flex; + gap: 8px; + flex-wrap: wrap; + align-items: center; +} + +.lesson-badge { + display: inline-flex; + padding: 4px 8px; + border-radius: 999px; + font-size: 11px; + font-weight: 800; + color: white; +} + +.lesson-badge--lecture { background: #2563eb; } +.lesson-badge--practice { background: #16a34a; } +.lesson-badge--lab { background: #ea580c; } +.lesson-badge--exam { background: #dc2626; } +.lesson-badge--credit { background: #9333ea; } +.lesson-badge--consult { background: #0f766e; } +.lesson-badge--other { background: #475569; } + +.lesson-subgroup { + color: var(--muted); + font-size: 11px; + padding: 4px 8px; + border-radius: 999px; + background: rgba(255,255,255,0.06); +} + +.lesson-card__subject { + margin-top: 10px; + font-size: 14px; + font-weight: 800; + line-height: 1.35; +} + +.lesson-card__meta { + margin-top: 10px; + display: grid; + gap: 6px; + color: var(--muted); + font-size: 12px; +} + +.lesson-card__comment { + margin-top: 10px; + padding-left: 10px; + border-left: 3px solid rgba(255,255,255,0.15); + color: var(--muted); + font-size: 12px; +} + +.day-mobile { + display: grid; + gap: 12px; +} + +.day-mobile__block { + background: rgba(255,255,255,0.04); + border: 1px solid var(--border); + border-radius: 18px; + overflow: hidden; + box-shadow: var(--shadow); +} + +.day-mobile__head { + padding: 14px 16px; + border-bottom: 1px solid var(--border); + background: rgba(255,255,255,0.04); +} + +.day-mobile__title { + font-weight: 800; +} + +.day-mobile__date { + margin-top: 4px; + font-size: 12px; + color: var(--muted); +} + +.day-mobile__slot { + padding: 14px 16px; + border-bottom: 1px solid rgba(255,255,255,0.05); +} + +.day-mobile__slot:last-child { + border-bottom: none; +} + +.day-mobile__time { + margin-bottom: 10px; + font-weight: 700; +} + +.empty-note { + color: var(--muted); + font-size: 13px; +} + +@media (max-width: 980px) { + .shell { + 
+    grid-template-columns: 1fr;
+  }
+
+  .sidebar {
+    border-right: none;
+    border-bottom: 1px solid var(--border);
+  }
+
+  .content {
+    padding: 18px;
+  }
+
+  .hero__title {
+    font-size: 36px;
+  }
+}
\ No newline at end of file
diff --git a/frontend/assets/js/app.js b/frontend/assets/js/app.js
new file mode 100644
index 00000000..08bec3c9
--- /dev/null
+++ b/frontend/assets/js/app.js
@@ -0,0 +1,515 @@
+(function () {
+  "use strict";
+
+  const state = {
+    mode: "group",
+    instituteId: null,
+    course: null,
+    groupId: null,
+    staffId: null,
+    week: null,
+    schedule: null,
+  };
+
+  const WEEKDAY_SHORT = {
+    пон: "пн",
+    вто: "вт",
+    сре: "ср",
+    чет: "чт",
+    пят: "пт",
+    суб: "сб",
+    вос: "вс",
+  };
+
+  const LESSON_BADGE_MAP = [
+    [/лекц/i, "lesson-badge--lecture"],
+    [/практ/i, "lesson-badge--practice"],
+    [/лаб/i, "lesson-badge--lab"],
+    [/экза/i, "lesson-badge--exam"],
+    [/зач[eё]т/i, "lesson-badge--credit"],
+    [/консульт/i, "lesson-badge--consult"],
+  ];
+
+  const isMobile = () => window.matchMedia("(max-width: 980px)").matches;
+
+  function debounce(fn, ms) {
+    let timer = null;
+    return function (...args) {
+      clearTimeout(timer);
+      timer = setTimeout(() => fn.apply(null, args), ms);
+    };
+  }
+
+  function escapeHtml(value) {
+    return String(value ?? "")
+      .replace(/&/g, "&amp;")
+      .replace(/</g, "&lt;")
+      .replace(/>/g, "&gt;")
+      .replace(/"/g, "&quot;")
+      .replace(/'/g, "&#39;");
+  }
+
+  function setStatus(text) {
+    $("#status").text(text);
+  }
+
+  function apiGet(url, data = {}) {
+    return $.ajax({
+      url,
+      method: "GET",
+      data,
+      dataType: "json",
+      cache: false,
+    });
+  }
+
+  function badgeClass(lessonType) {
+    const text = (lessonType || "").toLowerCase();
+    const match = LESSON_BADGE_MAP.find(([re]) => re.test(text));
+    return match ? match[1] : "lesson-badge--other";
+  }
+
+  function shortWeekday(name) {
+    const prefix = (name || "").toLowerCase().slice(0, 3);
+    return WEEKDAY_SHORT[prefix] ?? name ?? "";
+  }
+
+  function ensureWeekOptions() {
+    const $weekSelect = $("#weekSelect");
+
+    const hasNumericOptions = $weekSelect
+      .find("option")
+      .toArray()
+      .some((option) => /^\d+$/.test(String(option.value)));
+
+    if (hasNumericOptions) {
+      return;
+    }
+
+    $weekSelect.empty();
+
+    for (let week = 0; week <= 52; week++) {
+      $weekSelect.append($(`<option value="${week}">${week}</option>`));
+    }
+  }
+
+  function renderDay(dayIndex) {
+    const day = days[dayIndex];
+    const lessonsBySlot = {};
+    (day.lessons || []).forEach((lesson) => {
+      const key = `${lesson.time_start}|${lesson.time_end}`;
+      (lessonsBySlot[key] ??= []).push(lesson);
+    });
+
+    const slotsHtml = slots.map((slot) => {
+      const key = `${slot.time_start}|${slot.time_end}`;
+      const lessons = lessonsBySlot[key] || [];
+
+      return `