diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..d4ea37f --- /dev/null +++ b/.dockerignore @@ -0,0 +1,14 @@ +.github +.git +.gitignore +frontend/node_modules +.env +.idea +README.md +LICENSE +.dockerignore +docker-compose.yml +target +backend/Dockerfile +frontend/Dockerfile +.qodo \ No newline at end of file diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 82d7327..cabc485 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -13,25 +13,25 @@ jobs: linter: runs-on: blacksmith-2vcpu-ubuntu-2404 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Rust toolchain uses: dtolnay/rust-toolchain@master #uses: actions-rust-lang/setup-rust-toolchain@v1 with: components: clippy - toolchain: stable + toolchain: nightly - name: Install and Cache Apt packages uses: awalsh128/cache-apt-pkgs-action@latest with: packages: nasm version: 1.0 - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2.8.0 + uses: Swatinem/rust-cache@v2 with: cache-on-failure: true - name: Run linter (clippy action) #env: SQLX_OFFLINE: true - uses: clechasseur/rs-clippy-check@v5.0.1 + uses: clechasseur/rs-clippy-check@v5 with: #token: ${{ secrets.GITHUB_TOKEN }} args: --workspace @@ -40,10 +40,11 @@ jobs: formatting: runs-on: blacksmith-2vcpu-ubuntu-2404 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Rustfmt Toolchain uses: actions-rust-lang/setup-rust-toolchain@v1 with: + toolchain: nightly components: rustfmt - name: Rust fmt check uses: actions-rust-lang/rustfmt@v1 diff --git a/.gitignore b/.gitignore index 76485ea..4c99a5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -.qodo .idea .env +prod.env /target \ No newline at end of file diff --git a/.sqlx/query-01526138d0cd787dd0121a8a0ced390d439346378ab4d3590328aad7f6fae62a.json b/.sqlx/query-01526138d0cd787dd0121a8a0ced390d439346378ab4d3590328aad7f6fae62a.json new file mode 100644 index 0000000..b4059db --- /dev/null +++ b/.sqlx/query-01526138d0cd787dd0121a8a0ced390d439346378ab4d3590328aad7f6fae62a.json @@ -0,0 +1,98 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH base_search AS (\n SELECT\n s.id, s.title, s.original_title, s.description, s.cover_image_url,\n s.current_source_url, s.updated_at, s.processing_status,\n -- Calculate similarity score for ranking\n similarity(s.title, $3) as sim_score\n FROM series s\n WHERE\n s.title ILIKE '%' || $3 || '%'\n OR\n (s.title % $3 AND similarity(s.title, $3) >= $4)\n ),\n ranked_results AS (\n SELECT\n *,\n CASE\n WHEN title ILIKE $3 THEN 10\n WHEN title ILIKE $3 || '%' THEN 8\n WHEN title ILIKE '%' || $3 || '%' THEN 6\n ELSE 4\n END as search_rank\n FROM base_search\n ),\n total_count AS (\n SELECT COUNT(*) AS total FROM ranked_results\n )\n SELECT\n rr.id, rr.title, rr.original_title, rr.description,\n rr.cover_image_url, rr.current_source_url, rr.updated_at,\n rr.processing_status as \"processing_status: SeriesStatus\",\n -- Aggregate author names into a JSON array for each series\n COALESCE(\n json_agg(a.name) FILTER (WHERE a.id IS NOT NULL),\n '[]'::json\n ) AS \"authors!\",\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n LEFT JOIN series_authors sa ON rr.id = sa.series_id\n LEFT JOIN authors a ON sa.author_id = a.id\n GROUP BY\n rr.id, rr.title, rr.original_title, rr.description, rr.cover_image_url,\n rr.current_source_url, rr.updated_at, rr.processing_status,\n rr.search_rank, rr.sim_score, tc.total\n -- Order by the best rank, then by similarity, 
then by ID for stable sorting\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC\n LIMIT $1\n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "title", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "original_title", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "description", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "cover_image_url", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "current_source_url", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 7, + "name": "processing_status: SeriesStatus", + "type_info": { + "Custom": { + "name": "series_status", + "kind": { + "Enum": [ + "Pending", + "Processing", + "Available", + "Ongoing", + "Completed", + "Hiatus", + "Discontinued", + "Error", + "Pending Deletion", + "Deleting", + "Deletion Failed" + ] + } + } + } + }, + { + "ordinal": 8, + "name": "authors!", + "type_info": "Json" + }, + { + "ordinal": 9, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Text", + "Float4" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + false, + false, + false, + null, + null + ] + }, + "hash": "01526138d0cd787dd0121a8a0ced390d439346378ab4d3590328aad7f6fae62a" +} diff --git a/.sqlx/query-dd47d4bddabc76774508545fc208c4c6d489d419158fefa65a0e13644e81b7bd.json b/.sqlx/query-05d49b961d5dfb46828c18d6a6df8f46c7d61c38add6c75d7708f2bd7e93be78.json similarity index 52% rename from .sqlx/query-dd47d4bddabc76774508545fc208c4c6d489d419158fefa65a0e13644e81b7bd.json rename to .sqlx/query-05d49b961d5dfb46828c18d6a6df8f46c7d61c38add6c75d7708f2bd7e93be78.json index 988009a..eb399ad 100644 --- a/.sqlx/query-dd47d4bddabc76774508545fc208c4c6d489d419158fefa65a0e13644e81b7bd.json +++ b/.sqlx/query-05d49b961d5dfb46828c18d6a6df8f46c7d61c38add6c75d7708f2bd7e93be78.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT a.name FROM authors a\n JOIN series_authors sa ON a.id = sa.author_id\n WHERE sa.series_id = $1", + "query": "\n SELECT a.name FROM authors a\n JOIN series_authors sa ON a.id = sa.author_id\n WHERE sa.series_id = $1\n ", "describe": { "columns": [ { @@ -18,5 +18,5 @@ false ] }, - "hash": "dd47d4bddabc76774508545fc208c4c6d489d419158fefa65a0e13644e81b7bd" + "hash": "05d49b961d5dfb46828c18d6a6df8f46c7d61c38add6c75d7708f2bd7e93be78" } diff --git a/.sqlx/query-05dbbd810e571d1653e73d5001b1fc262cd779d1439ca255421fa51118744048.json b/.sqlx/query-05dbbd810e571d1653e73d5001b1fc262cd779d1439ca255421fa51118744048.json new file mode 100644 index 0000000..62062de --- /dev/null +++ b/.sqlx/query-05dbbd810e571d1653e73d5001b1fc262cd779d1439ca255421fa51118744048.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO series_ratings (series_id, user_id, rating) VALUES ($1, $2, $3)\n ON CONFLICT (user_id, series_id) DO UPDATE SET rating = $3, updated_at = NOW()\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4", + "Int4", + "Int2" + ] + }, + "nullable": [] + }, + "hash": "05dbbd810e571d1653e73d5001b1fc262cd779d1439ca255421fa51118744048" +} diff --git a/.sqlx/query-07eba14e32d72782dde094c1597bde9607276fd32ad7392be4e77eacbdfabd69.json b/.sqlx/query-07eba14e32d72782dde094c1597bde9607276fd32ad7392be4e77eacbdfabd69.json new file mode 100644 index 0000000..d24ade4 --- /dev/null +++ 
b/.sqlx/query-07eba14e32d72782dde094c1597bde9607276fd32ad7392be4e77eacbdfabd69.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "UPDATE series\n SET last_chapter_found_in_storage = GREATEST(COALESCE(last_chapter_found_in_storage, 0), $1),\n updated_at = NOW()\n WHERE id = $2", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Float4", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "07eba14e32d72782dde094c1597bde9607276fd32ad7392be4e77eacbdfabd69" +} diff --git a/.sqlx/query-0e2fbcf5a2d66a4ee1b5fd28dc178db72e2189d3fd4464ed7adbb54ed0ef0d5c.json b/.sqlx/query-0e2fbcf5a2d66a4ee1b5fd28dc178db72e2189d3fd4464ed7adbb54ed0ef0d5c.json new file mode 100644 index 0000000..bd056a7 --- /dev/null +++ b/.sqlx/query-0e2fbcf5a2d66a4ee1b5fd28dc178db72e2189d3fd4464ed7adbb54ed0ef0d5c.json @@ -0,0 +1,119 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH RECURSIVE comment_thread AS (\n -- Anchor member: top-level comments\n SELECT * FROM comments\n WHERE\n CASE\n WHEN $4::bigint IS NOT NULL THEN id = $4\n ELSE comments_type = $1 AND comments_id = $2 AND parent_id IS NULL\n END\n UNION ALL\n -- Recursive member: replies to comments already in the thread\n SELECT c.*\n FROM comments c\n JOIN comment_thread ct ON c.parent_id = ct.id\n ),\n vote_summary AS (\n SELECT\n cv.comment_vote_id,\n COUNT(*) FILTER (WHERE cv.vote_type = 1) AS upvotes,\n COUNT(*) FILTER (WHERE cv.vote_type = -1) AS downvotes\n FROM comment_votes cv\n WHERE cv.comment_vote_id IN (SELECT id FROM comment_thread)\n GROUP BY cv.comment_vote_id\n ),\n attachments_summary AS (\n -- Aggregate all attachment URLs for each comment into a JSON array\n SELECT\n comment_id,\n array_agg(file_url) as attachment_urls\n FROM comment_attachments\n WHERE comment_id IN (SELECT id FROM comment_thread)\n GROUP BY comment_id\n )\n SELECT\n ct.id as \"id!\",\n ct.parent_id,\n ct.content_html as \"content_html!\",\n ct.content_user_markdown as \"content_markdown!\",\n ct.created_at as \"created_at!\",\n ct.updated_at as \"updated_at!\",\n ct.user_id as \"user_id!\",\n COALESCE(up.display_name, u.username) as \"user_username!\",\n up.avatar_url as \"user_avatar_url\",\n u.role_id as \"user_role_id!\",\n COALESCE(vs.upvotes, 0) as \"upvotes!\",\n COALESCE(vs.downvotes, 0) as \"downvotes!\",\n (ct.deleted_at IS NOT NULL ) as \"is_deleted!\",\n cv.vote_type as \"current_user_vote\",\n ats.attachment_urls as \"attachment_urls\"\n FROM comment_thread ct\n JOIN users u ON ct.user_id = u.id\n LEFT JOIN user_profiles up ON u.id = up.user_id\n LEFT JOIN vote_summary vs ON ct.id = vs.comment_vote_id\n LEFT JOIN comment_votes cv ON ct.id = cv.comment_vote_id AND cv.user_id = $3\n LEFT JOIN attachments_summary ats ON ct.id = ats.comment_id\n -- ORDER BY ct.created_at ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "parent_id", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "content_html!", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "content_markdown!", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "updated_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "user_id!", + "type_info": "Int4" + }, + { + "ordinal": 7, + "name": "user_username!", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "user_avatar_url", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "user_role_id!", + "type_info": "Int4" + }, + { + 
"ordinal": 10, + "name": "upvotes!", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "downvotes!", + "type_info": "Int8" + }, + { + "ordinal": 12, + "name": "is_deleted!", + "type_info": "Bool" + }, + { + "ordinal": 13, + "name": "current_user_vote", + "type_info": "Int2" + }, + { + "ordinal": 14, + "name": "attachment_urls", + "type_info": "TextArray" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "comments_entity", + "kind": { + "Enum": [ + "series", + "series_chapters" + ] + } + } + }, + "Int4", + "Int4", + "Int8" + ] + }, + "nullable": [ + true, + true, + true, + true, + true, + true, + true, + null, + true, + true, + null, + null, + null, + true, + true + ] + }, + "hash": "0e2fbcf5a2d66a4ee1b5fd28dc178db72e2189d3fd4464ed7adbb54ed0ef0d5c" +} diff --git a/.sqlx/query-154c0b1e89ad82e920973a00897ed0374bc15ccf8eec3df2a02b657bcef3a6b2.json b/.sqlx/query-10714d781c4f88bb58ac8ac3393631dbfe9484d8ab62f69e25babbf449daa17d.json similarity index 60% rename from .sqlx/query-154c0b1e89ad82e920973a00897ed0374bc15ccf8eec3df2a02b657bcef3a6b2.json rename to .sqlx/query-10714d781c4f88bb58ac8ac3393631dbfe9484d8ab62f69e25babbf449daa17d.json index f14cbf3..543d797 100644 --- a/.sqlx/query-154c0b1e89ad82e920973a00897ed0374bc15ccf8eec3df2a02b657bcef3a6b2.json +++ b/.sqlx/query-10714d781c4f88bb58ac8ac3393631dbfe9484d8ab62f69e25babbf449daa17d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n sr.id,\n sr.title,\n sr.original_title,\n sr.description,\n sr.cover_image_url,\n sr.current_source_url,\n sr.updated_at,\n sr.processing_status as \"processing_status: SeriesStatus\",\n COALESCE(\n json_agg(a.name) FILTER (WHERE a.id IS NOT NULL),\n '[]'::json\n ) as \"authors!\",\n COUNT(*) OVER () as total_items\n FROM\n series sr\n LEFT JOIN\n series_authors sa ON sr.id = sa.series_id\n LEFT JOIN\n authors a ON sa.author_id = a.id\n WHERE\n ($3::TEXT IS NULL OR sr.title_tsv @@ plainto_tsquery('english', $3))\n GROUP BY\n sr.id\n ORDER BY\n sr.updated_at DESC\n LIMIT $1\n OFFSET $2\n ", + "query": "\n SELECT\n s.id, s.title, s.original_title, s.description, s.cover_image_url,\n s.current_source_url, s.updated_at,\n s.processing_status as \"processing_status: SeriesStatus\",\n COALESCE(\n json_agg(a.name) FILTER (WHERE a.id IS NOT NULL),\n '[]'::json\n ) as \"authors!\",\n COUNT(*) OVER () as total_items\n FROM\n series s\n LEFT JOIN series_authors sa ON s.id = sa.series_id\n LEFT JOIN authors a ON sa.author_id = a.id\n GROUP BY s.id\n ORDER BY s.updated_at DESC\n LIMIT $1 OFFSET $2\n ", "describe": { "columns": [ { @@ -76,8 +76,7 @@ "parameters": { "Left": [ "Int8", - "Int8", - "Text" + "Int8" ] }, "nullable": [ @@ -93,5 +92,5 @@ null ] }, - "hash": "154c0b1e89ad82e920973a00897ed0374bc15ccf8eec3df2a02b657bcef3a6b2" + "hash": "10714d781c4f88bb58ac8ac3393631dbfe9484d8ab62f69e25babbf449daa17d" } diff --git a/.sqlx/query-172c20fbd5dc86fb2320ccb218652409751f86f3783c19b1e944618ee8b0b0e5.json b/.sqlx/query-172c20fbd5dc86fb2320ccb218652409751f86f3783c19b1e944618ee8b0b0e5.json deleted file mode 100644 index f7ed995..0000000 --- a/.sqlx/query-172c20fbd5dc86fb2320ccb218652409751f86f3783c19b1e944618ee8b0b0e5.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH search_results AS (\n SELECT\n u.id,\n u.username,\n u.email,\n r.role_name,\n u.user_tsv\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE\n -- ILIKE for substring matches\n u.username ILIKE '%' || $3 || '%'\n OR u.email ILIKE '%' || $3 || '%'\n -- FTS for whole-word/prefix matches\n 
OR u.user_tsv @@ to_tsquery('simple', $4)\n -- fuzzy match filtering\n OR (u.username || ' ' || u.email) % $3\n ),\n ranked_results AS (\n SELECT\n *,\n CASE\n WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 10\n WHEN user_tsv @@ to_tsquery('simple', $4) THEN 8\n ELSE 6\n END as search_rank,\n -- Calculate similarity score for ranking\n similarity(username || ' ' || email, $3) as sim_score\n FROM search_results\n ),\n total_count AS (\n SELECT COUNT(*) AS total FROM ranked_results WHERE search_rank > 0\n )\n SELECT\n rr.id,\n rr.username,\n rr.email,\n rr.role_name,\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n WHERE rr.search_rank > 0\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC\n LIMIT $1 OFFSET $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "username", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "role_name", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "total_items", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Text", - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false, - null - ] - }, - "hash": "172c20fbd5dc86fb2320ccb218652409751f86f3783c19b1e944618ee8b0b0e5" -} diff --git a/.sqlx/query-d37354f165f68b157b032bb2eae1045abec8f8f2a1669f4f75a740527cff2997.json b/.sqlx/query-19151ec4a77c3cd1e182f0cfb6ccb83911c0075d35280252566d378120bc4c0e.json similarity index 79% rename from .sqlx/query-d37354f165f68b157b032bb2eae1045abec8f8f2a1669f4f75a740527cff2997.json rename to .sqlx/query-19151ec4a77c3cd1e182f0cfb6ccb83911c0075d35280252566d378120bc4c0e.json index 79306af..323f153 100644 --- a/.sqlx/query-d37354f165f68b157b032bb2eae1045abec8f8f2a1669f4f75a740527cff2997.json +++ b/.sqlx/query-19151ec4a77c3cd1e182f0cfb6ccb83911c0075d35280252566d378120bc4c0e.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT c.id, c.name FROM categories c\n JOIN series_categories sc ON c.id = sc.category_id\n WHERE sc.series_id = $1", + "query": "\n SELECT c.id, c.name FROM categories c\n JOIN series_categories sc ON c.id = sc.category_id\n WHERE sc.series_id = $1\n ", "describe": { "columns": [ { @@ -24,5 +24,5 @@ false ] }, - "hash": "d37354f165f68b157b032bb2eae1045abec8f8f2a1669f4f75a740527cff2997" + "hash": "19151ec4a77c3cd1e182f0cfb6ccb83911c0075d35280252566d378120bc4c0e" } diff --git a/.sqlx/query-19fc91b1024527bce698cc654c619dfdceca6f5938542bea66b025a89cd5dcd2.json b/.sqlx/query-19fc91b1024527bce698cc654c619dfdceca6f5938542bea66b025a89cd5dcd2.json deleted file mode 100644 index 53373cc..0000000 --- a/.sqlx/query-19fc91b1024527bce698cc654c619dfdceca6f5938542bea66b025a89cd5dcd2.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n s.id,\n s.title,\n s.cover_image_url,\n s.updated_at,\n s.last_chapter_found_in_storage,\n sc.title as chapter_title,\n COUNT(*) OVER () as total_items\n FROM\n series s\n LEFT JOIN\n series_chapters sc ON s.id = sc.series_id\n AND s.last_chapter_found_in_storage = sc.chapter_number\n WHERE\n s.updated_at >= NOW() - interval '7 days'\n ORDER BY\n s.updated_at DESC\n LIMIT $1\n OFFSET $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "title", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "cover_image_url", - "type_info": "Text" - }, - { - 
"ordinal": 3, - "name": "updated_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 4, - "name": "last_chapter_found_in_storage", - "type_info": "Float4" - }, - { - "ordinal": 5, - "name": "chapter_title", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "total_items", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - true, - true, - null - ] - }, - "hash": "19fc91b1024527bce698cc654c619dfdceca6f5938542bea66b025a89cd5dcd2" -} diff --git a/.sqlx/query-21c0768f87efd70066db55b20a68a28e3b5f31ab8e697035628e0a1cc5e8578b.json b/.sqlx/query-25fab2cf9710a5c5922ff95f1ce25dc38e7b9fb9b7b2b69d1764dbd3dcc376fd.json similarity index 52% rename from .sqlx/query-21c0768f87efd70066db55b20a68a28e3b5f31ab8e697035628e0a1cc5e8578b.json rename to .sqlx/query-25fab2cf9710a5c5922ff95f1ce25dc38e7b9fb9b7b2b69d1764dbd3dcc376fd.json index c01bde9..c23d602 100644 --- a/.sqlx/query-21c0768f87efd70066db55b20a68a28e3b5f31ab8e697035628e0a1cc5e8578b.json +++ b/.sqlx/query-25fab2cf9710a5c5922ff95f1ce25dc38e7b9fb9b7b2b69d1764dbd3dcc376fd.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH vote_summary AS (\n SELECT\n cv.comment_vote_id,\n COUNT(*) FILTER (WHERE cv.vote_type = 1) AS upvotes,\n COUNT(*) FILTER (WHERE cv.vote_type = -1) AS downvotes\n FROM comment_votes cv\n WHERE cv.comment_vote_id = $1\n GROUP BY cv.comment_vote_id\n ),\n attachments_summary AS (\n SELECT\n comment_id,\n json_agg(file_url) as attachment_urls\n FROM comment_attachments\n WHERE comment_id = $1\n GROUP BY comment_id\n )\n SELECT\n c.id as \"id!\",\n c.parent_id,\n c.content_html as \"content_html!\",\n c.content_user_markdown as \"content_markdown!\",\n c.created_at as \"created_at!\",\n c.updated_at as \"updated_at!\",\n c.user_id as \"user_id!\",\n COALESCE(up.display_name, u.username) as \"user_username!\",\n up.avatar_url as \"user_avatar_url\",\n COALESCE(vs.upvotes, 0) as \"upvotes!\",\n COALESCE(vs.downvotes, 0) as \"downvotes!\",\n cv.vote_type as \"current_user_vote: _\",\n ats.attachment_urls as \"attachment_urls: _\"\n FROM comments c\n JOIN users u ON c.user_id = u.id\n LEFT JOIN user_profiles up ON u.id = up.user_id\n LEFT JOIN vote_summary vs ON c.id = vs.comment_vote_id\n LEFT JOIN comment_votes cv ON c.id = cv.comment_vote_id AND cv.user_id = $2\n LEFT JOIN attachments_summary ats ON c.id = ats.comment_id\n WHERE c.id = $1 AND c.deleted_at IS NULL\n ", + "query": "\n WITH vote_summary AS (\n SELECT\n cv.comment_vote_id,\n COUNT(*) FILTER (WHERE cv.vote_type = 1) AS upvotes,\n COUNT(*) FILTER (WHERE cv.vote_type = -1) AS downvotes\n FROM comment_votes cv\n WHERE cv.comment_vote_id = $1\n GROUP BY cv.comment_vote_id\n ),\n attachments_summary AS (\n SELECT\n comment_id,\n array_agg(file_url) as attachment_urls\n FROM comment_attachments\n WHERE comment_id = $1\n GROUP BY comment_id\n )\n SELECT\n c.id as \"id!\",\n c.parent_id,\n c.content_html as \"content_html!\",\n c.content_user_markdown as \"content_markdown!\",\n c.created_at as \"created_at!\",\n c.updated_at as \"updated_at!\",\n c.user_id as \"user_id!\",\n COALESCE(up.display_name, u.username) as \"user_username!\",\n up.avatar_url as \"user_avatar_url\",\n u.role_id as \"user_role_id!\",\n COALESCE(vs.upvotes, 0) as \"upvotes!\",\n COALESCE(vs.downvotes, 0) as \"downvotes!\",\n (c.deleted_at IS NOT NULL ) as \"is_deleted!\",\n cv.vote_type as \"current_user_vote?\",\n ats.attachment_urls as \"attachment_urls?\"\n FROM comments c\n JOIN users u ON 
c.user_id = u.id\n LEFT JOIN user_profiles up ON u.id = up.user_id\n LEFT JOIN vote_summary vs ON c.id = vs.comment_vote_id\n LEFT JOIN comment_votes cv ON c.id = cv.comment_vote_id AND cv.user_id = $2\n LEFT JOIN attachments_summary ats ON c.id = ats.comment_id\n WHERE c.id = $1 AND c.deleted_at IS NULL\n ", "describe": { "columns": [ { @@ -50,23 +50,33 @@ }, { "ordinal": 9, + "name": "user_role_id!", + "type_info": "Int4" + }, + { + "ordinal": 10, "name": "upvotes!", "type_info": "Int8" }, { - "ordinal": 10, + "ordinal": 11, "name": "downvotes!", "type_info": "Int8" }, { - "ordinal": 11, - "name": "current_user_vote: _", + "ordinal": 12, + "name": "is_deleted!", + "type_info": "Bool" + }, + { + "ordinal": 13, + "name": "current_user_vote?", "type_info": "Int2" }, { - "ordinal": 12, - "name": "attachment_urls: _", - "type_info": "Json" + "ordinal": 14, + "name": "attachment_urls?", + "type_info": "TextArray" } ], "parameters": { @@ -85,11 +95,13 @@ false, null, true, + false, + null, null, null, false, null ] }, - "hash": "21c0768f87efd70066db55b20a68a28e3b5f31ab8e697035628e0a1cc5e8578b" + "hash": "25fab2cf9710a5c5922ff95f1ce25dc38e7b9fb9b7b2b69d1764dbd3dcc376fd" } diff --git a/.sqlx/query-26f6acf048a86178ceb4bfff6e7229dd503b9870fc635c24292b265c02a8ac1d.json b/.sqlx/query-26f6acf048a86178ceb4bfff6e7229dd503b9870fc635c24292b265c02a8ac1d.json new file mode 100644 index 0000000..0076e9a --- /dev/null +++ b/.sqlx/query-26f6acf048a86178ceb4bfff6e7229dd503b9870fc635c24292b265c02a8ac1d.json @@ -0,0 +1,38 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO series_chapters (series_id, chapter_number, title, source_url, status)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (series_id, chapter_number)\n DO UPDATE SET\n updated_at = NOW(),\n source_url = EXCLUDED.source_url,\n status = EXCLUDED.status\n RETURNING id", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4", + "Float4", + "Text", + "Text", + { + "Custom": { + "name": "chapter_status", + "kind": { + "Enum": [ + "Processing", + "Available", + "NoImagesFound", + "Error" + ] + } + } + } + ] + }, + "nullable": [ + false + ] + }, + "hash": "26f6acf048a86178ceb4bfff6e7229dd503b9870fc635c24292b265c02a8ac1d" +} diff --git a/.sqlx/query-29e707635ce688d1e26f9e73cc57338483557621742f5caedb9f6cd2814dbc39.json b/.sqlx/query-29e707635ce688d1e26f9e73cc57338483557621742f5caedb9f6cd2814dbc39.json new file mode 100644 index 0000000..f3a8cda --- /dev/null +++ b/.sqlx/query-29e707635ce688d1e26f9e73cc57338483557621742f5caedb9f6cd2814dbc39.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT EXISTS(\n SELECT 1 FROM comments WHERE parent_id = $1 AND deleted_at IS NULL\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "exists", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + }, + "hash": "29e707635ce688d1e26f9e73cc57338483557621742f5caedb9f6cd2814dbc39" +} diff --git a/.sqlx/query-31329106b5e77ff191cdb52bbd811af71244672925f91cc8f83e21d14f979ece.json b/.sqlx/query-31329106b5e77ff191cdb52bbd811af71244672925f91cc8f83e21d14f979ece.json deleted file mode 100644 index df11f16..0000000 --- a/.sqlx/query-31329106b5e77ff191cdb52bbd811af71244672925f91cc8f83e21d14f979ece.json +++ /dev/null @@ -1,182 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE processing_status = $1\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET 
processing_status = $2\n WHERE id = (SELECT id FROM candidate)\n RETURNING\n id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\", check_interval_minutes, last_checked_at,\n next_checked_at, created_at, updated_at\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "title", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "original_title", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "description", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "cover_image_url", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "current_source_url", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "source_website_host", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "views_count", - "type_info": "Int4" - }, - { - "ordinal": 8, - "name": "bookmarks_count", - "type_info": "Int4" - }, - { - "ordinal": 9, - "name": "total_rating_score", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "total_ratings_count", - "type_info": "Int4" - }, - { - "ordinal": 11, - "name": "last_chapter_found_in_storage", - "type_info": "Float4" - }, - { - "ordinal": 12, - "name": "processing_status: SeriesStatus", - "type_info": { - "Custom": { - "name": "series_status", - "kind": { - "Enum": [ - "Pending", - "Processing", - "Available", - "Ongoing", - "Completed", - "Hiatus", - "Discontinued", - "Error", - "Pending Deletion", - "Deleting", - "Deletion Failed" - ] - } - } - } - }, - { - "ordinal": 13, - "name": "check_interval_minutes", - "type_info": "Int4" - }, - { - "ordinal": 14, - "name": "last_checked_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 15, - "name": "next_checked_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 16, - "name": "created_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 17, - "name": "updated_at", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - { - "Custom": { - "name": "series_status", - "kind": { - "Enum": [ - "Pending", - "Processing", - "Available", - "Ongoing", - "Completed", - "Hiatus", - "Discontinued", - "Error", - "Pending Deletion", - "Deleting", - "Deletion Failed" - ] - } - } - }, - { - "Custom": { - "name": "series_status", - "kind": { - "Enum": [ - "Pending", - "Processing", - "Available", - "Ongoing", - "Completed", - "Hiatus", - "Discontinued", - "Error", - "Pending Deletion", - "Deleting", - "Deletion Failed" - ] - } - } - } - ] - }, - "nullable": [ - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - true, - false, - false, - true, - true, - false, - false - ] - }, - "hash": "31329106b5e77ff191cdb52bbd811af71244672925f91cc8f83e21d14f979ece" -} diff --git a/.sqlx/query-32bd2bb5781954c16789f5e68c442e02186b6fee3dc13c9fe1d1753b654aff2c.json b/.sqlx/query-32bd2bb5781954c16789f5e68c442e02186b6fee3dc13c9fe1d1753b654aff2c.json new file mode 100644 index 0000000..78f0f91 --- /dev/null +++ b/.sqlx/query-32bd2bb5781954c16789f5e68c442e02186b6fee3dc13c9fe1d1753b654aff2c.json @@ -0,0 +1,18 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE users\n SET username = $1, email = $2, role_id = $3, is_active = $4, updated_at = NOW()\n WHERE id = $5\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Int4", + "Bool", + "Int4" + ] + 
}, + "nullable": [] + }, + "hash": "32bd2bb5781954c16789f5e68c442e02186b6fee3dc13c9fe1d1753b654aff2c" +} diff --git a/.sqlx/query-a5d63c05f2e3140963dc2808e75003f4c5f948511401d8259c5ca8b61318bf90.json b/.sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json similarity index 83% rename from .sqlx/query-a5d63c05f2e3140963dc2808e75003f4c5f948511401d8259c5ca8b61318bf90.json rename to .sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json index 6f104df..7e307af 100644 --- a/.sqlx/query-a5d63c05f2e3140963dc2808e75003f4c5f948511401d8259c5ca8b61318bf90.json +++ b/.sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series WHERE id = $1", + "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series \n WHERE id = $1\n ", "describe": { "columns": [ { @@ -139,5 +139,5 @@ false ] }, - "hash": "a5d63c05f2e3140963dc2808e75003f4c5f948511401d8259c5ca8b61318bf90" + "hash": "35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d" } diff --git a/.sqlx/query-414bdbf62eab2c725d434c1026c83c8c94aa15e7084581ef95cafea37737223c.json b/.sqlx/query-414bdbf62eab2c725d434c1026c83c8c94aa15e7084581ef95cafea37737223c.json new file mode 100644 index 0000000..52fbd5b --- /dev/null +++ b/.sqlx/query-414bdbf62eab2c725d434c1026c83c8c94aa15e7084581ef95cafea37737223c.json @@ -0,0 +1,28 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n u.role_id,\n c.user_id\n FROM comments c\n JOIN users u ON c.user_id = u.id\n WHERE c.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "user_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + }, + "hash": "414bdbf62eab2c725d434c1026c83c8c94aa15e7084581ef95cafea37737223c" +} diff --git a/.sqlx/query-4708b47aab9e8b20cbfebb16287313b98c461c8f57e0f612d53917a286f7a54c.json b/.sqlx/query-4708b47aab9e8b20cbfebb16287313b98c461c8f57e0f612d53917a286f7a54c.json new file mode 100644 index 0000000..ca09274 --- /dev/null +++ b/.sqlx/query-4708b47aab9e8b20cbfebb16287313b98c461c8f57e0f612d53917a286f7a54c.json @@ -0,0 +1,52 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n u.id,\n u.username,\n u.email,\n u.role_id,\n COALESCE(u.is_active, false) as \"is_active!\",\n r.role_name\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE u.id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "is_active!", + "type_info": "Bool" + }, + { + "ordinal": 5, 
+ "name": "role_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + null, + false + ] + }, + "hash": "4708b47aab9e8b20cbfebb16287313b98c461c8f57e0f612d53917a286f7a54c" +} diff --git a/.sqlx/query-46e71b9fdf391246eeed01dad5b40ae863fda9d9d7ae2b2652c6a5b815adab18.json b/.sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json similarity index 83% rename from .sqlx/query-46e71b9fdf391246eeed01dad5b40ae863fda9d9d7ae2b2652c6a5b815adab18.json rename to .sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json index dfa89aa..9694ca5 100644 --- a/.sqlx/query-46e71b9fdf391246eeed01dad5b40ae863fda9d9d7ae2b2652c6a5b815adab18.json +++ b/.sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series WHERE title = $1", + "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series \n WHERE title = $1\n ", "describe": { "columns": [ { @@ -139,5 +139,5 @@ false ] }, - "hash": "46e71b9fdf391246eeed01dad5b40ae863fda9d9d7ae2b2652c6a5b815adab18" + "hash": "49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f" } diff --git a/.sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json b/.sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json new file mode 100644 index 0000000..2a29e13 --- /dev/null +++ b/.sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json @@ -0,0 +1,73 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE processing_status = $1\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $2\n WHERE id = (SELECT id FROM candidate)\n RETURNING\n id,\n title,\n cover_image_url\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "title", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "cover_image_url", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + { + "Custom": { + "name": "series_status", + "kind": { + "Enum": [ + "Pending", + "Processing", + "Available", + "Ongoing", + "Completed", + "Hiatus", + "Discontinued", + "Error", + "Pending Deletion", + "Deleting", + "Deletion Failed" + ] + } + } + }, + { + "Custom": { + "name": "series_status", + "kind": { + "Enum": [ + "Pending", + "Processing", + "Available", + "Ongoing", + "Completed", + "Hiatus", + "Discontinued", + "Error", + "Pending Deletion", + "Deleting", + "Deletion Failed" + ] + } + } + } + ] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b" +} diff --git 
a/.sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json b/.sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json new file mode 100644 index 0000000..d52ed59 --- /dev/null +++ b/.sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json @@ -0,0 +1,58 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH locked_rows AS ( \n SELECT id\n FROM series_chapters\n WHERE status = 'Processing'\n ORDER BY created_at ASC \n LIMIT $1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series_chapters sc\n SET \n status = 'Processing', \n updated_at = NOW()\n FROM locked_rows lr, series s\n WHERE sc.id = lr.id AND sc.series_id = s.id\n RETURNING\n sc.id as chapter_id,\n sc.chapter_number,\n sc.source_url as chapter_url,\n s.id as series_id,\n s.title as series_title,\n s.source_website_host as source_host,\n s.current_source_url as series_url\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "chapter_id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "chapter_number", + "type_info": "Float4" + }, + { + "ordinal": 2, + "name": "chapter_url", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "series_id", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "series_title", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "source_host", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "series_url", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false + ] + }, + "hash": "4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14" +} diff --git a/.sqlx/query-50293c2e54af11d4c2a553e29b671cef087a159c6ee7182d8ca929ecb748f3b7.json b/.sqlx/query-50293c2e54af11d4c2a553e29b671cef087a159c6ee7182d8ca929ecb748f3b7.json new file mode 100644 index 0000000..cf6c805 --- /dev/null +++ b/.sqlx/query-50293c2e54af11d4c2a553e29b671cef087a159c6ee7182d8ca929ecb748f3b7.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM users WHERE id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [] + }, + "hash": "50293c2e54af11d4c2a553e29b671cef087a159c6ee7182d8ca929ecb748f3b7" +} diff --git a/.sqlx/query-687d52591357a48a7755284ffd589a5b834dc49ff7b053e488ecc7a2e6a3e1b7.json b/.sqlx/query-687d52591357a48a7755284ffd589a5b834dc49ff7b053e488ecc7a2e6a3e1b7.json deleted file mode 100644 index 3b499f3..0000000 --- a/.sqlx/query-687d52591357a48a7755284ffd589a5b834dc49ff7b053e488ecc7a2e6a3e1b7.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE series SET last_chapter_found_in_storage = $1, updated_at = NOW() WHERE id = $2", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Float4", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "687d52591357a48a7755284ffd589a5b834dc49ff7b053e488ecc7a2e6a3e1b7" -} diff --git a/.sqlx/query-6c1e46896cea195631b6c54e78bff51c0a9c6d899b1bc467119826213a7e9c63.json b/.sqlx/query-6c1e46896cea195631b6c54e78bff51c0a9c6d899b1bc467119826213a7e9c63.json new file mode 100644 index 0000000..8dbff1e --- /dev/null +++ b/.sqlx/query-6c1e46896cea195631b6c54e78bff51c0a9c6d899b1bc467119826213a7e9c63.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM comments WHERE id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "6c1e46896cea195631b6c54e78bff51c0a9c6d899b1bc467119826213a7e9c63" +} diff --git 
a/.sqlx/query-6dc1d469d73360b6bf38fd405305dc551708e2d45b502c97dbe7bab97aa0ba9d.json b/.sqlx/query-6dc1d469d73360b6bf38fd405305dc551708e2d45b502c97dbe7bab97aa0ba9d.json new file mode 100644 index 0000000..d08fbbf --- /dev/null +++ b/.sqlx/query-6dc1d469d73360b6bf38fd405305dc551708e2d45b502c97dbe7bab97aa0ba9d.json @@ -0,0 +1,60 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n s.id,\n s.title,\n s.original_title,\n s.cover_image_url,\n s.last_chapter_found_in_storage,\n s.updated_at,\n COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL),\n '[]'::json) as authors\n FROM series s\n LEFT JOIN series_authors sa ON s.id = sa.series_id\n LEFT JOIN authors a ON sa.author_id = a.id\n WHERE\n (\n s.title ILIKE '%' || $1 || '%'\n OR (s.title % $1 AND similarity(s.title, $1) >= $2)\n )\n OR\n (\n s.original_title IS NOT NULL AND (\n s.original_title ILIKE '%' || $1 || '%'\n OR (s.original_title % $1 AND similarity(s.original_title, $1) >= $2)\n )\n )\n GROUP BY s.id\n ORDER BY GREATEST(\n similarity(s.title, $1),\n similarity(COALESCE(s.original_title, ''), $1)\n ) DESC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "title", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "original_title", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "cover_image_url", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "last_chapter_found_in_storage", + "type_info": "Float4" + }, + { + "ordinal": 5, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "authors", + "type_info": "Json" + } + ], + "parameters": { + "Left": [ + "Text", + "Float4", + "Int8" + ] + }, + "nullable": [ + false, + false, + true, + false, + true, + false, + null + ] + }, + "hash": "6dc1d469d73360b6bf38fd405305dc551708e2d45b502c97dbe7bab97aa0ba9d" +} diff --git a/.sqlx/query-6ea09619b5ba51afcddbc8bebc42d1ddf2ae9cbd22d8ee31e6ae54c82fc3be67.json b/.sqlx/query-6ea09619b5ba51afcddbc8bebc42d1ddf2ae9cbd22d8ee31e6ae54c82fc3be67.json deleted file mode 100644 index d9d101d..0000000 --- a/.sqlx/query-6ea09619b5ba51afcddbc8bebc42d1ddf2ae9cbd22d8ee31e6ae54c82fc3be67.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n u.id,\n u.username,\n u.email,\n r.role_name,\n COUNT(*) OVER() as total_items\n FROM users u\n JOIN roles r ON u.role_id = r.id\n ORDER BY u.id ASC\n LIMIT $1 OFFSET $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "username", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "role_name", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "total_items", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - null - ] - }, - "hash": "6ea09619b5ba51afcddbc8bebc42d1ddf2ae9cbd22d8ee31e6ae54c82fc3be67" -} diff --git a/.sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json b/.sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json new file mode 100644 index 0000000..a1ebb55 --- /dev/null +++ b/.sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json @@ -0,0 +1,102 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n r.id,\n u.username as reporter_username,\n r.reporter_id,\n r.created_at,\n r.reason 
as \"reason!: ReportReason\",\n sc.id as chapter_id,\n sc.chapter_number as chapter_number,\n sc.title as chapter_series_title,\n c.id as comment_id,\n SUBSTRING(c.content_html, 1, 50) as comment_preview,\n COUNT(*) OVER() as total_items\n FROM reports r\n INNER JOIN users u ON r.reporter_id = u.id\n LEFT JOIN series_chapters sc ON r.chapter_id = sc.id\n LEFT JOIN series s ON sc.series_id = s.id\n LEFT JOIN comments c ON r.comment_id = c.id\n ORDER BY r.created_at DESC\n LIMIT $1\n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "reporter_username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "reporter_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "reason!: ReportReason", + "type_info": { + "Custom": { + "name": "report_reason", + "kind": { + "Enum": [ + "broken_image", + "wrong_chapter", + "duplicate_chapter", + "missing_image", + "missing_chapter", + "slow_loading", + "broken_text", + "toxic", + "racist", + "spam", + "other" + ] + } + } + } + }, + { + "ordinal": 5, + "name": "chapter_id", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "chapter_number", + "type_info": "Float4" + }, + { + "ordinal": 7, + "name": "chapter_series_title", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "comment_id", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "comment_preview", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + false, + null, + null + ] + }, + "hash": "731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6" +} diff --git a/.sqlx/query-77e09aa662d7eb1f5badd4bf3471545489a797440862aa30b5d1b3ef90cc6a01.json b/.sqlx/query-77e09aa662d7eb1f5badd4bf3471545489a797440862aa30b5d1b3ef90cc6a01.json new file mode 100644 index 0000000..003ceef --- /dev/null +++ b/.sqlx/query-77e09aa662d7eb1f5badd4bf3471545489a797440862aa30b5d1b3ef90cc6a01.json @@ -0,0 +1,47 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE comments\n SET\n content_user_markdown = '',\n content_html = '
[Deleted]
', -- Or any placeholder\n deleted_at = NOW(),\n updated_at = NOW()\n WHERE id = $1 AND user_id = $2\n RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as \"is_deleted!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "content_user_markdown", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "content_html", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "is_deleted!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + null + ] + }, + "hash": "77e09aa662d7eb1f5badd4bf3471545489a797440862aa30b5d1b3ef90cc6a01" +} diff --git a/.sqlx/query-7802e84b32168e8ea3d4f75f65d491327ddc6d01f7c959aad77326b251bed291.json b/.sqlx/query-7802e84b32168e8ea3d4f75f65d491327ddc6d01f7c959aad77326b251bed291.json new file mode 100644 index 0000000..53315bc --- /dev/null +++ b/.sqlx/query-7802e84b32168e8ea3d4f75f65d491327ddc6d01f7c959aad77326b251bed291.json @@ -0,0 +1,59 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n u.id,\n u.username,\n u.email,\n u.role_id,\n r.role_name,\n u.is_active,\n COUNT(*) OVER() as \"total_items\"\n FROM users u\n JOIN roles r ON u.role_id = r.id\n ORDER BY u.id ASC\n LIMIT $1 OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "role_name", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "is_active", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + null + ] + }, + "hash": "7802e84b32168e8ea3d4f75f65d491327ddc6d01f7c959aad77326b251bed291" +} diff --git a/.sqlx/query-81d81bb83838ea176ab82fe89ebc299b8d579961bfb2691343a3d9cff1f9490f.json b/.sqlx/query-81d81bb83838ea176ab82fe89ebc299b8d579961bfb2691343a3d9cff1f9490f.json new file mode 100644 index 0000000..6d0000a --- /dev/null +++ b/.sqlx/query-81d81bb83838ea176ab82fe89ebc299b8d579961bfb2691343a3d9cff1f9490f.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM reports WHERE id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [] + }, + "hash": "81d81bb83838ea176ab82fe89ebc299b8d579961bfb2691343a3d9cff1f9490f" +} diff --git a/.sqlx/query-898ddd8b20b52f9781f447b88af5d1d0accc2c839bf05f776664de9f2ba36c92.json b/.sqlx/query-898ddd8b20b52f9781f447b88af5d1d0accc2c839bf05f776664de9f2ba36c92.json new file mode 100644 index 0000000..7b4bf64 --- /dev/null +++ b/.sqlx/query-898ddd8b20b52f9781f447b88af5d1d0accc2c839bf05f776664de9f2ba36c92.json @@ -0,0 +1,24 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT 1 FROM users WHERE (username = $1 OR email = $2) AND id != $3 LIMIT 1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "?column?", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Int4" + ] + }, + "nullable": [ + null + ] + }, + "hash": "898ddd8b20b52f9781f447b88af5d1d0accc2c839bf05f776664de9f2ba36c92" +} diff --git 
a/.sqlx/query-8a18ab3263d1286f8dacf3875d58b57334a2844ecdfdbc207be3987b8daa3349.json b/.sqlx/query-8a18ab3263d1286f8dacf3875d58b57334a2844ecdfdbc207be3987b8daa3349.json new file mode 100644 index 0000000..93f8167 --- /dev/null +++ b/.sqlx/query-8a18ab3263d1286f8dacf3875d58b57334a2844ecdfdbc207be3987b8daa3349.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE comments\n SET\n content_user_markdown = '',\n content_html = '
[Removed by Mod]
',\n deleted_at = NOW(),\n updated_at = NOW()\n WHERE id = $1\n RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as \"is_deleted!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "content_user_markdown", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "content_html", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "is_deleted!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + null + ] + }, + "hash": "8a18ab3263d1286f8dacf3875d58b57334a2844ecdfdbc207be3987b8daa3349" +} diff --git a/.sqlx/query-21643eac78fdc89735010102e25e48311a4bb482726b2425efbd7fac55271f34.json b/.sqlx/query-8a3fa527c2b92f4b1e6421cafb12670397d643659533a6ddcc90cb84fcd3c4bc.json similarity index 87% rename from .sqlx/query-21643eac78fdc89735010102e25e48311a4bb482726b2425efbd7fac55271f34.json rename to .sqlx/query-8a3fa527c2b92f4b1e6421cafb12670397d643659533a6ddcc90cb84fcd3c4bc.json index d52269f..17a6c1c 100644 --- a/.sqlx/query-21643eac78fdc89735010102e25e48311a4bb482726b2425efbd7fac55271f34.json +++ b/.sqlx/query-8a3fa527c2b92f4b1e6421cafb12670397d643659533a6ddcc90cb84fcd3c4bc.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "UPDATE series SET processing_status = $1,\n updated_at = NOW() WHERE id = $2 AND processing_status NOT IN ($3, $4)", + "query": "UPDATE series\n SET processing_status = $1, updated_at = NOW()\n WHERE id = $2\n AND processing_status NOT IN ($3, $4)", "describe": { "columns": [], "parameters": { @@ -70,5 +70,5 @@ }, "nullable": [] }, - "hash": "21643eac78fdc89735010102e25e48311a4bb482726b2425efbd7fac55271f34" + "hash": "8a3fa527c2b92f4b1e6421cafb12670397d643659533a6ddcc90cb84fcd3c4bc" } diff --git a/.sqlx/query-92d7089f1eb432ab54f67fc21f7a4a6f91fe7f1bb2c163ff8d36f44020fcc5b7.json b/.sqlx/query-92d7089f1eb432ab54f67fc21f7a4a6f91fe7f1bb2c163ff8d36f44020fcc5b7.json deleted file mode 100644 index 6d1e1b2..0000000 --- a/.sqlx/query-92d7089f1eb432ab54f67fc21f7a4a6f91fe7f1bb2c163ff8d36f44020fcc5b7.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO series_chapters (series_id, chapter_number, title, source_url)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (source_url) DO UPDATE SET updated_at = NOW()\n RETURNING id", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Int4", - "Float4", - "Text", - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "92d7089f1eb432ab54f67fc21f7a4a6f91fe7f1bb2c163ff8d36f44020fcc5b7" -} diff --git a/.sqlx/query-9a4af2e1957f9d886b4bf067163dbb89c076da206155c7b7820ecd63d13a61b3.json b/.sqlx/query-9a4af2e1957f9d886b4bf067163dbb89c076da206155c7b7820ecd63d13a61b3.json deleted file mode 100644 index 556a56a..0000000 --- a/.sqlx/query-9a4af2e1957f9d886b4bf067163dbb89c076da206155c7b7820ecd63d13a61b3.json +++ /dev/null @@ -1,106 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH RECURSIVE comment_thread AS (\n -- Anchor member: top-level comments\n SELECT * FROM comments\n WHERE comments_type = $1 AND comments_id = $2 AND parent_id IS NULL AND deleted_at IS NULL\n UNION ALL\n -- Recursive member: replies to comments already in the thread\n SELECT c.*\n FROM comments c\n JOIN comment_thread ct ON c.parent_id = ct.id\n WHERE c.deleted_at IS NULL\n ),\n 
vote_summary AS (\n SELECT\n cv.comment_vote_id,\n COUNT(*) FILTER (WHERE cv.vote_type = 1) AS upvotes,\n COUNT(*) FILTER (WHERE cv.vote_type = -1) AS downvotes\n FROM comment_votes cv\n WHERE cv.comment_vote_id IN (SELECT id FROM comment_thread)\n GROUP BY cv.comment_vote_id\n ),\n attachments_summary AS (\n -- Aggregate all attachment URLs for each comment into a JSON array\n SELECT\n comment_id,\n json_agg(file_url) as attachment_urls\n FROM comment_attachments\n WHERE comment_id IN (SELECT id FROM comment_thread)\n GROUP BY comment_id\n )\n SELECT\n ct.id as \"id!\",\n ct.parent_id,\n ct.content_html as \"content_html!\",\n ct.content_user_markdown as \"content_markdown!\",\n ct.created_at as \"created_at!\",\n ct.updated_at as \"updated_at!\",\n ct.user_id as \"user_id!\",\n COALESCE(up.display_name, u.username) as \"user_username!\",\n up.avatar_url as \"user_avatar_url\",\n COALESCE(vs.upvotes, 0) as \"upvotes!\",\n COALESCE(vs.downvotes, 0) as \"downvotes!\",\n cv.vote_type as \"current_user_vote: _\",\n ats.attachment_urls as \"attachment_urls: _\"\n FROM comment_thread ct\n JOIN users u ON ct.user_id = u.id\n LEFT JOIN user_profiles up ON u.id = up.user_id\n LEFT JOIN vote_summary vs ON ct.id = vs.comment_vote_id\n LEFT JOIN comment_votes cv ON ct.id = cv.comment_vote_id AND cv.user_id = $3\n LEFT JOIN attachments_summary ats ON ct.id = ats.comment_id\n ORDER BY ct.created_at ASC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "parent_id", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "content_html!", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "content_markdown!", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 5, - "name": "updated_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 6, - "name": "user_id!", - "type_info": "Int4" - }, - { - "ordinal": 7, - "name": "user_username!", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "user_avatar_url", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "upvotes!", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "downvotes!", - "type_info": "Int8" - }, - { - "ordinal": 11, - "name": "current_user_vote: _", - "type_info": "Int2" - }, - { - "ordinal": 12, - "name": "attachment_urls: _", - "type_info": "Json" - } - ], - "parameters": { - "Left": [ - { - "Custom": { - "name": "comments_entity", - "kind": { - "Enum": [ - "series", - "series_chapters" - ] - } - } - }, - "Int4", - "Int4" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - true, - null, - null, - false, - null - ] - }, - "hash": "9a4af2e1957f9d886b4bf067163dbb89c076da206155c7b7820ecd63d13a61b3" -} diff --git a/.sqlx/query-a0d21a213952a7b0cf5310928e80fecfc9cfca68c0df7222da50817a9e649bf6.json b/.sqlx/query-a0d21a213952a7b0cf5310928e80fecfc9cfca68c0df7222da50817a9e649bf6.json new file mode 100644 index 0000000..6fe9e51 --- /dev/null +++ b/.sqlx/query-a0d21a213952a7b0cf5310928e80fecfc9cfca68c0df7222da50817a9e649bf6.json @@ -0,0 +1,15 @@ +{ + "db_name": "PostgreSQL", + "query": "\n DELETE FROM comments\n WHERE id = $1 AND user_id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + }, + "nullable": [] + }, + "hash": "a0d21a213952a7b0cf5310928e80fecfc9cfca68c0df7222da50817a9e649bf6" +} diff --git a/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json 
b/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json new file mode 100644 index 0000000..ccf7448 --- /dev/null +++ b/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json @@ -0,0 +1,61 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH base_search AS (\n SELECT\n u.id,\n u.username,\n u.email,\n u.role_id,\n u.is_active,\n r.role_name,\n similarity(u.username || ' ' || u.email, $3) AS sim_score\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE\n (u.username ILIKE '%' || $3 || '%')\n OR\n (u.email ILIKE '%' || $3 || '%')\n OR\n (\n (u.username || ' ' || u.email) % $3\n AND\n similarity(u.username || ' ' || u.email, $3) >= $4\n )\n ),\n ranked_results AS (\n SELECT\n *,\n CASE\n WHEN username ILIKE $3 OR email ILIKE $3 THEN 10\n WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 8\n ELSE 6\n END as search_rank\n FROM base_search\n ),\n total_count AS (\n SELECT COUNT(*) AS total FROM ranked_results\n )\n SELECT\n rr.id,\n rr.username,\n rr.email,\n rr.role_name,\n rr.role_id,\n rr.is_active,\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n -- We can order by columns (search_rank, sim_score) that are not in the final SELECT list\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC\n LIMIT $1\n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "role_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 5, + "name": "is_active", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Text", + "Float4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + true, + null + ] + }, + "hash": "a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0" +} diff --git a/.sqlx/query-a6c2db6e77868f6e17f3dfb16738904b79c6925efedd814fa4cfc70d8af27561.json b/.sqlx/query-a6c2db6e77868f6e17f3dfb16738904b79c6925efedd814fa4cfc70d8af27561.json new file mode 100644 index 0000000..0b0f118 --- /dev/null +++ b/.sqlx/query-a6c2db6e77868f6e17f3dfb16738904b79c6925efedd814fa4cfc70d8af27561.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT file_url FROM comment_attachments WHERE comment_id = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "file_url", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false + ] + }, + "hash": "a6c2db6e77868f6e17f3dfb16738904b79c6925efedd814fa4cfc70d8af27561" +} diff --git a/.sqlx/query-b79a867a9a0819e972ae6e36f6103ad1043fa3ca33fa3690258a9ddff5b1fc27.json b/.sqlx/query-b79a867a9a0819e972ae6e36f6103ad1043fa3ca33fa3690258a9ddff5b1fc27.json new file mode 100644 index 0000000..c9512e4 --- /dev/null +++ b/.sqlx/query-b79a867a9a0819e972ae6e36f6103ad1043fa3ca33fa3690258a9ddff5b1fc27.json @@ -0,0 +1,77 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n s.id,\n s.title,\n s.original_title,\n s.description,\n s.cover_image_url,\n s.updated_at,\n s.last_chapter_found_in_storage,\n sc.title as chapter_title,\n COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL),\n '[]'::json) as authors,\n COUNT(*) OVER () as total_items\n FROM\n series s\n LEFT 
JOIN\n series_chapters sc ON s.id = sc.series_id\n AND s.last_chapter_found_in_storage = sc.chapter_number\n LEFT JOIN\n series_authors sa ON s.id = sa.series_id\n LEFT JOIN\n authors a ON sa.author_id = a.id\n WHERE\n s.updated_at >= NOW() - interval '7 days'\n GROUP BY\n s.id, sc.title\n ORDER BY\n s.updated_at DESC\n LIMIT $1\n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "title", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "original_title", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "description", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "cover_image_url", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "last_chapter_found_in_storage", + "type_info": "Float4" + }, + { + "ordinal": 7, + "name": "chapter_title", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "authors", + "type_info": "Json" + }, + { + "ordinal": 9, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + true, + false, + false, + false, + true, + true, + null, + null + ] + }, + "hash": "b79a867a9a0819e972ae6e36f6103ad1043fa3ca33fa3690258a9ddff5b1fc27" +} diff --git a/.sqlx/query-c07704f7b459b9bacce8fabba895c6310650c485e6770f7ab9749330064825de.json b/.sqlx/query-c07704f7b459b9bacce8fabba895c6310650c485e6770f7ab9749330064825de.json deleted file mode 100644 index caaa490..0000000 --- a/.sqlx/query-c07704f7b459b9bacce8fabba895c6310650c485e6770f7ab9749330064825de.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO series_ratings (series_id, user_id, rating) VALUES ($1, $2, $3)\n ON CONFLICT (user_id, series_id) DO UPDATE SET rating = $3, updated_at = NOW()\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4", - "Int4", - "Int2" - ] - }, - "nullable": [] - }, - "hash": "c07704f7b459b9bacce8fabba895c6310650c485e6770f7ab9749330064825de" -} diff --git a/.sqlx/query-d2d972c1a48715a0c8da941c399a8d5deaac035cf929300fdb795024ba6fed1c.json b/.sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json similarity index 57% rename from .sqlx/query-d2d972c1a48715a0c8da941c399a8d5deaac035cf929300fdb795024ba6fed1c.json rename to .sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json index b105148..8122dac 100644 --- a/.sqlx/query-d2d972c1a48715a0c8da941c399a8d5deaac035cf929300fdb795024ba6fed1c.json +++ b/.sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE\n processing_status = $1\n AND next_checked_at <= NOW()\n ORDER BY next_checked_at ASC\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $2\n WHERE id = (SELECT id FROM candidate)\n RETURNING\n id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\", check_interval_minutes, last_checked_at,\n next_checked_at, created_at, updated_at\n ", + "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE\n processing_status = $1\n AND next_checked_at <= NOW()\n ORDER BY next_checked_at ASC\n 
LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $3\n WHERE id IN (SELECT id FROM candidate)\n RETURNING\n id, \n title, \n current_source_url, source_website_host,\n last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes\n ", "describe": { "columns": [ { @@ -15,56 +15,21 @@ }, { "ordinal": 2, - "name": "original_title", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "description", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "cover_image_url", - "type_info": "Text" - }, - { - "ordinal": 5, "name": "current_source_url", "type_info": "Text" }, { - "ordinal": 6, + "ordinal": 3, "name": "source_website_host", "type_info": "Text" }, { - "ordinal": 7, - "name": "views_count", - "type_info": "Int4" - }, - { - "ordinal": 8, - "name": "bookmarks_count", - "type_info": "Int4" - }, - { - "ordinal": 9, - "name": "total_rating_score", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "total_ratings_count", - "type_info": "Int4" - }, - { - "ordinal": 11, + "ordinal": 4, "name": "last_chapter_found_in_storage", "type_info": "Float4" }, { - "ordinal": 12, + "ordinal": 5, "name": "processing_status: SeriesStatus", "type_info": { "Custom": { @@ -88,29 +53,9 @@ } }, { - "ordinal": 13, + "ordinal": 6, "name": "check_interval_minutes", "type_info": "Int4" - }, - { - "ordinal": 14, - "name": "last_checked_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 15, - "name": "next_checked_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 16, - "name": "created_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 17, - "name": "updated_at", - "type_info": "Timestamptz" } ], "parameters": { @@ -135,6 +80,7 @@ } } }, + "Int8", { "Custom": { "name": "series_status", @@ -158,25 +104,14 @@ ] }, "nullable": [ - false, - false, - true, - false, - false, - false, false, false, false, false, - false, - true, - false, - false, - true, true, false, false ] }, - "hash": "d2d972c1a48715a0c8da941c399a8d5deaac035cf929300fdb795024ba6fed1c" + "hash": "c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531" } diff --git a/.sqlx/query-34741bbbd599c4a18993fdbe09f58ae9c395f5369ee2522a6c0ab8d18cfe1e9a.json b/.sqlx/query-c8b0764049924eadcaafb999304a7d7b54569d083efbb6f63659e1633abe2ba3.json similarity index 51% rename from .sqlx/query-34741bbbd599c4a18993fdbe09f58ae9c395f5369ee2522a6c0ab8d18cfe1e9a.json rename to .sqlx/query-c8b0764049924eadcaafb999304a7d7b54569d083efbb6f63659e1633abe2ba3.json index 6be9026..a942d14 100644 --- a/.sqlx/query-34741bbbd599c4a18993fdbe09f58ae9c395f5369ee2522a6c0ab8d18cfe1e9a.json +++ b/.sqlx/query-c8b0764049924eadcaafb999304a7d7b54569d083efbb6f63659e1633abe2ba3.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "INSERT INTO series\n (title, original_title, description, cover_image_url, current_source_url, source_website_host, check_interval_minutes)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING id", + "query": "\n INSERT INTO series\n (title, original_title, description, cover_image_url, current_source_url, source_website_host, check_interval_minutes)\n VALUES ($1, $2, $3, $4, $5, $6, $7)\n RETURNING id\n ", "describe": { "columns": [ { @@ -24,5 +24,5 @@ false ] }, - "hash": "34741bbbd599c4a18993fdbe09f58ae9c395f5369ee2522a6c0ab8d18cfe1e9a" + "hash": "c8b0764049924eadcaafb999304a7d7b54569d083efbb6f63659e1633abe2ba3" } diff --git a/.sqlx/query-d3eb1eeba3a3941bf56b29b24e61b3573b1a42b001e550c1a47c76d062535d4d.json 
b/.sqlx/query-d3eb1eeba3a3941bf56b29b24e61b3573b1a42b001e550c1a47c76d062535d4d.json deleted file mode 100644 index 792cc05..0000000 --- a/.sqlx/query-d3eb1eeba3a3941bf56b29b24e61b3573b1a42b001e550c1a47c76d062535d4d.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n UPDATE comments\n SET\n content_user_markdown = $1,\n content_html = $2,\n updated_at = NOW()\n WHERE id = $3 AND user_id = $4\n RETURNING content_html\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "content_html", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Int8", - "Int4" - ] - }, - "nullable": [ - false - ] - }, - "hash": "d3eb1eeba3a3941bf56b29b24e61b3573b1a42b001e550c1a47c76d062535d4d" -} diff --git a/.sqlx/query-d50ae4eb0b10b8710170e43ee655621e97ce66fe4939870b55a10ec5c3132d2c.json b/.sqlx/query-d50ae4eb0b10b8710170e43ee655621e97ce66fe4939870b55a10ec5c3132d2c.json new file mode 100644 index 0000000..10fb256 --- /dev/null +++ b/.sqlx/query-d50ae4eb0b10b8710170e43ee655621e97ce66fe4939870b55a10ec5c3132d2c.json @@ -0,0 +1,12 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM password_reset_tokens WHERE expires_at < NOW()", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + }, + "hash": "d50ae4eb0b10b8710170e43ee655621e97ce66fe4939870b55a10ec5c3132d2c" +} diff --git a/.sqlx/query-d65a09aee176b6dba0a9e94b3e728931f6e687475043839a396aa605e1136678.json b/.sqlx/query-d65a09aee176b6dba0a9e94b3e728931f6e687475043839a396aa605e1136678.json new file mode 100644 index 0000000..176aebb --- /dev/null +++ b/.sqlx/query-d65a09aee176b6dba0a9e94b3e728931f6e687475043839a396aa605e1136678.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT MAX(chapter_number)\n FROM series_chapters\n WHERE series_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Float4" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + null + ] + }, + "hash": "d65a09aee176b6dba0a9e94b3e728931f6e687475043839a396aa605e1136678" +} diff --git a/.sqlx/query-da21599581a5323de6a888f6db9cc094c2230b87a725a752373d7e605d96d30f.json b/.sqlx/query-da21599581a5323de6a888f6db9cc094c2230b87a725a752373d7e605d96d30f.json new file mode 100644 index 0000000..c737f37 --- /dev/null +++ b/.sqlx/query-da21599581a5323de6a888f6db9cc094c2230b87a725a752373d7e605d96d30f.json @@ -0,0 +1,49 @@ +{ + "db_name": "PostgreSQL", + "query": "\n UPDATE comments\n SET\n content_user_markdown = $1,\n content_html = $2,\n deleted_at = NOW(),\n updated_at = NOW()\n WHERE id = $3 AND user_id = $4\n RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as \"is_deleted!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "content_user_markdown", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "content_html", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "updated_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "is_deleted!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Text", + "Text", + "Int8", + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + null + ] + }, + "hash": "da21599581a5323de6a888f6db9cc094c2230b87a725a752373d7e605d96d30f" +} diff --git a/.sqlx/query-e7427da830b3095a987dc773423dfbe82e4f34e11d94898df3d3584e56c5c2af.json b/.sqlx/query-e7427da830b3095a987dc773423dfbe82e4f34e11d94898df3d3584e56c5c2af.json 
new file mode 100644 index 0000000..2364fa0 --- /dev/null +++ b/.sqlx/query-e7427da830b3095a987dc773423dfbe82e4f34e11d94898df3d3584e56c5c2af.json @@ -0,0 +1,36 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO reports (reporter_id, chapter_id, comment_id, reason)\n VALUES ($1, $2, $3, $4)\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4", + "Int4", + "Int8", + { + "Custom": { + "name": "report_reason", + "kind": { + "Enum": [ + "broken_image", + "wrong_chapter", + "duplicate_chapter", + "missing_image", + "missing_chapter", + "slow_loading", + "broken_text", + "toxic", + "racist", + "spam", + "other" + ] + } + } + } + ] + }, + "nullable": [] + }, + "hash": "e7427da830b3095a987dc773423dfbe82e4f34e11d94898df3d3584e56c5c2af" +} diff --git a/.sqlx/query-fcd2a58a2eea797a641384f985f8cf908e3d4d612b11bf57f6529cf58f67d971.json b/.sqlx/query-fcd2a58a2eea797a641384f985f8cf908e3d4d612b11bf57f6529cf58f67d971.json new file mode 100644 index 0000000..54b85d5 --- /dev/null +++ b/.sqlx/query-fcd2a58a2eea797a641384f985f8cf908e3d4d612b11bf57f6529cf58f67d971.json @@ -0,0 +1,46 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT u.username, u.email, u.role_id, u.is_active, r.role_name\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE u.id = $1\n FOR UPDATE", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "is_active", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "role_name", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + true, + false + ] + }, + "hash": "fcd2a58a2eea797a641384f985f8cf908e3d4d612b11bf57f6529cf58f67d971" +} diff --git a/.sqlx/query-fd583e5bd160f5f65f2613fbf4a0e26db34167ad2612acc6c1cdd2d795305496.json b/.sqlx/query-fd583e5bd160f5f65f2613fbf4a0e26db34167ad2612acc6c1cdd2d795305496.json new file mode 100644 index 0000000..bd612dd --- /dev/null +++ b/.sqlx/query-fd583e5bd160f5f65f2613fbf4a0e26db34167ad2612acc6c1cdd2d795305496.json @@ -0,0 +1,14 @@ +{ + "db_name": "PostgreSQL", + "query": "DELETE FROM comment_attachments WHERE comment_id = $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] + }, + "hash": "fd583e5bd160f5f65f2613fbf4a0e26db34167ad2612acc6c1cdd2d795305496" +} diff --git a/Cargo.lock b/Cargo.lock index 26dc8b1..e2dbabe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,15 +2,6 @@ # It is not intended for manual editing. 
version = 4 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" version = "2.0.1" @@ -77,6 +68,19 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +[[package]] +name = "ammonia" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f" +dependencies = [ + "cssparser 0.35.0", + "html5ever 0.35.0", + "maplit", + "tendril", + "url", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -94,9 +98,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arbitrary" @@ -208,6 +212,21 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "av-metrics" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "996ce95bbdb0203e5b91d4a0c9b81c0d67d11c80f884482a0c1ea19e732e3530" +dependencies = [ + "crossbeam", + "itertools 0.10.5", + "lab", + "num-traits", + "rayon", + "thiserror 1.0.69", + "v_frame", +] + [[package]] name = "av-scenechange" version = "0.14.1" @@ -253,9 +272,9 @@ dependencies = [ [[package]] name = "aws-config" -version = "1.8.3" +version = "1.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0baa720ebadea158c5bda642ac444a2af0cdf7bb66b46d1e4533de5d1f449d0" +checksum = "1856b1b48b65f71a4dd940b1c0931f9a7b646d4a924b9828ffefc1454714668a" dependencies = [ "aws-credential-types", "aws-runtime", @@ -283,9 +302,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.4" +version = "1.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b68c2194a190e1efc999612792e25b1ab3abfefe4306494efaaabc25933c0cbe" +checksum = "86590e57ea40121d47d3f2e131bfd873dea15d78dc2f4604f4734537ad9e56c4" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -300,6 +319,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c953fe1ba023e6b7730c0d4b031d06f267f23a46167dcbd40316644b10a17ba" dependencies = [ "aws-lc-sys", + "untrusted 0.7.1", "zeroize", ] @@ -318,9 +338,9 @@ dependencies = [ [[package]] name = "aws-runtime" -version = "1.5.9" +version = "1.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2090e664216c78e766b6bac10fe74d2f451c02441d43484cd76ac9a295075f7" +checksum = "8fe0fd441565b0b318c76e7206c8d1d0b0166b3e986cf30e890b61feb6192045" dependencies = [ "aws-credential-types", "aws-sigv4", @@ -343,9 +363,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.100.0" +version = "1.112.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c5eafbdcd898114b839ba68ac628e31c4cfc3e11dfca38dc1b2de2f35bb6270" +checksum = "eee73a27721035c46da0572b390a69fbdb333d0177c24f3d8f7ff952eeb96690" dependencies = [ "aws-credential-types", "aws-runtime", @@ -377,9 
+397,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "1.78.0" +version = "1.89.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbd7bc4bd34303733bded362c4c997a39130eac4310257c79aae8484b1c4b724" +checksum = "a9c1b1af02288f729e95b72bd17988c009aa72e26dcb59b3200f86d7aea726c9" dependencies = [ "aws-credential-types", "aws-runtime", @@ -399,9 +419,9 @@ dependencies = [ [[package]] name = "aws-sdk-ssooidc" -version = "1.79.0" +version = "1.91.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77358d25f781bb106c1a69531231d4fd12c6be904edb0c47198c604df5a2dbca" +checksum = "4e8122301558dc7c6c68e878af918880b82ff41897a60c8c4e18e4dc4d93e9f1" dependencies = [ "aws-credential-types", "aws-runtime", @@ -421,9 +441,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.80.0" +version = "1.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06e3ed2a9b828ae7763ddaed41d51724d2661a50c45f845b08967e52f4939cfc" +checksum = "a0c7808adcff8333eaa76a849e6de926c6ac1a1268b9fd6afe32de9c29ef29d2" dependencies = [ "aws-credential-types", "aws-runtime", @@ -444,9 +464,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.3.3" +version = "1.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfb9021f581b71870a17eac25b52335b82211cdc092e02b6876b2bcefa61666" +checksum = "c35452ec3f001e1f2f6db107b6373f1f48f05ec63ba2c5c9fa91f07dad32af11" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -472,9 +492,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.2.5" +version = "1.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e190749ea56f8c42bf15dd76c65e14f8f765233e6df9b0506d9d934ebef867c" +checksum = "127fcfad33b7dfc531141fda7e1c402ac65f88aca5511a4d31e2e3d2cd01ce9c" dependencies = [ "futures-util", "pin-project-lite", @@ -483,9 +503,9 @@ dependencies = [ [[package]] name = "aws-smithy-checksums" -version = "0.63.5" +version = "0.63.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab9472f7a8ec259ddb5681d2ef1cb1cf16c0411890063e67cdc7b62562cc496" +checksum = "95bd108f7b3563598e4dc7b62e1388c9982324a2abd622442167012690184591" dependencies = [ "aws-smithy-http", "aws-smithy-types", @@ -503,9 +523,9 @@ dependencies = [ [[package]] name = "aws-smithy-eventstream" -version = "0.60.10" +version = "0.60.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604c7aec361252b8f1c871a7641d5e0ba3a7f5a586e51b66bc9510a5519594d9" +checksum = "e29a304f8319781a39808847efb39561351b1bb76e933da7aa90232673638658" dependencies = [ "aws-smithy-types", "bytes", @@ -514,9 +534,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.62.2" +version = "0.62.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43c82ba4cab184ea61f6edaafc1072aad3c2a17dcf4c0fce19ac5694b90d8b5f" +checksum = "445d5d720c99eed0b4aa674ed00d835d9b1427dd73e04adaf2f94c6b2d6f9fca" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -524,6 +544,7 @@ dependencies = [ "bytes", "bytes-utils", "futures-core", + "futures-util", "http 0.2.12", "http 1.3.1", "http-body 0.4.6", @@ -535,9 +556,9 @@ dependencies = [ [[package]] name = "aws-smithy-http-client" -version = "1.0.6" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f108f1ca850f3feef3009bdcc977be201bca9a91058864d9de0684e64514bee0" 
+checksum = "623254723e8dfd535f566ee7b2381645f8981da086b5c4aa26c0c41582bb1d2c" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -558,33 +579,34 @@ dependencies = [ "rustls-native-certs 0.8.1", "rustls-pki-types", "tokio", + "tokio-rustls 0.26.2", "tower", "tracing", ] [[package]] name = "aws-smithy-json" -version = "0.61.4" +version = "0.61.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a16e040799d29c17412943bdbf488fd75db04112d0c0d4b9290bacf5ae0014b9" +checksum = "2db31f727935fc63c6eeae8b37b438847639ec330a9161ece694efba257e0c54" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-observability" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9364d5989ac4dd918e5cc4c4bdcc61c9be17dcd2586ea7f69e348fc7c6cab393" +checksum = "2d1881b1ea6d313f9890710d65c158bdab6fb08c91ea825f74c1c8c357baf4cc" dependencies = [ "aws-smithy-runtime-api", ] [[package]] name = "aws-smithy-query" -version = "0.60.7" +version = "0.60.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +checksum = "d28a63441360c477465f80c7abac3b9c4d075ca638f982e605b7dc2a2c7156c9" dependencies = [ "aws-smithy-types", "urlencoding", @@ -592,9 +614,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.8.5" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "660f70d9d8af6876b4c9aa8dcb0dbaf0f89b04ee9a4455bea1b4ba03b15f26f6" +checksum = "0bbe9d018d646b96c7be063dd07987849862b0e6d07c778aad7d93d1be6c1ef0" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -616,9 +638,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.8.5" +version = "1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "937a49ecf061895fca4a6dd8e864208ed9be7546c0527d04bc07d502ec5fba1c" +checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -633,9 +655,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.3.2" +version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d498595448e43de7f4296b7b7a18a8a02c61ec9349128c80a368f7c3b4ab11a8" +checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e" dependencies = [ "base64-simd", "bytes", @@ -659,18 +681,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.10" +version = "0.60.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3db87b96cb1b16c024980f133968d52882ca0daaee3a086c6decc500f6c99728" +checksum = "eab77cdd036b11056d2a30a7af7b775789fb024bf216acc13884c6c97752ae56" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "1.3.8" +version = "1.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b069d19bf01e46298eaedd7c6f283fe565a59263e53eebec945f3e6398f42390" +checksum = "d79fb68e3d7fe5d4833ea34dc87d2e97d26d3086cb3da660bb6b1f76d98680b6" dependencies = [ "aws-credential-types", "aws-smithy-async", @@ -682,9 +704,9 @@ dependencies = [ [[package]] name = "axum" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" +checksum = "8a18ed336352031311f4e0b4dd2ff392d4fbb370777c9d18d7fc9d7359f73871" 
dependencies = [ "axum-core", "axum-macros", @@ -702,8 +724,7 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite", - "rustversion", - "serde", + "serde_core", "serde_json", "serde_path_to_error", "serde_urlencoded", @@ -717,9 +738,9 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" +checksum = "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22" dependencies = [ "bytes", "futures-core", @@ -728,7 +749,6 @@ dependencies = [ "http-body-util", "mime", "pin-project-lite", - "rustversion", "sync_wrapper", "tower-layer", "tower-service", @@ -737,15 +757,16 @@ dependencies = [ [[package]] name = "axum-extra" -version = "0.10.1" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45bf463831f5131b7d3c756525b305d40f1185b688565648a92e1392ca35713d" +checksum = "5136e6c5e7e7978fe23e9876fb924af2c0f84c72127ac6ac17e7c46f457d362c" dependencies = [ "axum", "axum-core", "bytes", "cookie", "fastrand", + "futures-core", "futures-util", "headers", "http 1.3.1", @@ -754,12 +775,11 @@ dependencies = [ "mime", "multer", "pin-project-lite", - "rustversion", - "serde", + "serde_core", "serde_json", - "tower", "tower-layer", "tower-service", + "tracing", "typed-json", ] @@ -778,10 +798,12 @@ dependencies = [ name = "backend" version = "0.1.0" dependencies = [ + "ammonia", "anyhow", "arc-swap", "argon2", "async-channel", + "av-metrics", "aws-config", "aws-sdk-s3", "axum", @@ -791,11 +813,16 @@ dependencies = [ "bytes", "chrono", "dotenvy", + "dssim", + "futures", "image", + "image-compare", "jsonwebtoken", "lettre", "notify", "notify-debouncer-full", + "once_cell", + "pulldown-cmark", "rand 0.9.2", "ravif 0.12.0", "regex", @@ -806,41 +833,29 @@ dependencies = [ "serde_json", "slug", "sqlx", + "tempfile", "time", "tokio", - "toml 0.9.4", + "tokio-cron-scheduler", + "toml 0.9.8", "tower", "tower-http", + "ua_generator", "url", "uuid", ] [[package]] name = "backon" -version = "1.5.2" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "592277618714fbcecda9a02ba7a8781f319d26532a88553bbacc77ba5d2b3a8d" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" dependencies = [ "fastrand", "gloo-timers", "tokio", ] -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - [[package]] name = "base16ct" version = "0.1.1" @@ -892,7 +907,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash", + "rustc-hash 1.1.0", "shlex", "syn", "which", @@ -960,7 +975,17 @@ checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", - "brotli-decompressor", + "brotli-decompressor 5.0.0", +] + +[[package]] +name = "brotli-decompressor" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a334ef7c9e23abf0ce748e8cd309037da93e606ad52eb372e4ce327a0dcfbdfd" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", ] [[package]] @@ -993,9 +1018,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" 
[[package]] name = "bytemuck" -version = "1.23.1" +version = "1.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" +checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" [[package]] name = "byteorder" @@ -1011,9 +1036,9 @@ checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "bytes-utils" @@ -1036,6 +1061,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -1061,6 +1092,12 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.41" @@ -1073,7 +1110,17 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf 0.12.1", ] [[package]] @@ -1112,6 +1159,16 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "concurrent-queue" version = "2.5.0" @@ -1199,15 +1256,15 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc-fast" -version = "1.3.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bf62af4cc77d8fe1c22dde4e721d87f2f54056139d8c412e1366b740305f56f" +checksum = "6ddc2d09feefeee8bd78101665bd8645637828fa9317f9f292496dbbd8c65ff3" dependencies = [ "crc", "digest", - "libc", "rand 0.9.2", "regex", + "rustversion", ] [[package]] @@ -1219,6 +1276,39 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "croner" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c007081651a19b42931f86f7d4f74ee1c2a7d0cd2c6636a81695b5ffd4e9990" +dependencies = [ + "chrono", + "derive_builder", + "strum", +] + +[[package]] +name = "crossbeam" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.6" @@ -1300,7 +1390,20 @@ dependencies = [ "cssparser-macros", "dtoa-short", "itoa", - "phf", + "phf 0.11.3", + "smallvec", +] + +[[package]] +name = "cssparser" +version = "0.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dae61cf9c0abb83bd659dab65b7e4e38d8236824c85f0f804f173567bda257d2" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa", + "phf 0.13.1", "smallvec", ] @@ -1314,6 +1417,41 @@ dependencies = [ "syn", ] +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + [[package]] name = "der" version = "0.6.1" @@ -1344,6 +1482,37 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + [[package]] name = "derive_more" version = "2.0.1" @@ -1399,6 +1568,35 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dssim" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eaa333e059ea555ff38041ef8245d9b9207a208ed221126a516b840a7a5562f" +dependencies = [ + "crossbeam-channel", + "dssim-core", + "getopts", + "imgref", + "load_image", + "lodepng", + "ordered-channel", + "rayon", + "rgb", +] + +[[package]] +name = "dssim-core" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c601412450ff29a9258b2f85b18b38f658caf70fad1692f40ca863d86cb753" +dependencies = [ + "imgref", + "itertools 0.14.0", + "rayon", + "rgb", +] + [[package]] name = "dtoa" version = "1.0.10" @@ -1581,6 +1779,26 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "fax" +version = "0.2.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "fdeflate" version = "0.3.7" @@ -1616,6 +1834,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", + "libz-rs-sys", "miniz_oxide", ] @@ -1644,18 +1863,30 @@ checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "foreign-types" -version = "0.3.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" dependencies = [ + "foreign-types-macros", "foreign-types-shared", ] +[[package]] +name = "foreign-types-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "foreign-types-shared" -version = "0.1.1" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" +checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" [[package]] name = "form_urlencoded" @@ -1691,6 +1922,21 @@ dependencies = [ "new_debug_unreachable", ] +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.31" @@ -1735,6 +1981,17 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.31" @@ -1753,8 +2010,10 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", "futures-io", + "futures-macro", "futures-sink", "futures-task", "memchr", @@ -1763,15 +2022,6 @@ dependencies = [ "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -1784,9 +2034,9 @@ dependencies = [ [[package]] name = "getopts" -version = "0.2.23" +version = "0.2.24" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cba6ae63eb948698e300f645f87c70f76630d505f23b8907cf1e193ee85048c1" +checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ "unicode-width", ] @@ -1811,9 +2061,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", ] [[package]] @@ -1826,12 +2078,6 @@ dependencies = [ "weezl", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "glob" version = "0.3.2" @@ -1930,6 +2176,12 @@ dependencies = [ "foldhash", ] +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + [[package]] name = "hashlink" version = "0.10.0" @@ -2009,10 +2261,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4" dependencies = [ "log", - "markup5ever", + "markup5ever 0.35.0", "match_token", ] +[[package]] +name = "html5ever" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6452c4751a24e1b99c3260d505eaeee76a050573e61f30ac2c924ddc7236f01e" +dependencies = [ + "log", + "markup5ever 0.36.1", +] + [[package]] name = "http" version = "0.2.12" @@ -2157,22 +2419,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.2", "tower-service", -] - -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", + "webpki-roots 1.0.2", ] [[package]] @@ -2311,6 +2558,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "1.0.3" @@ -2334,9 +2587,9 @@ dependencies = [ [[package]] name = "image" -version = "0.25.6" +version = "0.25.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db35664ce6b9810857a38a906215e75a9c879f0696556a39f59c62829710251a" +checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7" dependencies = [ "bytemuck", "byteorder-lite", @@ -2344,6 +2597,7 @@ dependencies = [ "exr", "gif", "image-webp", + "moxcms", "num-traits", "png", "qoi", @@ -2355,6 +2609,18 @@ dependencies = [ "zune-jpeg", ] +[[package]] +name = "image-compare" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bf712e96694f43e33b8394265e4d4bc06998c3648718148e4584d80dc3b3165" +dependencies = [ + "image", + "itertools 0.14.0", + "rayon", + "thiserror 2.0.12", +] + [[package]] name = "image-webp" version = "0.2.3" @@ -2373,12 +2639,12 @@ checksum = "d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" [[package]] name = "indexmap" -version = "2.10.0" +version = "2.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "equivalent", - "hashbrown 0.15.4", + "hashbrown 0.16.0", ] [[package]] @@ -2412,17 +2678,6 @@ dependencies = [ "syn", ] -[[package]] -name = "io-uring" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" -dependencies = [ - "bitflags 2.9.1", - "cfg-if", - "libc", -] - [[package]] name = "ipnet" version = "2.11.0" @@ -2439,6 +2694,15 @@ dependencies = [ "serde", ] +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.12.1" @@ -2463,6 +2727,28 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.33" @@ -2478,6 +2764,9 @@ name = "jpeg-decoder" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07" +dependencies = [ + "rayon", +] [[package]] name = "js-sys" @@ -2491,16 +2780,18 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "9.3.1" +version = "10.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +checksum = "c76e1c7d7df3e34443b3621b459b066a7b79644f059fc8b2db7070c825fd417e" dependencies = [ + "aws-lc-rs", "base64 0.22.1", + "getrandom 0.2.16", "js-sys", "pem", - "ring", "serde", "serde_json", + "signature 2.2.0", "simple_asn1", ] @@ -2524,6 +2815,12 @@ dependencies = [ "libc", ] +[[package]] +name = "lab" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f" + [[package]] name = "lazy_static" version = "1.5.0" @@ -2539,6 +2836,29 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "lcms2" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b75877b724685dd49310bdbadbf973fc69b1d01992a6d4a861b928fc3943f87b" +dependencies = [ + "bytemuck", + "foreign-types", + "lcms2-sys", +] + +[[package]] +name = "lcms2-sys" +version = "4.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c2604b23848ca80b2add60f0fb2270fd980e622c25029b6597fa01cfd5f8d5f" +dependencies = [ + "cc", + "dunce", + "libc", + "pkg-config", +] + [[package]] name = "lebe" 
version = "0.5.2" @@ -2566,7 +2886,7 @@ dependencies = [ "percent-encoding", "quoted_printable", "rustls 0.23.31", - "rustls-native-certs 0.8.1", + "rustls-platform-verifier", "socket2 0.6.0", "tokio", "tokio-rustls 0.26.2", @@ -2615,6 +2935,15 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "libz-rs-sys" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "840db8cf39d9ec4dd794376f38acc40d0fc65eec2a8f484f7fd375b84602becd" +dependencies = [ + "zlib-rs", +] + [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -2633,6 +2962,22 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +[[package]] +name = "load_image" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13af80aa49d27d6334430fcc8aa55c01acdf23209da94f670e217650320644c0" +dependencies = [ + "bytemuck", + "imgref", + "jpeg-decoder", + "lcms2", + "lodepng", + "quick-error", + "rexif", + "rgb", +] + [[package]] name = "lock_api" version = "0.4.13" @@ -2643,6 +2988,17 @@ dependencies = [ "scopeguard", ] +[[package]] +name = "lodepng" +version = "3.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a32335d22e44238e2bb0b4d726964d18952ce1f1279ec3305305d2c61539eb" +dependencies = [ + "crc32fast", + "flate2", + "rgb", +] + [[package]] name = "log" version = "0.4.27" @@ -2667,12 +3023,24 @@ dependencies = [ "hashbrown 0.15.4", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "mac" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + [[package]] name = "markup5ever" version = "0.35.0" @@ -2681,7 +3049,18 @@ checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3" dependencies = [ "log", "tendril", - "web_atoms", + "web_atoms 0.1.3", +] + +[[package]] +name = "markup5ever" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c3294c4d74d0742910f8c7b466f44dda9eb2d5742c1e430138df290a1e8451c" +dependencies = [ + "log", + "tendril", + "web_atoms 0.2.0", ] [[package]] @@ -2761,6 +3140,16 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "moxcms" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd32fa8935aeadb8a8a6b6b351e40225570a37c43de67690383d87ef170cd08" +dependencies = [ + "num-traits", + "pxfm", +] + [[package]] name = "multer" version = "3.1.0" @@ -2787,23 +3176,6 @@ dependencies = [ "jobserver", ] -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework 2.11.1", - "security-framework-sys", - "tempfile", -] - [[package]] name = "new_debug_unreachable" version = "1.0.6" @@ -2957,47 +3329,12 @@ dependencies = [ "libm", ] -[[package]] -name = "object" 
-version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -[[package]] -name = "openssl" -version = "0.10.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" -dependencies = [ - "bitflags 2.9.1", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "openssl-probe" version = "0.1.6" @@ -3005,16 +3342,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] -name = "openssl-sys" -version = "0.9.109" +name = "ordered-channel" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] +checksum = "95be4d57809897b5a7539fc15a7dfe0e84141bc3dfaa2e9b1b27caa90acf61ab" [[package]] name = "outref" @@ -3116,8 +3447,28 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_macros", - "phf_shared", + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared 0.12.1", +] + +[[package]] +name = "phf" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" +dependencies = [ + "phf_macros 0.13.1", + "phf_shared 0.13.1", + "serde", ] [[package]] @@ -3126,8 +3477,18 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_codegen" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49aa7f9d80421bca176ca8dbfebe668cc7a2684708594ec9f3c0db0805d5d6e1" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", ] [[package]] @@ -3136,18 +3497,41 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared", + "phf_shared 0.11.3", "rand 0.8.5", ] +[[package]] +name = "phf_generator" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" +dependencies = [ + "fastrand", + "phf_shared 0.13.1", +] + [[package]] name = "phf_macros" 
version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "phf_macros" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", "proc-macro2", "quote", "syn", @@ -3162,6 +3546,24 @@ dependencies = [ "siphasher", ] +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + +[[package]] +name = "phf_shared" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" +dependencies = [ + "siphasher", +] + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -3213,11 +3615,11 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "png" -version = "0.17.16" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.1", "crc32fast", "fdeflate", "flate2", @@ -3302,19 +3704,102 @@ dependencies = [ ] [[package]] -name = "qoi" -version = "0.4.1" +name = "pulldown-cmark" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e8bbe1a966bd2f362681a44f6edce3c2310ac21e4d5067a6e7ec396297a6ea0" +dependencies = [ + "bitflags 2.9.1", + "getopts", + "memchr", + "pulldown-cmark-escape", + "unicase", +] + +[[package]] +name = "pulldown-cmark-escape" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" + +[[package]] +name = "pxfm" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83f9b339b02259ada5c0f4a389b7fb472f933aa17ce176fd2ad98f28bb401fde" +dependencies = [ + "num-traits", +] + +[[package]] +name = "qoi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.1", + "rustls 0.23.31", + "socket2 0.6.0", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" 
+checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ - "bytemuck", + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash 2.1.1", + "rustls 0.23.31", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + "tinyvec", + "tracing", + "web-time", ] [[package]] -name = "quick-error" -version = "2.0.1" +name = "quinn-udp" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.6.0", + "tracing", + "windows-sys 0.60.2", +] [[package]] name = "quote" @@ -3529,9 +4014,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -3541,9 +4026,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -3578,21 +4063,21 @@ dependencies = [ "http-body-util", "hyper 1.6.0", "hyper-rustls 0.27.7", - "hyper-tls", "hyper-util", "js-sys", "log", "mime", - "native-tls", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.31", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-native-tls", + "tokio-rustls 0.26.2", "tower", "tower-http", "tower-service", @@ -3600,8 +4085,15 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", + "webpki-roots 1.0.2", ] +[[package]] +name = "rexif" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be932047c168919c8d5af065b16fa7d4bd24835ffa256bf0cf1ff463f91c15df" + [[package]] name = "rfc6979" version = "0.3.1" @@ -3632,7 +4124,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -3657,16 +4149,16 @@ dependencies = [ ] [[package]] -name = "rustc-demangle" -version = "0.1.26" +name = "rustc-hash" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "1.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" @@ -3770,9 +4262,37 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" dependencies = [ + "web-time", "zeroize", ] +[[package]] +name = "rustls-platform-verifier" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"be59af91596cac372a6942530653ad0c3a246cdd491aaa9dcaee47f88d67d5a0" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls 0.23.31", + "rustls-native-certs 0.8.1", + "rustls-platform-verifier-android", + "rustls-webpki 0.103.4", + "security-framework 3.2.0", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -3780,7 +4300,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ "ring", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -3792,7 +4312,7 @@ dependencies = [ "aws-lc-rs", "ring", "rustls-pki-types", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -3833,14 +4353,14 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scraper" -version = "0.24.0" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5f3a24d916e78954af99281a455168d4a9515d65eca99a18da1b813689c4ad9" +checksum = "93cecd86d6259499c844440546d02f55f3e17bd286e529e48d1f9f67e92315cb" dependencies = [ - "cssparser", + "cssparser 0.36.0", "ego-tree", "getopts", - "html5ever", + "html5ever 0.36.1", "precomputed-hash", "selectors", "tendril", @@ -3853,7 +4373,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -3908,19 +4428,19 @@ dependencies = [ [[package]] name = "selectors" -version = "0.31.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5685b6ae43bfcf7d2e7dfcfb5d8e8f61b46442c902531e41a32a9a8bf0ee0fb6" +checksum = "feef350c36147532e1b79ea5c1f3791373e61cbd9a6a2615413b3807bb164fb7" dependencies = [ "bitflags 2.9.1", - "cssparser", + "cssparser 0.36.0", "derive_more", - "fxhash", "log", "new_debug_unreachable", - "phf", - "phf_codegen", + "phf 0.13.1", + "phf_codegen 0.13.1", "precomputed-hash", + "rustc-hash 2.1.1", "servo_arc", "smallvec", ] @@ -3933,18 +4453,28 @@ checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -3953,14 +4483,15 @@ dependencies = [ [[package]] name = "serde_json" -version = 
"1.0.142" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -3984,11 +4515,11 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -4005,9 +4536,9 @@ dependencies = [ [[package]] name = "servo_arc" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "204ea332803bd95a0b60388590d59cf6468ec9becf626e2451f1d26a1d972de4" +checksum = "170fb83ab34de17dc69aa7c67482b22218ddb85da56546f9bd6b929e32a05930" dependencies = [ "stable_deref_trait", ] @@ -4397,7 +4928,20 @@ checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", - "phf_shared", + "phf_shared 0.11.3", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a18596f8c785a729f2819c0f6a7eae6ebeebdfffbfe4214ae6b087f690e31901" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared 0.13.1", "precomputed-hash", "serde", ] @@ -4408,8 +4952,20 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ - "phf_generator", - "phf_shared", + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", +] + +[[package]] +name = "string_cache_codegen" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "585635e46db231059f76c5849798146164652513eb9e8ab2685939dd90f29b69" +dependencies = [ + "phf_generator 0.13.1", + "phf_shared 0.13.1", "proc-macro2", "quote", ] @@ -4425,6 +4981,33 @@ dependencies = [ "unicode-properties", ] +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" @@ -4504,15 +5087,15 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", "rustix 
1.0.8", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4568,13 +5151,16 @@ dependencies = [ [[package]] name = "tiff" -version = "0.9.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" +checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" dependencies = [ + "fax", "flate2", - "jpeg-decoder", + "half", + "quick-error", "weezl", + "zune-jpeg", ] [[package]] @@ -4635,43 +5221,46 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.47.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "slab", "socket2 0.6.0", "tokio-macros", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] -name = "tokio-macros" -version = "2.5.0" +name = "tokio-cron-scheduler" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "1f50e41f200fd8ed426489bd356910ede4f053e30cebfbd59ef0f856f0d7432a" dependencies = [ - "proc-macro2", - "quote", - "syn", + "chrono", + "chrono-tz", + "croner", + "num-derive", + "num-traits", + "tokio", + "tracing", + "uuid", ] [[package]] -name = "tokio-native-tls" -version = "0.3.1" +name = "tokio-macros" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ - "native-tls", - "tokio", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -4732,14 +5321,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.4" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41ae868b5a0f67631c14589f7e250c1ea2c574ee5ba21c6c8dd4b1485705a5a1" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ "indexmap", - "serde", - "serde_spanned 1.0.0", - "toml_datetime 0.7.0", + "serde_core", + "serde_spanned 1.0.3", + "toml_datetime 0.7.3", "toml_parser", "toml_writer", "winnow", @@ -4756,11 +5345,11 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -4778,18 +5367,18 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" +checksum = 
"df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "tower" @@ -4802,6 +5391,7 @@ dependencies = [ "pin-project-lite", "sync_wrapper", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -4895,6 +5485,26 @@ version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +[[package]] +name = "ua_generator" +version = "0.5.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8003d480b7ac56acfae9f7bed79d52925a6a263b895c981e6cb5c52a5d627640" +dependencies = [ + "dotenvy", + "fastrand", + "serde", + "serde_json", + "toml 0.9.8", + "ureq", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + [[package]] name = "unicode-bidi" version = "0.3.18" @@ -4928,12 +5538,38 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "ureq" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" +dependencies = [ + "base64 0.22.1", + "brotli-decompressor 4.0.3", + "encoding_rs", + "flate2", + "log", + "once_cell", + "rustls 0.23.31", + "rustls-pki-types", + "serde", + "serde_json", + "url", + "webpki-roots 0.26.11", +] + [[package]] name = "url" version = "2.5.4" @@ -4965,13 +5601,13 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.17.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" dependencies = [ "getrandom 0.3.3", "js-sys", - "serde", + "serde_core", "wasm-bindgen", ] @@ -5131,16 +5767,47 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "web_atoms" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414" dependencies = [ - "phf", - "phf_codegen", - "string_cache", - "string_cache_codegen", + "phf 0.11.3", + "phf_codegen 0.11.3", + "string_cache 0.8.9", + "string_cache_codegen 0.5.4", +] + +[[package]] +name = "web_atoms" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acd0c322f146d0f8aad130ce6c187953889359584497dac6561204c8e17bb43d" +dependencies = [ + "phf 0.13.1", + "phf_codegen 0.13.1", + "string_cache 0.9.0", + "string_cache_codegen 0.6.1", +] + +[[package]] +name = "webpki-root-certs" +version 
= "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4ffd8df1c57e87c325000a3d6ef93db75279dc3a231125aac571650f22b12a" +dependencies = [ + "rustls-pki-types", ] [[package]] @@ -5206,7 +5873,7 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", - "windows-link", + "windows-link 0.1.3", "windows-result", "windows-strings", ] @@ -5239,13 +5906,19 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-registry" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ - "windows-link", + "windows-link 0.1.3", "windows-result", "windows-strings", ] @@ -5256,7 +5929,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -5265,7 +5938,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", ] [[package]] @@ -5304,6 +5986,30 @@ dependencies = [ "windows-targets 0.53.3", ] +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -5341,7 +6047,7 @@ version = "0.53.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" dependencies = [ - "windows-link", + "windows-link 0.1.3", "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", @@ -5352,6 +6058,12 @@ dependencies = [ "windows_x86_64_msvc 0.53.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -5370,6 +6082,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -5388,6 +6106,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -5418,6 +6142,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -5436,6 +6166,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -5454,6 +6190,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -5472,6 +6214,12 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -5492,9 +6240,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -5630,6 +6378,12 @@ dependencies = [ "syn", ] +[[package]] +name = "zlib-rs" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2" + [[package]] name = "zstd" version = "0.13.3" diff --git a/Cargo.toml b/Cargo.toml index 7f3f32b..1b1b300 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,19 +4,26 @@ members = [ ] resolver = "3" +[workspace.lints.clippy] +redundant_clone = "deny" +panic = "deny" +unwrap_used = 
"deny" +panicking_unwrap = "deny" +implicit_clone = "deny" +perf = "deny" + [profile.dev.package.sqlx-macros] opt-level = 3 -[profile.deploy] +[profile.deployment] inherits = "release" - debug = false opt-level = 3 strip = "none" overflow-checks = false debug-assertions = false rpath = false -lto = "fat" -panic = "unwind" +lto = "thin" +panic = "abort" codegen-units = 1 incremental = false \ No newline at end of file diff --git a/Taskfile.yaml b/Taskfile.yaml index 9f5b140..4bb6252 100644 --- a/Taskfile.yaml +++ b/Taskfile.yaml @@ -5,21 +5,21 @@ tasks: cmds: - cargo run {{.CLI_ARGS}} - release: + dev-fast: cmds: - - task: backend -- --release - + - cargo run --release + + optimize: + cmds: + - cargo run --profile deployment + frontend: cmds: - bun run dev dir: ./frontend - dev: - cmds: - - task: run-backend - - task: run-frontend - - dev-release: + prod: cmds: - - task: run-backend -- --release - - task: run-frontend + - bun run build + - bun run preview + dir: ./frontend \ No newline at end of file diff --git a/backend/.gitignore b/backend/.gitignore deleted file mode 100644 index e69de29..0000000 diff --git a/backend/Cargo.toml b/backend/Cargo.toml index a60a6b3..95f01f5 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -4,56 +4,86 @@ version = "0.1.0" edition = "2024" resolver = "3" license = "AGPL-3" -rust-version = "1.89.0" +rust-version = "1.91.0" [[bin]] name = "backend" path = "src/main.rs" [dependencies] -anyhow = "1.0.98" -axum = { version = "0.8.4", features = ["macros"] } +anyhow = "1.0.100" +axum = { version = "0.8.6", features = ["macros"] } chrono = { version = "0.4.41", features = ["serde"] } -image = "0.25.6" -jsonwebtoken = "9.3.1" +image = { version = "0.25.8", features = ["png", "jpeg"] } +jsonwebtoken = { version = "10.2.0", features = ["aws_lc_rs"] } rand = "0.9.1" ravif = { version = "0.12.0", features = ["default"] } -regex = "1.11.1" -reqwest = "0.12.23" -scraper = "0.24.0" +regex = "1.12.2" +reqwest = { version = "0.12.23", default-features = false, features = [ + "http2", + "system-proxy", + "charset", + "rustls-tls"] } +scraper = "0.25.0" serde = { version = "1.0.219", features = ["derive"] } -tokio = { version = "~1.47.0", features = ["full"] } -toml = "0.9.0" +tokio = { version = "1.48.0", features = ["full"] } +toml = "0.9.8" url = "2.5.4" -axum-extra = { version = "0.10.1", features = ["typed-header", "erased-json", "cookie", "multipart"] } -axum-core = "0.5.2" -serde_json = "1.0.140" +axum-extra = { version = "0.12.1", features = [ + "typed-header", + "erased-json", + "cookie", + "multipart"] } +axum-core = "0.5.5" +serde_json = "1.0.145" time = "0.3.41" -rgb = "0.8.50" -aws-sdk-s3 = "1.100.0" -aws-config = { version = "~1.8.0", features = ["behavior-version-latest"] } -bytes = "1.10.1" +rgb = "0.8.52" +aws-sdk-s3 = "1.112.0" +aws-config = { version = "1.8.10", features = ["behavior-version-latest"] } +bytes = "1.11.0" slug = "0.1.6" -backon = { version = "1.5.2", features = ["default", "tokio"] } +backon = { version = "1.6.0", features = [ + "default", + "tokio"] } dotenvy = "0.15.7" -tower-http = { version = "0.6.6", features = ["cors", "tower", "compression-full", "async-compression", "timeout"] } +tower-http = { version = "0.6.6", features = [ + "cors", + "tower", + "compression-full", + "async-compression", + "timeout"] } argon2 = "0.5.3" -sqlx = { version = "0.8.6", features = ["postgres", "chrono", "runtime-tokio-rustls", - "macros", "runtime-tokio", "migrate"] } -uuid = { version = "1.17.0", features = ["v4", "serde"] } -lettre = { version 
= "0.11.17", default-features = false, features = [ +sqlx = { version = "0.8.6", features = [ + "postgres", + "chrono", + "runtime-tokio-rustls", + "macros", + "runtime-tokio", + "migrate", + "tls-rustls"] } +uuid = { version = "1.19.0", features = ["v4", "serde"] } +lettre = { version = "0.11.18", default-features = false, features = [ "tokio1-rustls", "builder", "smtp-transport", "pool", "rustls-platform-verifier", - "ring" + "aws-lc-rs" ] } async-channel = "2.5.0" -tower = "0.5.2" +tower = { version = "0.5.2", features = ["limit"] } notify = "8.2.0" notify-debouncer-full = "0.6.0" arc-swap = "1.7.1" pulldown-cmark = "0.13.0" ammonia = "4.1.1" once_cell = "1.21.3" +futures = "0.3.31" +ua_generator = "0.5.36" +tokio-cron-scheduler = "0.15.1" + +[dev-dependencies] +tempfile = "3.23" +av-metrics = "0.9" +dssim = "3.2" +image-compare = "0.4.2" \ No newline at end of file diff --git a/backend/Dockerfile b/backend/Dockerfile index 01f31dd..0b39547 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -24,15 +24,19 @@ COPY .sqlx ./.sqlx COPY backend/src ./backend/src COPY Cargo.toml Cargo.lock ./ COPY backend/Cargo.toml ./backend/ +#COPY migrations ./migrations -RUN cargo build --release --profile deploy --bin backend +RUN cargo build --profile deploy --bin backend -FROM debian:trixie-slim AS runtime +FROM ubuntu:25.04 AS runtime WORKDIR /app # Fix shared libraries: libssl.so.3. RUN apt-get update && apt-get install -y libssl3 && rm -rf /var/lib/apt/lists/* COPY --from=builder /app/target/release/backend /usr/local/bin/ +RUN chmod +x backend + +EXPOSE 8000 CMD ["backend"] diff --git a/backend/config_sites.toml b/backend/config_sites.toml index fb9da80..69e7d58 100644 --- a/backend/config_sites.toml +++ b/backend/config_sites.toml @@ -1,4 +1,3 @@ -# config.toml [sites."www.mgeko.cc"] chapter_link_selector = "#chpagedlist ul.chapter-list li a" chapter_number_from_url_regex = "chapter-([\\d]+(?:[.-][\\d]+)?)" @@ -9,14 +8,15 @@ image_url_attribute = "src" image_url_fallback_attributes = ["data-src", "data-lazy-src"] chapter_order = "desc" -#[sites."harimanga.me"] -#chapter_link_selecctor = "" -#chapter_number_from_url_regex = "chapter-([\\d]+(?:[.-][\\d]+)?)" -#chapter_number_from_text_regex = "([\\d]+(?:[.-][\\d]+)?)" -#chapter_number_data_attribute_on_parent = "" -#image_selector_on_chapter_page = "div#reading-content img" -#image_url_attribute = "src" -#image_url_fallback_attributes = ["data-src", "data-lazy-src"] +[sites."harimanga.me"] +chapter_link_selector = "div.listing-chapters_wrap ul.main li a" +chapter_number_from_url_regex = "chapter-([\\d]+(?:[.-][\\d]+)?)" +chapter_number_from_text_regex = "([\\d]+(?:[.-][\\d]+)?)" +chapter_number_data_attribute_on_parent = "" +image_selector_on_chapter_page = "div.reading-content div img" +image_url_attribute = "src" +image_url_fallback_attributes = ["data-src", "data-lazy-src"] +chapter_order = "desc" #[sites."demonicscans.org"] #image_selector_on_chapter_page = "div.main-width.center-m img" diff --git a/backend/src/api/admin/admin_comment_handler.rs b/backend/src/api/admin/admin_comment_handler.rs new file mode 100644 index 0000000..c10eade --- /dev/null +++ b/backend/src/api/admin/admin_comment_handler.rs @@ -0,0 +1,107 @@ +use axum::extract::{Path, State}; +use axum::http::StatusCode; +use axum::Json; +use axum_core::__private::tracing::{error, info}; +use axum_core::response::{IntoResponse, Response}; +use serde_json::json; + +use crate::api::extractor::ModeratorOrHigherUser; +use crate::builder::startup::AppState; +use 
crate::database::{DeleteCommentResult, UpdateCommentResponse}; + +pub async fn admin_delete_comment_handler( + auth: ModeratorOrHigherUser, + State(state): State<AppState>, + Path(comment_id): Path<i32>, +) -> Response { + println!( + "->> {:<12} - admin_delete_comment - mod: {}, comment_id: {}", + "HANDLER", auth.0.username, comment_id + ); + + let user_info = format!("{} (ID: {})", auth.0.username, auth.0.id); + let requestor_role_id = auth.0.role as i32; + + match state + .db_service + .admin_delete_comment(comment_id, requestor_role_id) + .await + { + Ok(result) => { + let mut soft_deleted_data: Option<UpdateCommentResponse> = None; + let mut delete_type = "hard_delete"; + + let (status_msg, files_to_delete) = match result { + DeleteCommentResult::NotFound => { + return ( + StatusCode::NOT_FOUND, + Json(json!({ + "status": "error", + "message": format!("Comment with id {} not found", comment_id) + })), + ) + .into_response(); + } + DeleteCommentResult::InsufficientPermissions => { + return ( + StatusCode::FORBIDDEN, + Json(json!({ + "status": "error", + "message": "You do not have permission to delete this user's comment" + })), + ) + .into_response(); + } + DeleteCommentResult::SoftDeleted(updated_comment, keys) => { + soft_deleted_data = Some(updated_comment); + delete_type = "soft_delete"; + ("Comment soft-deleted (replies exist)", keys) + } + DeleteCommentResult::HardDeleted(keys) => ("Comment hard-deleted", keys), + }; + + let files_count = files_to_delete.len(); + + if !files_to_delete.is_empty() { + let storage = state.storage_client.clone(); + let mod_name = auth.0.username; + + tokio::spawn(async move { + match storage.delete_image_objects(&files_to_delete).await { + Ok(_) => info!( + "Background: Deleted {} files for comment {} by {}", + files_count, comment_id, mod_name + ), + Err(e) => error!( + "Background: Failed to delete files for comment {}: {:?}", + comment_id, e + ), + } + }); + } + ( + StatusCode::OK, + Json(json!({ + "status": "success", + "message": status_msg, + "action_type": delete_type, + "comment": soft_deleted_data, + "deleted_files_count": files_count, + "moderated_by": user_info + })), + ) + .into_response() + } + Err(e) => { + error!("Failed to admin delete comment: {:?}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(json!({ + "status": "error", + "message": "Internal server error during comment deletion" + })), + ) + .into_response() + } + } +} diff --git a/backend/src/api/admin/admin_routes.rs b/backend/src/api/admin/admin_routes.rs new file mode 100644 index 0000000..4d31829 --- /dev/null +++ b/backend/src/api/admin/admin_routes.rs @@ -0,0 +1,62 @@ +use axum::routing::{delete, get, patch, post}; +use axum::Router; + +use crate::api::admin::admin_comment_handler::admin_delete_comment_handler; +use crate::api::admin::admin_series_handlers::{ + create_category_tag_handler, create_new_series_handler, delete_category_tag_handler, + delete_chapter_handler, delete_series_handler, get_all_paginated_series_handler, + get_list_category_tags_handler, get_series_category_tags_handler, get_series_chapter_handler, + repair_chapter_handler, update_existing_series_handler, upload_series_cover_image_handler, +}; +use crate::api::admin::admin_user_handler::{ + delete_user_handler, get_all_paginated_users_handler, update_user_metadata_handler, +}; +use crate::builder::startup::AppState; + +/// Admin User management routes +fn admin_user_routes() -> Router<AppState> { + Router::new() + .route("/users/delete/{id}", delete(delete_user_handler)) + .route("/users/update/{id}", patch(update_user_metadata_handler))
.route( + "/users/paginated/list-search", + get(get_all_paginated_users_handler), + ) + .route( + "/comments/delete/{id}", + delete(admin_delete_comment_handler), + ) +} + +/// Admin Series management routes +fn admin_series_routes() -> Router<AppState> { + Router::new() + .route("/series/add", post(create_new_series_handler)) + .route("/series/chapter-list/{id}", get(get_series_chapter_handler)) + .route( + "/series/delete/{id}/chapter/{chapter_number}", + delete(delete_chapter_handler), + ) + .route("/series/delete/{id}", delete(delete_series_handler)) + .route("/series/repair/chapter/{id}", post(repair_chapter_handler)) + .route( + "/series/paginated/list-search", + get(get_all_paginated_series_handler), + ) + .route("/series/update/{id}", patch(update_existing_series_handler)) + .route("/series/tags/{id}", get(get_series_category_tags_handler)) + .route( + "/series/cover/upload/image", + post(upload_series_cover_image_handler), + ) + .route("/category/tag/add", post(create_category_tag_handler)) + .route( + "/category/tag/delete/{id}", + delete(delete_category_tag_handler), + ) + .route("/category/tag/list", get(get_list_category_tags_handler)) +} + +pub fn admin_routes() -> Router<AppState> { + admin_series_routes().merge(admin_user_routes()) +} diff --git a/backend/src/api/admin_handlers.rs b/backend/src/api/admin/admin_series_handlers.rs similarity index 75% rename from backend/src/api/admin_handlers.rs rename to backend/src/api/admin/admin_series_handlers.rs index 72e6ebb..9210bd7 100644 --- a/backend/src/api/admin_handlers.rs +++ b/backend/src/api/admin/admin_series_handlers.rs @@ -1,32 +1,26 @@ -use crate::api::extractor::AdminUser; -use crate::builder::startup::AppState; -use crate::database::{NewSeriesData, Series, UpdateSeriesData}; -use crate::task_workers::repair_chapter_worker; -use crate::task_workers::series_check_worker::SeriesCheckJob; -use axum::Json; use axum::extract::{Path, Query, State}; use axum::http::StatusCode; -use axum_core::__private::tracing::error; +use axum::Json; +use axum_core::__private::tracing::warn; use axum_core::response::{IntoResponse, Response}; use axum_extra::extract::Multipart; use rand::Rng; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use uuid::Uuid; -#[derive(Deserialize)] -pub struct CreateSeriesRequest { - title: String, - original_title: Option<String>, - authors: Option<Vec<String>>, - description: String, - cover_image_url: String, - source_url: String, - category_ids: Vec<i32>, -} +use crate::api::admin::{ + CreateCategoryTagRequest, CreateSeriesRequest, PaginatedResponse, PaginationParams, + RepairChapterRequest, SeriesResponse, UpdateSeriesRequest, UploadCoverImageResponse, +}; +use crate::api::extractor::{AdminOrHigherUser, SuperAdminUser}; +use crate::builder::startup::AppState; +use crate::database::{NewSeriesData, Series, UpdateSeriesData}; +use crate::task_workers::repair_chapter_worker; +use crate::task_workers::series_check_worker::SeriesCheckJob; -// Admin endpoint to create new series +// Create new series pub async fn create_new_series_handler( - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State<AppState>, Json(payload): Json<CreateSeriesRequest>, ) -> Response { @@ -52,22 +46,18 @@ pub async fn create_new_series_handler( }; // Create new series in DB - let new_series_id = match db_service.add_new_series(&new_series_data).await - { + let new_series_id = match db_service.add_new_series(&new_series_data).await { Ok(id) => id, Err(e) => { return ( StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": e.to_string()})) +
Json(serde_json::json!({"status": "error", "message": e.to_string()})), ) .into_response(); } }; - let fetch_new_series: Series = match db_service - .get_series_by_id(new_series_id) - .await - { + let fetch_new_series: Series = match db_service.get_series_by_id(new_series_id).await { Ok(Some(series)) => series, _ => { eprintln!("Error fetching new series from DB: {}", new_series_id); @@ -94,7 +84,6 @@ pub async fn create_new_series_handler( new_series_id ); } - ( StatusCode::CREATED, Json(serde_json::json!({"status": "success", "id": new_series_id, "message": "Series created and scheduled for immediate scraping"})), @@ -102,21 +91,10 @@ pub async fn create_new_series_handler( .into_response() } -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct UpdateSeriesRequest { - title: Option, - original_title: Option, - authors: Option>, - description: Option, - cover_image_url: Option, - source_url: Option, - category_ids: Option>, -} - +// Update existing series data pub async fn update_existing_series_handler( Path(series_id): Path, - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State, Json(payload): Json, ) -> Response { @@ -160,12 +138,7 @@ pub async fn update_existing_series_handler( } } -#[derive(Serialize)] -pub struct UploadResponse { - status: String, - url: String, -} - +// Upload series cover image pub async fn upload_series_cover_image_handler( State(state): State, mut multipart: Multipart, @@ -192,8 +165,7 @@ pub async fn upload_series_cover_image_handler( .and_then(std::ffi::OsStr::to_str) .unwrap_or("jpg"); - let unique_image_key = - format!("cover-manga/{}.{}", Uuid::new_v4(), file_extension); + let unique_image_key = format!("cover-manga/{}.{}", Uuid::new_v4(), file_extension); match state .storage_client @@ -202,15 +174,10 @@ pub async fn upload_series_cover_image_handler( { Ok(key) => { // Construct the public URL - let public_url = format!( - "{}/{}", - state.storage_client.domain_cdn_url(), - &key - ); - + let public_url = format!("{}/{}", state.storage_client.domain_cdn_url(), &key); ( StatusCode::OK, - Json(UploadResponse { + Json(UploadCoverImageResponse { status: "success".to_string(), url: public_url, }), @@ -227,51 +194,16 @@ pub async fn upload_series_cover_image_handler( } } else { ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({"status": "error", "message": "No cover image file found"})) - ).into_response() + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"status": "error", "message": "No cover image file found"})), + ) + .into_response() } } -#[derive(Deserialize)] -pub struct PaginationParams { - #[serde(default = "default_page")] - page: u32, - #[serde(default = "default_page_size")] - page_size: u32, - #[serde(default)] - search: Option, -} - -fn default_page() -> u32 { - 1 -} -fn default_page_size() -> u32 { - 25 -} -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct SeriesResponse { - id: i32, - title: String, - original_title: Option, - description: String, - cover_image_url: String, - source_url: String, - authors: Vec, - last_updated: String, - processing_status: String, -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct PaginatedResponse { - items: Vec, - total_items: i64, -} - -pub async fn get_all_series_handler( - admin: AdminUser, +// Fetch all series with pagination +pub async fn get_all_paginated_series_handler( + admin: AdminOrHigherUser, State(state): State, Query(pagination): Query, ) -> Response { @@ -282,7 +214,11 @@ pub async fn get_all_series_handler( match 
state .db_service - .get_admin_paginated_series(pagination.page, pagination.page_size, pagination.search.as_deref()) + .get_admin_paginated_series( + pagination.page, + pagination.page_size, + pagination.search.as_deref(), + ) .await { Ok(paginated_result) => { @@ -296,7 +232,13 @@ description: s.description, cover_image_url: s.cover_image_url, source_url: s.current_source_url, - authors: serde_json::from_value(s.authors).unwrap_or_else(|_| vec![]), + authors: Vec::<String>::deserialize(&s.authors).unwrap_or_else(|e| { + warn!( + "Failed to deserialize authors for series id {}: {}. Data: {:?}", + s.id, e, s.authors + ); + vec![] + }), last_updated: s.updated_at.format("%Y-%m-%d %H:%M:%S").to_string(), processing_status: s.processing_status.to_string(), }) @@ -309,62 +251,17 @@ (StatusCode::OK, Json(response_series_data)).into_response() } - Err(e) => { - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": e.to_string()})), - ) - .into_response() - } - } -} - -pub async fn get_all_users_handler( - admin: AdminUser, - State(state): State<AppState>, - Query(pagination): Query<PaginationParams>, -) -> Response { - println!( - "->> {:<12} - get_all_users_handler - user: {}", - "HANDLER", admin.0.username - ); - - match state - .db_service - .get_paginated_user( - pagination.page, - pagination.page_size, - pagination.search.as_deref(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": e.to_string()})), ) - .await - { - Ok(paginated_result) => { - let response_user_data = PaginatedResponse { - items: paginated_result.items, - total_items: paginated_result.total_items, - }; - - (StatusCode::OK, Json(response_user_data)).into_response() - } - Err(e) => { - error!("Failed to get paginated users: {:#?}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve users"})), - ) - .into_response() - } + .into_response(), } } -#[derive(Deserialize)] -pub struct RepairChapterRequest { - pub chapter_number: f32, - pub new_chapter_url: String, -} - +// Repair chapter pub async fn repair_chapter_handler( - admin: AdminUser, + admin: AdminOrHigherUser, Path(series_id): Path<i32>, State(state): State<AppState>, Json(payload): Json<RepairChapterRequest>, ) -> Response { @@ -399,8 +296,9 @@ } } +// Delete series pub async fn delete_series_handler( - admin: AdminUser, + admin: SuperAdminUser, Path(series_id): Path<i32>, State(state): State<AppState>, ) -> Response { @@ -436,13 +334,9 @@ } } -#[derive(Deserialize)] -pub struct CreateCategoryTagRequest { - pub name: String, -} - +// Create category tag pub async fn create_category_tag_handler( - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State<AppState>, Json(payload): Json<CreateCategoryTagRequest>, ) -> Response { @@ -452,22 +346,23 @@ ); match state.db_service.create_category_tag(&payload.name).await { - Ok(new_category) => { - (StatusCode::CREATED, Json(serde_json::json!({"status": "success", "category": new_category})), - ) - .into_response() - } + Ok(new_category) => ( + StatusCode::CREATED, + Json(serde_json::json!({"status": "success", "category": new_category})), + ) + .into_response(), Err(e) => { // Check for unique violation error from PostgreSQL (code 23505) if let Some(sqlx::Error::Database(db_error)) = e.root_cause().downcast_ref::<sqlx::Error>() - && db_error.code() == Some(std::borrow::Cow::from("23505")) { - return ( + &&
db_error.code() == Some(std::borrow::Cow::from("23505")) + { + return ( StatusCode::CONFLICT, Json(serde_json::json!({"status": "error", "message": "Category tag already exists."})), ) .into_response(); - } + } ( StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"status": "error", "message": e.to_string()})), @@ -477,8 +372,9 @@ } } +// Delete category tag pub async fn delete_category_tag_handler( - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State<AppState>, Path(category_id): Path<i32>, ) -> Response { @@ -507,7 +403,7 @@ } pub async fn get_list_category_tags_handler( - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State<AppState>, ) -> Response { println!( @@ -522,15 +418,16 @@ ) .into_response(), Err(e) => ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": e.to_string()})), + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": e.to_string()})), ) .into_response(), } } +// Get series category tags pub async fn get_series_category_tags_handler( - admin: AdminUser, + admin: AdminOrHigherUser, State(state): State<AppState>, Path(series_id): Path<i32>, ) -> Response { println!( @@ -539,14 +436,77 @@ "HANDLER", admin.0.username, series_id ); - match state.db_service.get_category_tag_by_series_id(series_id).await { + match state + .db_service + .get_category_tag_by_series_id(series_id) + .await + { Ok(tags) => ( StatusCode::OK, Json(serde_json::json!({"status": "success", "tags": tags})), - ).into_response(), + ) + .into_response(), Err(e) => ( StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": e.to_string()})) - ).into_response(), + Json(serde_json::json!({"status": "error", "message": e.to_string()})), + ) + .into_response(), + } +} + +pub async fn get_series_chapter_handler( + admin: AdminOrHigherUser, + State(state): State<AppState>, + Path(series_id): Path<i32>, +) -> Response { + println!( + "->> {:<12} - get_series_chapters_handler - user: {}, series_id: {}", + "HANDLER", admin.0.username, series_id + ); + + match state.db_service.get_chapters_by_series_id(series_id).await { + Ok(chapters) => ( + StatusCode::OK, + Json(serde_json::json!({"status": "success", "chapters": chapters})), + ) + .into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": e.to_string()})), + ) + .into_response(), + } +} + +pub async fn delete_chapter_handler( + admin: AdminOrHigherUser, + State(state): State<AppState>, + Path((series_id, chapter_number)): Path<(i32, f32)>, +) -> Response { + println!( + "->> {:<12} - delete_chapter_handler - user: {}, series: {}, chap: {}", + "HANDLER", admin.0.username, series_id, chapter_number + ); + + match state + .db_service + .delete_chapter_and_images_for_chapter(series_id, chapter_number) + .await + { + Ok(row_affected) if row_affected > 0 => ( + StatusCode::OK, + Json(serde_json::json!({"status": "success", "message": "Chapter has been deleted."})), + ) + .into_response(), + Ok(_) => ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"status": "error", "message": "Chapter not found."})), + ) + .into_response(), + Err(e) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": e.to_string()})), + ) + .into_response(), } } diff --git a/backend/src/api/admin/admin_user_handler.rs
b/backend/src/api/admin/admin_user_handler.rs new file mode 100644 index 0000000..ca67a0a --- /dev/null +++ b/backend/src/api/admin/admin_user_handler.rs @@ -0,0 +1,110 @@ +use axum::Json; +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; +use axum_core::__private::tracing::error; +use axum_core::response::{IntoResponse, Response}; +use serde_json::Value; + +use crate::api::admin::{ + AdminError, AdminResult, AdminUpdateUserPayload, PaginatedResponse, PaginationParams, +}; +use crate::api::extractor::AdminOrHigherUser; +use crate::builder::startup::AppState; +use crate::database::UserWithRole; + +// Fetch all users with pagination +pub async fn get_all_paginated_users_handler( + admin: AdminOrHigherUser, + State(state): State<AppState>, + Query(pagination): Query<PaginationParams>, +) -> Response { + println!( + "->> {:<12} - get_all_users_handler - user: {}", + "HANDLER", admin.0.username + ); + + match state + .db_service + .get_admin_paginated_user( + pagination.page, + pagination.page_size, + pagination.search.as_deref(), + ) + .await + { + Ok(paginated_result) => { + let response_user_data = PaginatedResponse { + items: paginated_result.items, + total_items: paginated_result.total_items, + }; + + (StatusCode::OK, Json(response_user_data)).into_response() + } + Err(e) => { + error!("Failed to get paginated users: {:#?}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve users"})), + ) + .into_response() + } + } +} + +/// Partially update user metadata +pub async fn update_user_metadata_handler( + admin: AdminOrHigherUser, + State(state): State<AppState>, + Path(user_id): Path<i32>, + Json(payload): Json<AdminUpdateUserPayload>, +) -> AdminResult<Json<UserWithRole>> { + println!( + "->> {:<12} - update_user_handler - user: {}", + "HANDLER", admin.0.username + ); + + if admin.0.id == user_id { + return Err(AdminError::Forbidden( + "You cannot update your own account".to_string(), + )); + } + + let updated_user = state + .db_service + .admin_update_user( + user_id, + payload.username.as_deref(), + payload.email.as_deref(), + payload.role_id, + payload.is_active, + admin.0.role, + ) + .await?
+ .ok_or_else(|| AdminError::NotFound(format!("User with id {} not found", user_id)))?; + + Ok(Json(updated_user)) +} + +pub async fn delete_user_handler( + State(state): State<AppState>, + admin: AdminOrHigherUser, + Path(user_id): Path<i32>, +) -> AdminResult<Json<Value>> { + println!( + "->> {:<12} - delete_user_handler - user: {}", + "HANDLER", admin.0.username + ); + + let deleted_user = state.db_service.admin_delete_user(user_id).await?; + + if deleted_user == 0 { + return Err(AdminError::NotFound(format!( + "User with id {} not found", + user_id + ))); + } + + Ok(Json( + serde_json::json!({"status": "success", "message": "User deleted successfully"}), + )) +} diff --git a/backend/src/api/admin/mod.rs b/backend/src/api/admin/mod.rs new file mode 100644 index 0000000..8c44278 --- /dev/null +++ b/backend/src/api/admin/mod.rs @@ -0,0 +1,156 @@ +use axum::Json; +use axum_core::__private::tracing::log::error; +use axum_core::response::{IntoResponse, Response}; +use reqwest::StatusCode; +use serde::{Deserialize, Serialize}; + +use crate::common::error::AuthError; + +pub mod admin_comment_handler; +pub mod admin_routes; +pub mod admin_series_handlers; +pub mod admin_user_handler; + +#[derive(Serialize)] +struct AdminErrorResponse { + error: String, +} + +pub enum AdminError { + Auth(AuthError), + Conflict(String), + NotFound(String), + Forbidden(String), + DatabaseError(anyhow::Error), +} + +impl From<anyhow::Error> for AdminError { + fn from(e: anyhow::Error) -> Self { + let e_str = e.to_string(); + if e_str.contains("Username or Email already exists") { + AdminError::Conflict("Username or Email already exists".to_string()) + } else if e_str.starts_with("FORBIDDEN:") { + AdminError::Forbidden(e_str.replace("FORBIDDEN: ", "")) + } else { + AdminError::DatabaseError(e) + } + } +} + +impl From<AuthError> for AdminError { + fn from(e: AuthError) -> Self { + AdminError::Auth(e) + } +} + +impl IntoResponse for AdminError { + fn into_response(self) -> Response { + let (status, message) = match self { + AdminError::DatabaseError(e) => { + error!("Database error: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + "An internal server error occurred".to_string(), + ) + } + AdminError::Conflict(msg) => (StatusCode::CONFLICT, msg), + AdminError::NotFound(msg) => (StatusCode::NOT_FOUND, msg), + AdminError::Forbidden(msg) => (StatusCode::FORBIDDEN, msg), + AdminError::Auth(e) => { + return e.into_response(); + } + }; + + let body = Json(AdminErrorResponse { error: message }); + (status, body).into_response() + } +} + +pub type AdminResult<T> = Result<T, AdminError>; + +#[derive(Deserialize)] +pub struct CreateSeriesRequest { + title: String, + original_title: Option<String>, + authors: Option<Vec<String>>, + description: String, + cover_image_url: String, + source_url: String, + category_ids: Vec<i32>, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UpdateSeriesRequest { + title: Option<String>, + original_title: Option<String>, + authors: Option<Vec<String>>, + description: Option<String>, + cover_image_url: Option<String>, + source_url: Option<String>, + category_ids: Option<Vec<i32>>, +} + +#[derive(Serialize)] +pub struct UploadCoverImageResponse { + status: String, + url: String, +} + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PaginationParams { + #[serde(default = "default_page")] + page: u32, + #[serde(default = "default_page_size")] + page_size: u32, + #[serde(default)] + search: Option<String>, +} + +fn default_page() -> u32 { + 1 +} +fn default_page_size() -> u32 { + 25 +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SeriesResponse { + id: i32, + title: String, +
original_title: Option<String>, + description: String, + cover_image_url: String, + source_url: String, + authors: Vec<String>, + last_updated: String, + processing_status: String, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct PaginatedResponse<T> { + items: Vec<T>, + total_items: i64, +} + +#[derive(Deserialize)] +pub struct RepairChapterRequest { + pub chapter_number: f32, + pub new_chapter_url: String, +} + +#[derive(Deserialize)] +pub struct CreateCategoryTagRequest { + pub name: String, +} + +#[derive(Deserialize)] +pub struct AdminUpdateUserPayload { + username: Option<String>, + email: Option<String>, + role_id: Option<i32>, + is_active: Option<bool>, +} diff --git a/backend/src/api/admin_routes.rs b/backend/src/api/admin_routes.rs deleted file mode 100644 index fe00159..0000000 --- a/backend/src/api/admin_routes.rs +++ /dev/null @@ -1,36 +0,0 @@ -use axum::Router; -use axum::routing::{delete, get, patch, post}; - -use crate::api::admin_handlers::{ - create_category_tag_handler, create_new_series_handler, - delete_category_tag_handler, delete_series_handler, get_all_series_handler, - get_all_users_handler, get_list_category_tags_handler, - get_series_category_tags_handler, repair_chapter_handler, - update_existing_series_handler, upload_series_cover_image_handler, -}; -use crate::builder::startup::AppState; - -pub fn admin_routes() -> Router<AppState> { - Router::new() - // User management routes - .route("/users/list", get(get_all_users_handler)) - // Series management routes - .route("/series/add", post(create_new_series_handler)) - .route("/series/delete/{id}", delete(delete_series_handler)) - .route("/series/repair/chapter/{id}", post(repair_chapter_handler)) - .route("/series/list", get(get_all_series_handler)) - .route("/series/update/{id}", patch(update_existing_series_handler)) - .route("/series/tags/{id}", get(get_series_category_tags_handler)) - // Image upload routes - .route( - "/series/cover/upload/image", - post(upload_series_cover_image_handler), - ) - // Category Tag management routes - .route("/category/tag/add", post(create_category_tag_handler)) - .route( - "/category/tag/delete/{id}", - delete(delete_category_tag_handler), - ) - .route("/category/tag/list", get(get_list_category_tags_handler)) -} diff --git a/backend/src/api/extractor.rs b/backend/src/api/extractor.rs index f0c3d64..dd04cb6 100644 --- a/backend/src/api/extractor.rs +++ b/backend/src/api/extractor.rs @@ -1,16 +1,47 @@ -use crate::builder::startup::AppState; -use crate::common::error::AuthError; -use crate::common::jwt::Claims; -use crate::database::Users; +use std::convert::Infallible; + use axum::extract::FromRequestParts; use axum::http::request::Parts; use axum_core::__private::tracing::error; -use std::convert::Infallible; +use crate::builder::startup::AppState; +use crate::common::error::AuthError; +use crate::common::jwt::Claims; + +#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)] +pub enum Role { + User = 0, + Moderator = 1, + Admin = 2, + SuperAdmin = 3, +} + +impl Role { + pub fn from_name(role_name: &str) -> Option<Role> { + match role_name { + "superadmin" => Some(Role::SuperAdmin), + "admin" => Some(Role::Admin), + "moderator" => Some(Role::Moderator), + "user" => Some(Role::User), + _ => None, + } + } + + pub fn to_name(self) -> &'static str { + match self { + Role::SuperAdmin => "superadmin", + Role::Admin => "admin", + Role::Moderator => "moderator", + Role::User => "user", + } + } +} + +// Authenticated user pub struct AuthenticatedUser { pub id: i32, pub username: String, - pub role_id: i32, + pub role: Role, }
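+// The derived PartialOrd on Role follows its declaration order (User < Moderator < Admin < SuperAdmin), which the role-gate extractors below rely on for their comparisons.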
impl FromRequestParts<AppState> for AuthenticatedUser { @@ -22,7 +53,7 @@ ) -> Result<Self, Self::Rejection> { let claims = Claims::from_request_parts(parts, state) .await - .map_err(|_| AuthError::InvalidToken)?; + .map_err(|_err| AuthError::InvalidToken)?; let user = state .db_service @@ -34,14 +65,27 @@ })? .ok_or(AuthError::InvalidToken)?; + let role_name = state + .db_service + .get_role_name_by_id(user.role_id) + .await + .map_err(|_err| AuthError::InternalServerError)? + .ok_or(AuthError::InvalidToken)?; + + let role = Role::from_name(&role_name).ok_or_else(|| { + error!("Role name '{}' in DB is not a valid Role enum.", role_name); + AuthError::InternalServerError + })?; + Ok(AuthenticatedUser { id: user.id, username: user.username, - role_id: user.role_id, + role, }) } } +// Optional authenticated user (signed in or not) pub struct OptionalAuthenticatedUser(pub Option<AuthenticatedUser>); impl FromRequestParts<AppState> for OptionalAuthenticatedUser { @@ -51,8 +95,7 @@ parts: &mut Parts, state: &AppState, ) -> Result<Self, Self::Rejection> { - let user_result = - AuthenticatedUser::from_request_parts(parts, state).await; + let user_result = AuthenticatedUser::from_request_parts(parts, state).await; // If extraction is successful, wrap it in Some. // we treat it as None instead of rejecting the request. @@ -62,37 +105,62 @@ } } -pub struct AdminUser(pub Users); +// Super admin user only +pub struct SuperAdminUser(pub AuthenticatedUser); -impl FromRequestParts<AppState> for AdminUser { +impl FromRequestParts<AppState> for SuperAdminUser { type Rejection = AuthError; async fn from_request_parts( parts: &mut Parts, state: &AppState, ) -> Result<Self, Self::Rejection> { - let claims = Claims::from_request_parts(parts, state) - .await - .map_err(|_| AuthError::InvalidToken)?; + let user = AuthenticatedUser::from_request_parts(parts, state).await?; - let user = state - .db_service - .get_user_by_identifier(&claims.sub) - .await - .map_err(|_| AuthError::InternalServerError)? - .ok_or(AuthError::InvalidToken)?; + if user.role != Role::SuperAdmin { + Err(AuthError::WrongCredentials) + } else { + Ok(SuperAdminUser(user)) + } + } +} - let role_name = state - .db_service - .get_role_name_by_id(user.role_id) - .await - .map_err(|_| AuthError::InternalServerError)?
- .ok_or(AuthError::InvalidToken)?; +// Admin or higher user +pub struct AdminOrHigherUser(pub AuthenticatedUser); - if role_name != "admin" { - return Err(AuthError::WrongCredentials); +impl FromRequestParts for AdminOrHigherUser { + type Rejection = AuthError; + + async fn from_request_parts( + parts: &mut Parts, + state: &AppState, + ) -> Result { + let user = AuthenticatedUser::from_request_parts(parts, state).await?; + + if user.role < Role::Admin { + Err(AuthError::WrongCredentials) + } else { + Ok(AdminOrHigherUser(user)) } + } +} + +// Moderator or higher user +pub struct ModeratorOrHigherUser(pub AuthenticatedUser); + +impl FromRequestParts for ModeratorOrHigherUser { + type Rejection = AuthError; - Ok(AdminUser(user)) + async fn from_request_parts( + parts: &mut Parts, + state: &AppState, + ) -> Result { + let user = AuthenticatedUser::from_request_parts(parts, state).await?; + + if user.role < Role::Moderator { + Err(AuthError::WrongCredentials) + } else { + Ok(ModeratorOrHigherUser(user)) + } } } diff --git a/backend/src/api/mod.rs b/backend/src/api/mod.rs index d58fa2b..cfbd4b3 100644 --- a/backend/src/api/mod.rs +++ b/backend/src/api/mod.rs @@ -1,7 +1,4 @@ -mod admin_handlers; -mod admin_routes; -pub mod auth_handlers; +pub mod admin; pub mod extractor; +pub mod public; pub mod routes; -pub mod series_handlers; -pub mod user_handlers; diff --git a/backend/src/api/auth_handlers.rs b/backend/src/api/public/auth_handlers.rs similarity index 81% rename from backend/src/api/auth_handlers.rs rename to backend/src/api/public/auth_handlers.rs index 03fc18d..809eac7 100644 --- a/backend/src/api/auth_handlers.rs +++ b/backend/src/api/public/auth_handlers.rs @@ -1,32 +1,28 @@ -use crate::api::extractor::AuthenticatedUser; -use crate::builder::startup::AppState; -use crate::common::email_service::send_password_reset_email; -use crate::common::error::AuthError; -use crate::common::hashing::{hash_password, verify_password}; -use crate::common::jwt::{ - RefreshClaims, create_access_jwt, create_refresh_jwt, -}; -use crate::database::DatabaseService; -use axum::Json; use axum::extract::State; use axum::http::StatusCode; +use axum::Json; use axum_core::__private::tracing::error; -use axum_extra::extract::CookieJar; use axum_extra::extract::cookie::{Cookie, SameSite}; +use axum_extra::extract::CookieJar; use chrono::{Duration, Utc}; use serde::{Deserialize, Serialize}; use uuid::Uuid; +use crate::api::extractor::AuthenticatedUser; +use crate::builder::startup::AppState; +use crate::common::email_service::send_password_reset_email; +use crate::common::error::AuthError; +use crate::common::hashing::{hash_password, verify_password}; +use crate::common::jwt::{create_access_jwt, create_refresh_jwt, RefreshClaims}; +use crate::database::DatabaseService; + #[derive(Serialize)] pub struct GenericMessageResponse { message: String, } // Helper function to get role name string from role id -async fn get_role_name( - db_service: &DatabaseService, - role_id: i32, -) -> Result { +async fn get_role_name(db_service: &DatabaseService, role_id: i32) -> Result { db_service .get_role_name_by_id(role_id) .await @@ -40,6 +36,91 @@ async fn get_role_name( }) } +#[derive(Deserialize)] +pub struct RegisterPayload { + username: String, + email: String, + password: String, +} + +impl RegisterPayload { + fn validate_input(&self) -> Result<(), AuthError> { + if self.username.trim().len() < 4 { + return Err(AuthError::InvalidCharacter( + "Username should be at least 4 characters long.".to_string(), + )); + } + if 
self.email.is_empty() || self.password.is_empty() {
+            return Err(AuthError::MissingCredentials);
+        }
+
+        Ok(())
+    }
+}
+
+/// Register a new user.
+/// It checks for uniqueness and creates a new user in the database.
+pub async fn register_new_user_handler(
+    State(state): State<AppState>,
+    Json(payload): Json<RegisterPayload>,
+) -> Result<(StatusCode, Json<GenericMessageResponse>), AuthError> {
+    let db_service = &state.db_service;
+
+    // Validate input
+    payload.validate_input()?;
+
+    // Check if the username or email already exists in the database
+    if db_service
+        .get_user_by_identifier(&payload.username)
+        .await?
+        .is_some()
+    {
+        return Err(AuthError::UserAlreadyExists {
+            field: "username".to_string(),
+        });
+    }
+
+    if db_service
+        .get_user_by_identifier(&payload.email)
+        .await?
+        .is_some()
+    {
+        return Err(AuthError::UserAlreadyExists {
+            field: "email".to_string(),
+        });
+    }
+
+    let hashed_password =
+        hash_password(&payload.password).map_err(|_err| AuthError::InternalServerError)?;
+
+    // Get the ID for the default 'user' role.
+    let user_role_id = db_service
+        .get_role_id_by_name("user")
+        .await
+        .map_err(|_err| AuthError::InternalServerError)?
+        .ok_or_else(|| {
+            error!("Default 'user' role not found in the database.");
+            AuthError::InternalServerError
+        })?;
+
+    // Create a new user in the database
+    let _new_user = db_service
+        .create_user(
+            &payload.username,
+            &payload.email,
+            &hashed_password,
+            user_role_id,
+        )
+        .await
+        .map_err(|_err| AuthError::InternalServerError)?;
+
+    let response = GenericMessageResponse {
+        message: "User registered successfully".to_string(),
+    };
+
+    Ok((StatusCode::CREATED, Json(response)))
+}
+
 // Struct for Responses
 #[derive(Serialize)]
 pub struct UserData {
@@ -71,8 +152,8 @@ impl LoginRequest {
     }
 }
 
-// Accepts a `CookieJar` and return modified `CookieJar`
-// with the token set as a cookie
+/// Logs a user in after verifying their credentials.
+/// Returns access and refresh tokens as HTTP-only cookies.
 pub async fn login_handler(
     jar: CookieJar,
     State(state): State<AppState>,
@@ -91,14 +172,11 @@ pub async fn login_handler(
         })?
         .ok_or(AuthError::WrongCredentials)?;
 
-    let is_password_valid = verify_password(
-        &payload.password,
-        &user.password_hash,
-    )
-    .map_err(|_| {
-        error!("Password verification failed for user {}", user.username);
-        AuthError::WrongCredentials
-    })?;
+    let is_password_valid =
+        verify_password(&payload.password, &user.password_hash).map_err(|_err| {
+            error!("Password verification failed for user {}", user.username);
+            AuthError::WrongCredentials
+        })?;
 
     if !is_password_valid {
         return Err(AuthError::WrongCredentials);
@@ -106,8 +184,7 @@ pub async fn login_handler(
 
     let role_name = get_role_name(db_service, user.role_id).await?;
 
-    let access_token =
-        create_access_jwt(user.username.clone(), role_name.clone())?;
+    let access_token = create_access_jwt(user.username.clone(), role_name.clone())?;
     let refresh_token = create_refresh_jwt(user.username.clone())?;
 
     // Set cookie
@@ -143,6 +220,8 @@ pub async fn login_handler(
     Ok((new_jar, Json(response)))
 }
 
+/// Logs the user out by clearing the token cookies.
+/// Clears the access and refresh tokens from the browser.
 pub async fn logout_handler(
     jar: CookieJar,
 ) -> Result<(CookieJar, Json<GenericMessageResponse>), AuthError> {
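Because both tokens travel as HTTP-only cookies, a non-browser client has to carry a cookie jar between calls. A hedged sketch of exercising the login flow with `reqwest` (assumptions: the server listens on localhost:3000, `reqwest` is built with its `cookies` and `json` features, and the `identifier`/`password` field names, which this diff does not show):

```rust
// Hedged client-side sketch of the cookie-based auth flow.
// Port and LoginRequest field names are assumptions, not confirmed here.
use reqwest::Client;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // cookie_store(true) keeps the HTTP-only access and refresh cookies
    // between requests, mirroring what a browser does.
    let client = Client::builder().cookie_store(true).build()?;

    let res = client
        .post("http://localhost:3000/api/auth/login")
        .json(&serde_json::json!({
            "identifier": "alice",
            "password": "correct horse battery staple"
        }))
        .send()
        .await?;
    println!("login status: {}", res.status());

    // Subsequent calls reuse the stored cookies automatically.
    let me = client
        .post("http://localhost:3000/api/auth/user")
        .send()
        .await?;
    println!("protected endpoint: {}", me.status());
    Ok(())
}
```

The same jar then satisfies the `refresh-access-token` and `logout` routes defined later in this patch without any manual header handling.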
@@ -170,7 +249,9 @@ pub async fn logout_handler(
     Ok((new_jar, Json(response_body)))
 }
 
-pub async fn refresh_token_handler(
+/// Refreshes the access token using a valid refresh token.
+/// Returns a new access token in a cookie.
+pub async fn refresh_access_token_handler(
     jar: CookieJar,
     State(state): State<AppState>,
     claims: RefreshClaims,
@@ -179,20 +260,19 @@ pub async fn refresh_access_token_handler(
     let user = state
         .db_service
         .get_user_by_identifier(&claims.sub)
         .await
-        .map_err(|_| AuthError::InvalidToken)?
+        .map_err(|_err| AuthError::InvalidToken)?
         .ok_or(AuthError::InvalidRefreshToken)?;
 
     let role_name = get_role_name(&state.db_service, user.role_id).await?;
 
-    let new_access_token =
-        create_access_jwt(claims.sub.clone(), role_name.clone())?;
+    let new_access_token = create_access_jwt(claims.sub.clone(), role_name)?;
 
     let new_access_cookie = Cookie::build(("token", new_access_token))
         .path("/")
         .http_only(true)
         .secure(false) // Only send via HTTPS (disable for local development)
         .same_site(SameSite::Lax)
-        .max_age(time::Duration::minutes(15))
+        .max_age(time::Duration::minutes(30))
         .build();
 
     let new_jar = jar.add(new_access_cookie);
@@ -208,9 +288,8 @@ pub struct UserResponse {
     user: UserData,
 }
 
-/// Protected handler. `Claims` acts as a guard.
+/// Protected endpoint that returns the authenticated user's data.
 pub async fn protected_handler(
-    State(state): State<AppState>,
     user: AuthenticatedUser,
 ) -> Result<(StatusCode, Json<UserResponse>), AuthError> {
     println!(
@@ -218,12 +297,12 @@ pub async fn protected_handler(
         user.username
     );
 
-    let role_name = get_role_name(&state.db_service, user.role_id).await?;
+    let role_name = user.role.to_name();
 
     let user_data = UserData {
         id: user.id,
        username: user.username,
-        role: role_name,
+        role: role_name.to_string(),
     };
 
     let response = UserResponse { user: user_data };
@@ -252,8 +331,7 @@ impl ResetPasswordRequest {
     }
 }
 
-// Handler for the password reset request
-// Finds a user by email, generates a token, and in a real app, sends an email.
+/// Handler that initiates a password reset by sending a reset token to the user's email.
 pub async fn forgot_password_handler(
     State(state): State<AppState>,
     Json(payload): Json,
@@ -265,18 +343,14 @@ pub async fn forgot_password_handler(
         .await
     {
         let unique_reset_token = Uuid::new_v4().to_string();
-        let expired_at = Utc::now() + Duration::hours(1);
+        let expired_at = Utc::now() + Duration::minutes(10);
 
         // Store reset token in the database
         state
             .db_service
-            .create_password_reset_token(
-                user.id,
-                &unique_reset_token,
-                expired_at,
-            )
+            .create_password_reset_token(user.id, &unique_reset_token, expired_at)
             .await
-            .map_err(|_| AuthError::InternalServerError)?;
+            .map_err(|_err| AuthError::InternalServerError)?;
 
         // Send the password reset email
         if let Err(e) = send_password_reset_email(
@@ -303,7 +377,7 @@ pub async fn forgot_password_handler(
     Ok(Json(response))
 }
 
-// Handler for finalizing password reset with token
+/// Handler for finalizing a password reset using a valid reset token.
 pub async fn reset_password_handler(
     State(state): State<AppState>,
     Json(payload): Json<ResetPasswordRequest>,
@@ -315,25 +389,23 @@ pub async fn reset_password_handler(
     let (user_id, expires_at) = db_service
         .get_user_by_reset_token(&payload.token)
         .await?
- .ok_or(AuthError::InvalidCredentials)?; + .ok_or(AuthError::InvalidToken)?; + // Clean up the expired token if Utc::now() > expires_at { db_service .delete_password_reset_token(&payload.token) .await?; - return Err(AuthError::MissingCredentials); + return Err(AuthError::InvalidToken); } - let hashed_password = hash_password(&payload.new_password) - .map_err(|_| AuthError::InvalidCredentials)?; + let hashed_password = + hash_password(&payload.new_password).map_err(|_err| AuthError::InternalServerError)?; db_service - .update_user_password_hash_after_reset_password( - user_id, - &hashed_password, - ) + .update_user_password_hash_after_reset_password(user_id, &hashed_password) .await - .map_err(|_| AuthError::InternalServerError)?; + .map_err(|_err| AuthError::InternalServerError)?; db_service .delete_password_reset_token(&payload.token) @@ -346,92 +418,6 @@ pub async fn reset_password_handler( Ok(Json(response)) } -#[derive(Deserialize)] -pub struct RegisterPayload { - username: String, - email: String, - password: String, -} - -impl RegisterPayload { - fn validate_input(&self) -> Result<(), AuthError> { - if self.username.trim().len() < 4 { - return Err(AuthError::InvalidCharacter( - "Username should be at least 4 characters long.".to_string(), - )); - } - if self.email.is_empty() || self.password.is_empty() { - return Err(AuthError::MissingCredentials); - } - - Ok(()) - } -} - -/// it checks for uniqueness and create new user in the database -pub async fn register_new_user_handler( - State(state): State, - Json(payload): Json, -) -> Result<(StatusCode, Json), AuthError> { - let db_service = &state.db_service; - - // Validate input - payload.validate_input()?; - - // Check if user already exists in the database - // We use existing `get_user_by_identifier` method - if db_service - .get_user_by_identifier(&payload.username) - .await? - .is_some() - { - return Err(AuthError::UserAlreadyExists { - field: "username".to_string(), - }); - } - if db_service - .get_user_by_identifier(&payload.email) - .await? - .is_some() - { - return Err(AuthError::UserAlreadyExists { - field: "email".to_string(), - }); - } - - // Hash the password before storing it in the database - let hashed_password = hash_password(&payload.password) - .map_err(|_| AuthError::InternalServerError)?; - - // Get the ID for the default 'user' role. - let user_role_id = db_service - .get_role_id_by_name("user") - .await - .map_err(|_| AuthError::InternalServerError)? 
- .ok_or_else(|| { - error!("Default 'user' role not found in the database."); - AuthError::InternalServerError - })?; - - // Create a new user in the database - let _new_user = db_service - .create_user( - &payload.username, - &payload.email, - &hashed_password, - user_role_id, - ) - .await - .map_err(|_| AuthError::InternalServerError)?; - - // Return success response - let response = GenericMessageResponse { - message: "User registered successfully".to_string(), - }; - - Ok((StatusCode::CREATED, Json(response))) -} - #[derive(Deserialize)] pub struct CheckUsernamePayload { username: String, @@ -443,6 +429,8 @@ pub struct CheckUsernameResponse { message: String, } +/// Check if username already taken +/// This aims to guarantee username uniqueness pub async fn realtime_check_username_handler( State(state): State, Json(payload): Json, @@ -453,8 +441,7 @@ pub async fn realtime_check_username_handler( if payload.username.trim().len() < 4 { let response = CheckUsernameResponse { available: false, - message: "Username should be at least 4 characters long" - .to_string(), + message: "Username should be at least 4 characters long".to_string(), }; return (StatusCode::BAD_REQUEST, Json(response)); } @@ -483,9 +470,7 @@ pub async fn realtime_check_username_handler( ); let response = CheckUsernameResponse { available: false, - message: - "Error checking username availability. Please try again" - .to_string(), + message: "Error checking username availability. Please try again".to_string(), }; (StatusCode::INTERNAL_SERVER_ERROR, Json(response)) } diff --git a/backend/src/api/public/comments_handlers.rs b/backend/src/api/public/comments_handlers.rs new file mode 100644 index 0000000..0acbc9a --- /dev/null +++ b/backend/src/api/public/comments_handlers.rs @@ -0,0 +1,487 @@ +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; +use axum::Json; +use axum_core::__private::tracing::error; +use axum_core::response::{IntoResponse, Response}; +use axum_extra::extract::Multipart; +use serde::Deserialize; +use uuid::Uuid; + +use crate::api::extractor::{AuthenticatedUser, OptionalAuthenticatedUser}; +use crate::api::public::user_handlers::extract_field_data; +use crate::builder::startup::AppState; +use crate::database::{Comment, CommentEntityType, CommentSort, DeleteCommentResult, VotePayload}; + +/// Helper function to recursively prepend the base CDN URL to all comment attachment URLs. +/// This modifies the comments in place using an iterative stack-based approach. 
+fn hydrate_attachments_url(comments: &mut [Comment], base_url: &str) {
+    // Collect mutable references to the top-level comments
+    let mut stack: Vec<&mut Comment> = comments.iter_mut().collect();
+
+    // Process comments iteratively to avoid deep recursion
+    while let Some(comment) = stack.pop() {
+        // Prepend base_url to attachment_urls if they exist
+        if let Some(urls) = &mut comment.attachment_urls {
+            for url in urls.iter_mut() {
+                *url = format!("{}/{}", base_url, url);
+            }
+        }
+
+        stack.extend(comment.replies.iter_mut());
+    }
+}
+
+#[derive(Deserialize)]
+pub struct CommentParams {
+    #[serde(default)]
+    sort: CommentSort,
+    thread_id: Option<i32>,
+}
+
+// Fetch series comments
+pub async fn get_series_comment_handler(
+    State(state): State<AppState>,
+    Path(series_id): Path<i32>,
+    user: OptionalAuthenticatedUser,
+    Query(params): Query<CommentParams>,
+) -> Response {
+    let user_id = user.0.map(|u| u.id);
+    match state
+        .db_service
+        .get_comments(
+            CommentEntityType::Series,
+            series_id,
+            user_id,
+            params.sort,
+            params.thread_id,
+        )
+        .await
+    {
+        Ok(mut comments) => {
+            let base_url = state.storage_client.domain_cdn_url();
+
+            hydrate_attachments_url(&mut comments, base_url);
+
+            (StatusCode::OK, Json(comments)).into_response()
+        }
+        Err(e) => {
+            error!(
+                "[SERIES] Failed to get comments for series {}: {}",
+                series_id, e
+            );
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to get comments for series"})),
+            )
+                .into_response()
+        }
+    }
+}
+
+// Fetch chapter comments
+pub async fn get_chapter_comment_handler(
+    State(state): State<AppState>,
+    Path(chapter_id): Path<i32>,
+    user: OptionalAuthenticatedUser,
+    Query(params): Query<CommentParams>,
+) -> Response {
+    let user_id = user.0.map(|u| u.id);
+    match state
+        .db_service
+        .get_comments(
+            CommentEntityType::SeriesChapters,
+            chapter_id,
+            user_id,
+            params.sort,
+            params.thread_id,
+        )
+        .await
+    {
+        Ok(mut comments) => {
+            let base_url = state.storage_client.domain_cdn_url();
+
+            hydrate_attachments_url(&mut comments, base_url);
+
+            (StatusCode::OK, Json(comments)).into_response()
+        }
+        Err(e) => {
+            error!(
+                "[CHAPTERS] Failed to get comments for chapter {}: {}",
+                chapter_id, e
+            );
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to get comments for chapter"})),
+            )
+                .into_response()
+        }
+    }
+}
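Worth a note before the submission handler: `hydrate_attachments_url` above avoids recursion by draining an explicit stack of `&mut Comment`, so arbitrarily deep reply chains cannot overflow the call stack. A self-contained sketch of the same traversal over a simplified stand-in type (only the two fields the function touches; the real `Comment` has more):

```rust
// Standalone sketch of the stack-based hydration above. `Node` is a
// simplified stand-in for `Comment` (attachment_urls + replies only).
struct Node {
    attachment_urls: Option<Vec<String>>,
    replies: Vec<Node>,
}

fn hydrate(nodes: &mut [Node], base_url: &str) {
    let mut stack: Vec<&mut Node> = nodes.iter_mut().collect();
    while let Some(node) = stack.pop() {
        if let Some(urls) = &mut node.attachment_urls {
            for url in urls.iter_mut() {
                *url = format!("{}/{}", base_url, url);
            }
        }
        // Children go onto the same stack instead of a recursive call.
        stack.extend(node.replies.iter_mut());
    }
}

fn main() {
    let mut tree = vec![Node {
        attachment_urls: Some(vec!["comments/1/a.png".into()]),
        replies: vec![Node {
            attachment_urls: Some(vec!["comments/1/b.png".into()]),
            replies: vec![],
        }],
    }];
    hydrate(&mut tree, "https://cdn.example.com");
    assert_eq!(
        tree[0].attachment_urls.as_ref().unwrap()[0],
        "https://cdn.example.com/comments/1/a.png"
    );
}
```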
+
+// Shared submission handler for new comments, with optional attachments
+pub async fn new_comment_submission_handler(
+    state: AppState,
+    user: AuthenticatedUser,
+    mut multipart: Multipart,
+    entity_type: CommentEntityType,
+    entity_id: i32,
+) -> Response {
+    let mut content_markdown = None;
+    let mut parent_id: Option<i32> = None;
+    let mut attachment_data: Vec<(Vec<u8>, String, String)> = Vec::new();
+
+    while let Ok(Some(field)) = multipart.next_field().await {
+        if let Some(field_name) = field.name() {
+            match field_name {
+                "content_markdown" => content_markdown = field.text().await.ok(),
+                "parent_id" => {
+                    if let Ok(text) = field.text().await {
+                        parent_id = text.parse::<i32>().ok();
+                    }
+                }
+                "images" => {
+                    let file_name = field.file_name().unwrap_or("").to_string();
+                    let content_type = field
+                        .content_type()
+                        .unwrap_or("application/octet-stream")
+                        .to_string();
+                    if let Ok(data) = field.bytes().await {
+                        if data.len() > 5 * 1024 * 1024 {
+                            return (
+                                StatusCode::BAD_REQUEST,
+                                Json(serde_json::json!({"message": "File size exceeds 5MB"})),
+                            )
+                                .into_response();
+                        }
+                        attachment_data.push((data.to_vec(), file_name, content_type));
+                    }
+                }
+                _ => (),
+            }
+        }
+    }
+
+    // Validation
+    let content_markdown_str = content_markdown.unwrap_or_default();
+    if content_markdown_str.is_empty() && attachment_data.is_empty() {
+        return (
+            StatusCode::BAD_REQUEST,
+            Json(serde_json::json!({"message": "Comment must have content or an attachment."})),
+        )
+            .into_response();
+    }
+
+    // Upload files, if any
+    let mut attachment_keys: Vec<String> = Vec::new();
+    for (file_data, file_name, content_type) in attachment_data {
+        let file_extension = std::path::Path::new(&file_name)
+            .extension()
+            .and_then(std::ffi::OsStr::to_str)
+            .unwrap_or("");
+
+        let unique_key = format!("comments/{}/{}.{}", user.id, Uuid::new_v4(), file_extension);
+
+        if let Err(e) = state
+            .storage_client
+            .upload_image_file(file_data, &unique_key, &content_type)
+            .await
+        {
+            error!("Failed to upload comment attachment: {}", e);
+            return (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to upload file attachment"})),
+            )
+                .into_response();
+        }
+        attachment_keys.push(unique_key);
+    }
+
+    // Create the new comment using the provided entity_type and entity_id
+    let new_comment_id = match state
+        .db_service
+        .create_new_comment(
+            user.id,
+            entity_type,
+            entity_id,
+            &content_markdown_str,
+            parent_id,
+            &attachment_keys,
+        )
+        .await
+    {
+        Ok(id) => id,
+        Err(e) => {
+            error!(
+                "Failed to create comment for entity type {:?} and ID {}: {}",
+                entity_type, entity_id, e
+            );
+            return (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to create comment"})),
+            )
+                .into_response();
+        }
+    };
+
+    // After creating, fetch the full data for the new comment
+    match state
+        .db_service
+        .get_comment_by_id(new_comment_id, Some(user.id))
+        .await
+    {
+        // If the fetch is successful, return the full comment object
+        Ok(Some(mut new_comment)) => {
+            let base_url = state.storage_client.domain_cdn_url();
+
+            hydrate_attachments_url(std::slice::from_mut(&mut new_comment), base_url);
+
+            (StatusCode::OK, Json(new_comment)).into_response()
+        }
+        // Handle cases where the comment couldn't be fetched right after creation
+        Ok(None) => {
+            error!(
+                "Comment with id {} was created but not found immediately afterwards",
+                new_comment_id
+            );
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Comment created but could not be fetched"})),
+            )
+                .into_response()
+        }
+        Err(e) => {
+            error!("Failed to retrieve comment data {:#?}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(
+                    serde_json::json!({"error": format!("Comment created but failed to retrieve its data {:#?}", e)}),
+                ),
+            )
+                .into_response()
+        }
+    }
+}
+
+// Create/Post a new comment on a specific series page
+pub async fn create_series_comment_handler(
+    State(state): State<AppState>,
+    user: AuthenticatedUser,
+    Path(series_id): Path<i32>,
+    multipart: Multipart,
+) -> Response {
+    println!(
+        "->> {:<12} - create_series_comment - series_id: {:?}",
+        "HANDLER", series_id
+    );
+
+    new_comment_submission_handler(state, user, multipart, CommentEntityType::Series, series_id)
+        .await
+}
+
+// Create/Post a new comment on a specific chapter page
+pub async fn create_chapter_comment_handler(
+    State(state): State<AppState>,
+    user: AuthenticatedUser,
+    Path(chapter_id): Path<i32>,
+    multipart: Multipart,
+) -> Response {
+    println!(
+        "->> {:<12} - create_chapter_comment - chapter_id: {:?}",
+        "HANDLER", chapter_id
+    );
+
+    new_comment_submission_handler(
+        state,
+        user,
+        multipart,
+        CommentEntityType::SeriesChapters,
+        chapter_id,
+    )
+    .await
+}
+
+// Upload comment attachments
+pub async fn upload_comment_attachments_handler(
+    State(state): State<AppState>,
user: AuthenticatedUser, + mut multipart: Multipart, +) -> Response { + if let Ok(Some(field)) = multipart.next_field().await { + let content_type = field + .content_type() + .unwrap_or("application/octet-stream") + .to_string(); + + let file_name = field.file_name().unwrap_or("").to_string(); + + let file_data = match extract_field_data(field).await { + Ok(data) => data, + Err(response) => return response, + }; + + const MAX_FILE_SIZE: usize = 5 * 1024 * 1024; + if file_data.len() > MAX_FILE_SIZE { + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"message": "File size cannot exceed 5MB"})), + ) + .into_response(); + } + + let file_extension = std::path::Path::new(&file_name) + .extension() + .and_then(std::ffi::OsStr::to_str) + .unwrap_or(""); + + let unique_image_key = + format!("comments/{}/{}.{}", user.id, Uuid::new_v4(), file_extension); + + match state + .storage_client + .upload_image_file(file_data, &unique_image_key, &content_type) + .await + { + Ok(url) => (StatusCode::OK, Json(serde_json::json!({"url": url}))).into_response(), + Err(e) => { + error!("Failed to upload comment attachment: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"error": "Failed to upload file"})), + ) + .into_response() + } + } + } else { + ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"message": "No file found in the request."})), + ) + .into_response() + } +} + +// Delete comment +pub async fn delete_comment_handler( + State(state): State, + user: AuthenticatedUser, + Path(comment_id): Path, +) -> Response { + match state.db_service.delete_comment(comment_id, user.id).await { + Ok(DeleteCommentResult::SoftDeleted(updated_comment, attachment_object_key)) => { + // Database transaction was successful. + // Run this after the DB commit. + if !attachment_object_key.is_empty() + && let Err(e) = state + .storage_client + .delete_image_objects(&attachment_object_key) + .await + { + error!( + "Failed to delete storage objects for comment {}: {}. Keys: {:?}", + comment_id, e, attachment_object_key + ); + } + (StatusCode::OK, Json(updated_comment)).into_response() + } + Ok(DeleteCommentResult::InsufficientPermissions) => ( + StatusCode::FORBIDDEN, + Json(serde_json::json!({ + "status": "error", + "message": "You do not have permission to delete this user's comment" + })), + ) + .into_response(), + Ok(DeleteCommentResult::HardDeleted(attachment_object_key)) => { + if !attachment_object_key.is_empty() + && let Err(e) = state + .storage_client + .delete_image_objects(&attachment_object_key) + .await + { + error!( + "Failed to delete storage objects for comment {}: {}. 
Keys: {:?}",
+                    comment_id, e, attachment_object_key
+                );
+            }
+            (
+                StatusCode::NO_CONTENT,
+                Json(serde_json::json!({"message": "Comment permanently deleted"})),
+            )
+                .into_response()
+        }
+        Ok(DeleteCommentResult::NotFound) => (
+            StatusCode::NOT_FOUND,
+            Json(serde_json::json!({"message": "Comment not found or permission denied"})),
+        )
+            .into_response(),
+        Err(e) => {
+            error!("Failed to delete comment {}: {}", comment_id, e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to delete comment"})),
+            )
+                .into_response()
+        }
+    }
+}
+
+#[derive(Deserialize)]
+pub struct UpdateCommentPayload {
+    pub content_markdown: String,
+}
+
+// Update (Edit) an existing comment
+pub async fn update_existing_comment_handler(
+    State(state): State<AppState>,
+    user: AuthenticatedUser,
+    Path(comment_id): Path<i32>,
+    Json(payload): Json<UpdateCommentPayload>,
+) -> Response {
+    match state
+        .db_service
+        .update_existing_comment(comment_id, user.id, &payload.content_markdown)
+        .await
+    {
+        Ok(Some(updated_data)) => (StatusCode::OK, Json(updated_data)).into_response(),
+        Ok(None) => (
+            StatusCode::NOT_FOUND,
+            Json(serde_json::json!({"message": "Comment not found or permission denied"})),
+        )
+            .into_response(),
+        Err(e) => {
+            error!(
+                "Failed to update existing comment with id {}: {}",
+                comment_id, e
+            );
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Could not update comment"})),
+            )
+                .into_response()
+        }
+    }
+}
+
+// Vote on a comment
+pub async fn vote_on_comment_handler(
+    State(state): State<AppState>,
+    user: AuthenticatedUser,
+    Path(comment_id): Path<i32>,
+    Json(payload): Json<VotePayload>,
+) -> Response {
+    match state
+        .db_service
+        .vote_on_comment(comment_id, user.id, payload.vote_type)
+        .await
+    {
+        Ok(response_data) => (StatusCode::OK, Json(response_data)).into_response(),
+        Err(e) => {
+            error!("Failed to vote on comment {}: {}", comment_id, e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to vote on comment"})),
+            )
+                .into_response()
+        }
+    }
+}
diff --git a/backend/src/api/public/mod.rs b/backend/src/api/public/mod.rs
new file mode 100644
index 0000000..8e3a16b
--- /dev/null
+++ b/backend/src/api/public/mod.rs
@@ -0,0 +1,5 @@
+pub mod auth_handlers;
+pub mod comments_handlers;
+pub mod public_routes;
+pub mod series_handlers;
+pub mod user_handlers;
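For reference, the comment-creation endpoints above accept `multipart/form-data` with a `content_markdown` field, an optional `parent_id`, and `images` parts capped at 5 MB each. A hedged client-side sketch of one submission (assumes `reqwest` with its `multipart` feature and an already-authenticated client such as the one in the earlier login sketch; the URL shape comes from the route table in the file below):

```rust
// Hedged sketch: posting a comment with one image attachment.
// The local file name and server port are assumptions.
use reqwest::multipart::{Form, Part};

async fn post_comment(client: &reqwest::Client, series_id: i32) -> reqwest::Result<()> {
    let image = Part::bytes(std::fs::read("page.png").unwrap_or_default())
        .file_name("page.png")
        .mime_str("image/png")
        .expect("valid mime type");

    let form = Form::new()
        .text("content_markdown", "First!")
        // `parent_id` is omitted here, which creates a top-level comment.
        .part("images", image);

    let res = client
        .post(format!("http://localhost:3000/api/series/{}/comments", series_id))
        .multipart(form)
        .send()
        .await?;
    println!("create comment: {}", res.status());
    Ok(())
}
```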
diff --git a/backend/src/api/public/public_routes.rs b/backend/src/api/public/public_routes.rs
new file mode 100644
index 0000000..87d67ac
--- /dev/null
+++ b/backend/src/api/public/public_routes.rs
@@ -0,0 +1,117 @@
+use axum::routing::{delete, get, patch, post};
+use axum::Router;
+
+use crate::api::public::auth_handlers::{
+    forgot_password_handler, login_handler, logout_handler, protected_handler,
+    realtime_check_username_handler, refresh_access_token_handler, register_new_user_handler,
+    reset_password_handler,
+};
+use crate::api::public::comments_handlers::{
+    create_chapter_comment_handler, create_series_comment_handler, delete_comment_handler,
+    get_chapter_comment_handler, get_series_comment_handler, update_existing_comment_handler,
+    upload_comment_attachments_handler, vote_on_comment_handler,
+};
+use crate::api::public::series_handlers::{
+    browse_series_handler, fetch_chapter_details_handler, fetch_most_viewed_series_handler,
+    fetch_new_series_handler, fetch_series_details_by_id_handler,
+    fetch_updated_series_chapter_handler, get_all_categories_handler, rate_series_handler,
+    record_series_view_handler, user_search_series_handler,
+};
+use crate::api::public::user_handlers::{
+    add_bookmark_series_handler, delete_bookmark_series_handler,
+    get_bookmark_status_current_user_handler, get_user_bookmark_library_handler,
+    get_user_profile_handler, update_user_avatar_handler, update_user_password_setting_handler,
+    update_user_profile_handler,
+};
+use crate::builder::startup::AppState;
+
+/// User auth api routes
+pub fn auth_api_routes() -> Router<AppState> {
+    Router::new()
+        .route("/login", post(login_handler))
+        .route("/register-new-user", post(register_new_user_handler))
+        .route("/refresh-access-token", post(refresh_access_token_handler))
+        .route("/logout", post(logout_handler))
+        .route("/user", post(protected_handler))
+        .route("/check-username", post(realtime_check_username_handler))
+        .route("/forgot-password", post(forgot_password_handler))
+        .route("/reset-password", post(reset_password_handler))
+}
+
+/// Routes that require a logged-in user
+fn user_logged_in_api_routes() -> Router<AppState> {
+    Router::new()
+        .route(
+            "/user/profile",
+            get(get_user_profile_handler).patch(update_user_profile_handler),
+        )
+        .route(
+            "/user/profile/password",
+            patch(update_user_password_setting_handler),
+        )
+        .route("/user/profile/avatar", post(update_user_avatar_handler))
+        .route("/user/bookmark", get(get_user_bookmark_library_handler))
+        .route(
+            "/series/{id}/bookmark",
+            post(add_bookmark_series_handler).delete(delete_bookmark_series_handler),
+        )
+        .route(
+            "/series/{id}/bookmark/status",
+            get(get_bookmark_status_current_user_handler),
+        )
+}
+
+/// General public api routes (no authentication required)
+fn public_general_api_routes() -> Router<AppState> {
+    Router::new()
+        .route("/series/most-viewed", get(fetch_most_viewed_series_handler))
+        .route("/series/new-series", get(fetch_new_series_handler))
+        .route(
+            "/series/latest-updated-series",
+            get(fetch_updated_series_chapter_handler),
+        )
+        .route("/series/tags", get(get_all_categories_handler))
+        .route("/series/browse", get(browse_series_handler))
+        .route("/series/search", get(user_search_series_handler))
+        .route(
+            "/series/details/{id}",
+            get(fetch_series_details_by_id_handler),
+        )
+        .route(
+            "/series/{id}/chapter/{chapter_number}",
+            get(fetch_chapter_details_handler),
+        )
+        .route("/series/{id}/rate", post(rate_series_handler))
+        .route("/series/{id}/views-count", post(record_series_view_handler))
+}
+
+/// Comment api routes
+fn public_comment_api_routes() -> Router<AppState> {
+    Router::new()
+        .route(
+            "/series/{id}/comments",
+            get(get_series_comment_handler).post(create_series_comment_handler),
+        )
+        .route(
+            "/series/chapter/{id}/comments",
+            get(get_chapter_comment_handler).post(create_chapter_comment_handler),
+        )
+        .route(
+            "/comments/{id}/edit",
+            patch(update_existing_comment_handler),
+        )
+        .route("/comments/{id}/delete", delete(delete_comment_handler))
+        .route("/comments/{id}/vote", post(vote_on_comment_handler))
+        .route(
+            "/comments/attachments/upload",
+            post(upload_comment_attachments_handler),
+        )
+}
+
+// Route for the general public api
+pub fn general_api_routes() -> Router<AppState> {
+    // Merge same prefix "/api" routes into one
+    public_general_api_routes()
+        .merge(public_comment_api_routes())
+        .merge(user_logged_in_api_routes())
+}
diff --git a/backend/src/api/public/series_handlers.rs b/backend/src/api/public/series_handlers.rs
new file mode 100644
index 0000000..7bcc702
--- /dev/null
+++ b/backend/src/api/public/series_handlers.rs
@@ -0,0 +1,502 @@
+use axum::extract::{Path, Query, State};
+use axum::Json;
+use axum_core::__private::tracing::error;
+use
axum_core::response::{IntoResponse, Response}; +use reqwest::StatusCode; +use serde::de::{Deserializer, Error}; +use serde::{Deserialize, Serialize}; + +use crate::api::extractor::AuthenticatedUser; +use crate::builder::startup::AppState; +use crate::database::{CategoryTag, Series, SeriesChapter, SeriesOrderBy}; + +#[derive(Deserialize)] +pub struct MostViewedParams { + #[serde(default = "default_period")] + period: String, + #[serde(default = "default_limit")] + limit: i64, +} + +fn default_period() -> String { + "week".to_string() +} + +fn default_limit() -> i64 { + 20 +} + +pub async fn fetch_most_viewed_series_handler( + State(state): State, + Query(params): Query, +) -> Response { + // Map the user-friendly period string + let period_str = match params.period.to_lowercase().as_str() { + "hour" => "1 hour", + "day" => "1 day", + "week" => "1 week", + "month" => "1 month", + _ => "1 days", + }; + + match state + .db_service + .fetch_most_viewed_series(period_str, params.limit) + .await + { + Ok(series) => (StatusCode::OK, Json(series)).into_response(), + Err(e) => { + eprintln!("Error fetching most viewed series: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve most viewed series."})), + ).into_response() + } + } +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SeriesDataResponse { + series: Series, + chapters: Vec, + authors: Vec, + category_tags: Vec, +} + +// Fetch all details for a single series +pub async fn fetch_series_details_by_id_handler( + State(state): State, + Path(id): Path, +) -> Response { + let db = &state.db_service; + + let series = match db.get_series_by_id(id).await { + Ok(Some(s)) => s, + Ok(None) => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"status": "error", "message": "Series not found."})), + ) + .into_response(); + } + Err(e) => { + error!("Error fetching series details: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve series details."})), + ) + .into_response(); + } + }; + + let series_id = series.id; + + // Fetch authors, chapters, and categories tag in parallel + let (authors_result, chapters_result, categories_result) = tokio::join!( + db.get_authors_by_series_id(series_id), + db.get_chapters_by_series_id(series_id), + db.get_category_tag_by_series_id(series_id), + ); + + let authors = authors_result.unwrap_or_else(|e| { + error!("Failed to get authors for series {}: {}", series_id, e); + Vec::new() + }); + + let chapters = chapters_result.unwrap_or_else(|e| { + error!("Failed to get chapters for series {}: {}", series_id, e); + Vec::new() + }); + + let category_tags = categories_result.unwrap_or_else(|e| { + error!("Failed to get categories for series {}: {}", series_id, e); + Vec::new() + }); + + let response_data = SeriesDataResponse { + series, + authors, + chapters, + category_tags, + }; + + (StatusCode::OK, Json(response_data)).into_response() +} + +pub async fn fetch_new_series_handler(State(state): State) -> Response { + match state + .db_service + .get_public_series_paginated(1, 20, SeriesOrderBy::CreatedAt) + .await + { + Ok(series) => (StatusCode::OK, Json(series)).into_response(), + Err(e) => { + eprintln!("Error fetching new series: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve new series."})), + ).into_response() + } + } +} + +#[derive(Deserialize)] 
+#[serde(rename_all = "camelCase")] +pub struct SeriesPaginationParams { + #[serde(default = "default_page")] + page: u32, + #[serde(default = "default_pagesize")] + page_size: u32, +} + +fn default_page() -> u32 { + 1 +} +fn default_pagesize() -> u32 { + 50 +} + +pub async fn fetch_updated_series_chapter_handler( + State(state): State, + Query(params): Query, +) -> Response { + match state + .db_service + .get_latest_release_series_chapter_paginated(params.page, params.page_size) + .await + { + Ok(paginated_result) => (StatusCode::OK, Json(paginated_result)).into_response(), + Err(e) => { + error!("Error fetching updated series: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve updated series."})), + ) + .into_response() + } + } +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ChapterDetailsResponse { + series_title: String, + chapter_title: Option, + chapter_id: i32, + chapter_number: f32, + pages: Vec, + all_chapters: Vec, + prev_chapter_number: Option, + next_chapter_number: Option, +} + +pub async fn fetch_chapter_details_handler( + State(state): State, + Path((series_id, chapter_number)): Path<(i32, f32)>, +) -> Response { + println!( + "->> {:<12} - fetch_chapter_images - series_id: {}, chapter: {}", + "HANDLER", series_id, chapter_number + ); + + let db = &state.db_service; + let base_url = state.storage_client.domain_cdn_url(); + + let (series_result, all_chapters_result, images_result) = tokio::join!( + db.get_series_by_id(series_id), + db.get_chapters_by_series_id(series_id), + db.get_images_urls_for_chapter_series(series_id, chapter_number), + ); + + // Get series title + let series = match series_result { + Ok(Some(s)) => s, + _ => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"status": "error", "message": "Series not found."})), + ) + .into_response(); + } + }; + + // Get chapter images list + let object_keys = match images_result { + Ok(img_chap) => img_chap, + Err(e) => { + error!("Error fetching chapter images: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve chapter images."})), + ).into_response(); + } + }; + + let pages = object_keys + .into_iter() + .map(|key| format!("{}/{}", base_url, key)) + .collect(); + + // Get all chapters for the series and find current, next and previous chapters + let all_chapters = match all_chapters_result { + Ok(mut chaps) => { + chaps.sort_by(|a, b| a.chapter_number.total_cmp(&b.chapter_number)); + chaps + } + Err(e) => { + error!("Error fetching chapter list: {}", e); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"status": "error", "message": "Could not retrieve chapter list."})), + ).into_response(); + } + }; + + let current_chapter_idx = all_chapters + .iter() + .position(|c| c.chapter_number == chapter_number); + + let current_chapter_index = match current_chapter_idx { + Some(index) => index, + None => { + return ( + StatusCode::NOT_FOUND, + Json(serde_json::json!({"status": "error", "message": "Chapter not found in this series."})), + ).into_response(); + } + }; + + let current_chapter = &all_chapters[current_chapter_index]; + + let prev_chapter_number = if current_chapter_index > 0 { + all_chapters + .get(current_chapter_index - 1) + .map(|c| c.chapter_number) + } else { + None + }; + + let next_chapter_number = all_chapters + .get(current_chapter_index + 1) + .map(|c| c.chapter_number); + + let 
response_data = ChapterDetailsResponse {
+        series_title: series.title,
+        chapter_title: current_chapter.title.clone(),
+        chapter_id: current_chapter.id,
+        chapter_number,
+        pages,
+        all_chapters,
+        prev_chapter_number,
+        next_chapter_number,
+    };
+
+    (StatusCode::OK, Json(response_data)).into_response()
+}
+
+pub async fn record_series_view_handler(
+    State(state): State<AppState>,
+    Path(series_id): Path<i32>,
+) -> Response {
+    println!(
+        "->> {:<12} - record_series_view - series_id: {}",
+        "HANDLER", series_id
+    );
+
+    let db = &state.db_service;
+
+    match db.record_series_view(series_id).await {
+        Ok(_) => (
+            StatusCode::OK,
+            Json(serde_json::json!({"status": "success", "message": "View Recorded."})),
+        )
+            .into_response(),
+        Err(e) => {
+            error!("Error recording series view for id {}: {}", series_id, e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"status": "error", "message": "Could not record series view."})),
+            )
+                .into_response()
+        }
+    }
+}
+
+#[derive(Deserialize)]
+pub struct RateSeriesPayload {
+    rating: i16,
+}
+#[derive(Serialize)]
+pub struct RateSeriesResponse {
+    message: String,
+    new_total_score: i64,
+    new_total_count: i32,
+}
+
+pub async fn rate_series_handler(
+    State(state): State<AppState>,
+    user: AuthenticatedUser,
+    Path(series_id): Path<i32>,
+    Json(payload): Json<RateSeriesPayload>,
+) -> Response {
+    // Validate the rating value
+    if !(1..=5).contains(&payload.rating) {
+        return (
+            StatusCode::BAD_REQUEST,
+            Json(serde_json::json!({"error": "Rating must be between 1 and 5"})),
+        )
+            .into_response();
+    }
+
+    match state
+        .db_service
+        .add_or_update_series_rating(series_id, payload.rating, user.id)
+        .await
+    {
+        Ok(_) => match state.db_service.get_series_by_id(series_id).await {
+            Ok(Some(series)) => {
+                let response = RateSeriesResponse {
+                    message: "Rating submitted".to_string(),
+                    new_total_score: series.total_rating_score,
+                    new_total_count: series.total_ratings_count,
+                };
+                (StatusCode::OK, Json(response)).into_response()
+            }
+            _ => (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": "Failed to retrieve updated series data"})),
+            )
+                .into_response(),
+        },
+        Err(e) => {
+            error!("Failed to process rating: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"error": format!("Failed to update rating: {}", e)})),
+            )
+                .into_response()
+        }
+    }
+}
+
+// Fetch all category tags
+pub async fn get_all_categories_handler(State(state): State<AppState>) -> Response {
+    match state.db_service.get_list_all_categories().await {
+        Ok(categories) => (StatusCode::OK, Json(categories)).into_response(),
+        Err(e) => {
+            error!("Failed to get list of all categories: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"status": "error", "message": "Could not retrieve categories."})),
+            )
+                .into_response()
+        }
+    }
+}
+
+fn deserialize_i32_vec<'de, D>(deserializer: D) -> Result<Option<Vec<i32>>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s: Option<String> = Option::deserialize(deserializer)?;
+    match s {
+        Some(s) if !s.is_empty() => {
+            let result: Result<Vec<i32>, _> = s
+                .split(',')
+                .map(str::trim)
+                .filter(|part| !part.is_empty())
+                .map(str::parse)
+                .collect();
+
+            match result {
+                Ok(v) if !v.is_empty() => Ok(Some(v)),
+                Ok(_) => Ok(None),
+                Err(e) => Err(Error::custom(e)),
+            }
+        }
+        _ => Ok(None),
+    }
+}
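`deserialize_i32_vec` turns a comma-separated query value into `Option<Vec<i32>>`: blanks are trimmed, empty segments are skipped, an empty or all-blank value becomes `None`, and a malformed item is a hard error surfaced through `Error::custom`. A test-style sketch of those shapes, paired with the function above (it assumes `serde_urlencoded`, the parser behind axum's `Query`, is available as a dev-dependency, which this diff does not confirm):

```rust
// Hedged sketch of how the comma-separated filter values parse.
// `Filters` is a stand-in for BrowseParams, reusing deserialize_i32_vec
// from the handler file above.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Filters {
    #[serde(default, deserialize_with = "deserialize_i32_vec")]
    include: Option<Vec<i32>>,
}

fn main() {
    // Whitespace around items is trimmed and empty segments are skipped.
    let f: Filters = serde_urlencoded::from_str("include=1, 2,,3").unwrap();
    assert_eq!(f.include, Some(vec![1, 2, 3]));

    // An empty value deserializes to None rather than Some(vec![]).
    let f: Filters = serde_urlencoded::from_str("include=").unwrap();
    assert_eq!(f.include, None);

    // A malformed item is a hard error, not silently dropped.
    assert!(serde_urlencoded::from_str::<Filters>("include=1,x").is_err());
}
```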
+
+#[derive(Debug, Deserialize)]
+pub struct BrowseParams {
+    #[serde(default = "default_page")]
+    page: u32,
+    #[serde(default = "default_pagesize")]
+    page_size: u32,
+    order_by: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_i32_vec")]
+    include: Option<Vec<i32>>,
+    #[serde(default, deserialize_with = "deserialize_i32_vec")]
+    exclude: Option<Vec<i32>>,
+    search: Option<String>,
+}
+
+pub async fn browse_series_handler(
+    State(state): State<AppState>,
+    Query(params): Query<BrowseParams>,
+) -> Response {
+    let order_by = match params.order_by.as_deref() {
+        Some("new") => SeriesOrderBy::CreatedAt,
+        Some("updated") => SeriesOrderBy::UpdatedAt,
+        Some("views") => SeriesOrderBy::ViewsCount,
+        Some("ratings") => SeriesOrderBy::Rating,
+        _ => SeriesOrderBy::UpdatedAt,
+    };
+
+    let include_ids = params.include.as_deref().unwrap_or(&[]);
+    let exclude_ids = params.exclude.as_deref().unwrap_or(&[]);
+    let search_query = params.search.as_deref();
+
+    match state
+        .db_service
+        .browse_series_paginated_with_filters(
+            params.page,
+            params.page_size,
+            order_by,
+            include_ids,
+            exclude_ids,
+            search_query,
+        )
+        .await
+    {
+        Ok(paginated_result) => (StatusCode::OK, Json(paginated_result)).into_response(),
+        Err(e) => {
+            error!("Failed to browse series: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(
+                    serde_json::json!({"status": "error", "message": "Could not retrieve series."}),
+                ),
+            )
+                .into_response()
+        }
+    }
+}
+
+#[derive(Deserialize)]
+pub struct UserSearchParams {
+    search: String,
+}
+
+pub async fn user_search_series_handler(
+    State(state): State<AppState>,
+    Query(params): Query<UserSearchParams>,
+) -> Response {
+    match state
+        .db_service
+        .user_search_paginated_series(&params.search)
+        .await
+    {
+        Ok(series) => (StatusCode::OK, Json(series)).into_response(),
+        Err(e) => {
+            error!("Failed to search series: {}", e);
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                Json(serde_json::json!({"status": "error", "message": "Could not search series."})),
+            )
+                .into_response()
+        }
+    }
+}
diff --git a/backend/src/api/user_handlers.rs b/backend/src/api/public/user_handlers.rs
similarity index 71%
rename from backend/src/api/user_handlers.rs
rename to backend/src/api/public/user_handlers.rs
index adfc6f0..a323c67 100644
--- a/backend/src/api/user_handlers.rs
+++ b/backend/src/api/public/user_handlers.rs
@@ -1,16 +1,18 @@
-use crate::api::extractor::AuthenticatedUser;
-use crate::builder::startup::AppState;
-use crate::common::hashing::hash_password;
 use axum::Json;
 use axum::extract::{Path, State};
 use axum::http::StatusCode;
 use axum_core::__private::tracing::error;
 use axum_core::response::{IntoResponse, Response};
 use axum_extra::extract::Multipart;
+use axum_extra::extract::multipart::Field;
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;
 
+use crate::api::extractor::AuthenticatedUser;
+use crate::builder::startup::AppState;
+use crate::common::hashing::hash_password;
+
 #[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct UpdateProfilePayload {
@@ -38,11 +40,8 @@ pub async fn get_user_profile_handler(
     if let Some(key) = &profile.avatar_url
         && !key.is_empty()
     {
-        profile.avatar_url = Some(format!(
-            "{}/{}",
-            state.storage_client.domain_cdn_url(),
-            key
-        ));
+        profile.avatar_url =
+            Some(format!("{}/{}", state.storage_client.domain_cdn_url(), key));
     }
     (StatusCode::OK, Json(profile)).into_response()
 }
@@ -66,8 +65,7 @@ pub async fn update_user_profile_handler(
 ) -> Response {
     // Validate email uniqueness if its being changed
     if let Some(ref email) = payload.email
-        && let Ok(Some(existing_user)) =
-            state.db_service.get_user_by_identifier(email).await
+        && let Ok(Some(existing_user)) = state.db_service.get_user_by_identifier(email).await
         && existing_user.id != user.id
     {
         return (
@@ -80,11 +78,7 @@ pub async fn
update_user_profile_handler( // Call db to perform partial update match state .db_service - .update_partial_user_profile( - user.id, - payload.display_name, - payload.email, - ) + .update_partial_user_profile(user.id, payload.display_name, payload.email) .await { Ok(_) => ( @@ -109,16 +103,22 @@ pub async fn update_user_password_setting_handler( Json(payload): Json, ) -> Response { if payload.new_password.len() < 8 { - return (StatusCode::BAD_REQUEST, Json(serde_json::json!({"message": "Password must be at least 8 characters long."}))).into_response(); + return ( + StatusCode::BAD_REQUEST, + Json(serde_json::json!({"message": "Password must be at least 8 characters long."})), + ) + .into_response(); } let hashed_password = match hash_password(&payload.new_password) { Ok(hashed) => hashed, - Err(_) => return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"message": "Failed to process password."})), - ) - .into_response(), + Err(_) => { + return ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"message": "Failed to process password."})), + ) + .into_response(); + } }; match state @@ -128,14 +128,31 @@ pub async fn update_user_password_setting_handler( { Ok(_) => ( StatusCode::OK, - Json( - serde_json::json!({"message": "Password updated successfully"}), - ), + Json(serde_json::json!({"message": "Password updated successfully"})), ) .into_response(), Err(e) => { error!("DB error updating password: {}", e); - (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"message": "Could not update password."}))).into_response() + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"message": "Could not update password."})), + ) + .into_response() + } + } +} + +// Helper function extract field bytes from multipart +pub async fn extract_field_data(field: Field) -> Result, Response> { + match field.bytes().await { + Ok(bytes) => Ok(bytes.to_vec()), + Err(e) => { + error!("Failed to read bytes from multipart field: {}", e); + Err(( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"message": format!("Failed to read file: {}", e)})), + ) + .into_response()) } } } @@ -153,13 +170,9 @@ pub async fn update_user_avatar_handler( .to_string(); let file_name = field.file_name().unwrap_or("unknown.jpg").to_string(); - let file_data = match field.bytes().await { - Ok(bytes) => bytes.to_vec(), - Err(e) => return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"message": format!("Failed to read file: {}", e)})), - ) - .into_response(), + let file_data_bytes = match extract_field_data(field).await { + Ok(data) => data, + Err(response) => return response, }; let file_extension = std::path::Path::new(&file_name) @@ -178,31 +191,42 @@ pub async fn update_user_avatar_handler( // Upload to cloud storage return match state .storage_client - .upload_image_file(file_data, &unique_image_key, &content_type) + .upload_image_file(file_data_bytes, &unique_image_key, &content_type) .await { Ok(key) => { match state.db_service.update_user_avatar(user.id, &key).await { Ok(_) => { // Construct the public URL - let public_url = format!("{}/{}", state.storage_client.domain_cdn_url(), key); + let public_url = + format!("{}/{}", state.storage_client.domain_cdn_url(), key); - (StatusCode::OK, Json(serde_json::json!({"status": "success", "url": public_url}))).into_response() - }, - Err(_) => (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"message": "Failed to save avatar URL."}))).into_response(), + ( + StatusCode::OK, + 
Json(serde_json::json!({"status": "success", "url": public_url})), + ) + .into_response() + } + Err(_) => ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"message": "Failed to save avatar URL."})), + ) + .into_response(), } } Err(e) => { error!("Error updating user avatar: {}", e); - (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"message": "Failed to upload avatar."}))).into_response() + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({"message": "Failed to upload avatar."})), + ) + .into_response() } }; } ( StatusCode::BAD_REQUEST, - Json( - serde_json::json!({"message": "No avatar file found in request."}), - ), + Json(serde_json::json!({"message": "No avatar file found in request."})), ) .into_response() } @@ -236,7 +260,11 @@ pub async fn add_bookmark_series_handler( user: AuthenticatedUser, Path(series_id): Path, ) -> Response { - match state.db_service.add_bookmarked_series(user.id, series_id).await { + match state + .db_service + .add_bookmarked_series(user.id, series_id) + .await + { Ok(_) => ( StatusCode::OK, Json(serde_json::json!({"status": "success", "message": "Add Bookmark"})), @@ -259,7 +287,11 @@ pub async fn delete_bookmark_series_handler( user: AuthenticatedUser, Path(series_id): Path, ) -> Response { - match state.db_service.delete_bookmarked_series(user.id, series_id).await { + match state + .db_service + .delete_bookmarked_series(user.id, series_id) + .await + { Ok(_) => ( StatusCode::OK, Json(serde_json::json!({"status": "success", "message": "Remove Bookmark"})), @@ -269,7 +301,9 @@ pub async fn delete_bookmark_series_handler( error!("DB error fetching user bookmarks: {}", e); ( StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not remove bookmark"})), + Json( + serde_json::json!({"status": "error", "message": "Could not remove bookmark"}), + ), ) .into_response() } @@ -296,7 +330,9 @@ pub async fn get_bookmark_status_current_user_handler( error!("DB error fetching user bookmarks: {}", e); ( StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not fetch bookmarks"})), + Json( + serde_json::json!({"status": "error", "message": "Could not fetch bookmarks"}), + ), ) .into_response() } @@ -317,12 +353,13 @@ pub async fn get_user_bookmark_library_handler( let response_list = bookmarked_series_list .into_iter() .map(|series| { - let latest_chapter_info = series - .last_chapter_found_in_storage - .map(|chapter_num| LatestChapterInfo { - chapter_number: chapter_num, - title: series.chapter_title, - }); + let latest_chapter_info = + series + .last_chapter_found_in_storage + .map(|chapter_num| LatestChapterInfo { + chapter_number: chapter_num, + title: series.chapter_title, + }); BookmarkSeriesResponse { id: series.id, diff --git a/backend/src/api/routes.rs b/backend/src/api/routes.rs index e9fd2d7..8101eb2 100644 --- a/backend/src/api/routes.rs +++ b/backend/src/api/routes.rs @@ -1,112 +1,14 @@ -use crate::api::admin_routes::admin_routes; -use crate::api::auth_handlers::{ - forgot_password_handler, login_handler, logout_handler, protected_handler, - realtime_check_username_handler, refresh_token_handler, - register_new_user_handler, reset_password_handler, -}; -use crate::api::series_handlers::{ - browse_series_handler, fetch_chapter_details_handler, - fetch_most_viewed_series_handler, fetch_new_series_handler, - fetch_series_details_by_id_handler, fetch_updated_series_chapter_handler, - get_all_categories_handler, 
get_chapter_comment_handler, - get_series_comment_handler, post_chapter_comment_handler, - post_series_comment_handler, rate_series_handler, - record_series_view_handler, update_existing_comment_handler, - upload_comment_attachments_handler, vote_on_comment_handler, -}; -use crate::api::user_handlers::{ - add_bookmark_series_handler, delete_bookmark_series_handler, - get_bookmark_status_current_user_handler, - get_user_bookmark_library_handler, get_user_profile_handler, - update_user_avatar_handler, update_user_password_setting_handler, - update_user_profile_handler, -}; -use crate::builder::startup::AppState; use axum::Router; -use axum::routing::{get, patch, post}; - -pub fn routes() -> Router { - // Route for user auth api - let auth_api_routes = Router::new() - .route("/login", post(login_handler)) - .route("/register", post(register_new_user_handler)) - .route("/refresh", post(refresh_token_handler)) - .route("/logout", post(logout_handler)) - .route("/user", post(protected_handler)) - .route("/check-username", post(realtime_check_username_handler)) - .route("/forgot-password", post(forgot_password_handler)) - .route("/reset-password", post(reset_password_handler)); - - // Router user related api - let user_api = Router::new() - .route("/user/bookmark", get(get_user_bookmark_library_handler)) - .route( - "/user/profile", - get(get_user_profile_handler).patch(update_user_profile_handler), - ) - .route( - "/user/profile/password", - patch(update_user_password_setting_handler), - ) - .route("/user/profile/avatar", post(update_user_avatar_handler)); - // Route for public api - let public_series_api_routes = Router::new() - .route("/series/most-viewed", get(fetch_most_viewed_series_handler)) - .route("/series/new-series", get(fetch_new_series_handler)) - .route( - "/series/latest-updated-series", - get(fetch_updated_series_chapter_handler), - ) - .route("/series/tags", get(get_all_categories_handler)) - .route("/series/browse", get(browse_series_handler)) - .route( - "/series/details/{id}", - get(fetch_series_details_by_id_handler), - ) - .route( - "/series/{id}/chapter/{chapter_number}", - get(fetch_chapter_details_handler), - ) - .route("/series/{id}/rate", post(rate_series_handler)) - .route("/series/{id}/views-count", post(record_series_view_handler)) - .route( - "/series/{id}/bookmark", - post(add_bookmark_series_handler) - .delete(delete_bookmark_series_handler), - ) - .route( - "/series/{id}/bookmark/status", - get(get_bookmark_status_current_user_handler), - ); - - // Router comments related api - let comments_api = Router::new() - .route( - "/series/{id}/comments", - get(get_series_comment_handler).post(post_series_comment_handler), - ) - .route( - "/series/chapter/{id}/comments", - get(get_chapter_comment_handler).post(post_chapter_comment_handler), - ) - .route( - "/comments/{id}/edit", - patch(update_existing_comment_handler), - ) - .route("/comments/{id}/vote", post(vote_on_comment_handler)) - .route( - "/comments/attachments/upload", - post(upload_comment_attachments_handler), - ); - - // Merge same prefix "/api" routes into one - let general_api = - public_series_api_routes.merge(user_api).merge(comments_api); +use crate::api::admin::admin_routes::admin_routes; +use crate::api::public::public_routes::{auth_api_routes, general_api_routes}; +use crate::builder::startup::AppState; +// Merge all routes into one +pub fn merged_routes() -> Router { // Combine routers under prefix "/api" Router::new() .nest("/api/admin", admin_routes()) - .nest("/api/auth", auth_api_routes) - 
.nest("/api", general_api) + .nest("/api/auth", auth_api_routes()) + .nest("/api", general_api_routes()) } diff --git a/backend/src/api/series_handlers.rs b/backend/src/api/series_handlers.rs deleted file mode 100644 index 3447204..0000000 --- a/backend/src/api/series_handlers.rs +++ /dev/null @@ -1,874 +0,0 @@ -use crate::api::extractor::{AuthenticatedUser, OptionalAuthenticatedUser}; -use crate::builder::startup::AppState; -use crate::database::{ - CategoryTag, Comment, CommentEntityType, Series, SeriesChapter, - SeriesOrderBy, VotePayload, -}; -use axum::Json; -use axum::extract::{Path, Query, State}; -use axum_core::__private::tracing::error; -use axum_core::response::{IntoResponse, Response}; -use axum_extra::extract::Multipart; -use reqwest::StatusCode; -use serde::de::{Deserializer, Error}; -use serde::{Deserialize, Serialize}; -use uuid::Uuid; - -#[derive(Deserialize)] -pub struct MostViewedParams { - #[serde(default = "default_period")] - period: String, - #[serde(default = "default_limit")] - limit: i64, -} - -fn default_period() -> String { - "week".to_string() -} - -fn default_limit() -> i64 { - 20 -} - -pub async fn fetch_most_viewed_series_handler( - State(state): State, - Query(params): Query, -) -> Response { - // Map the user-friendly period string - let period_str = match params.period.to_lowercase().as_str() { - "hour" => "1 hour", - "day" => "1 day", - "week" => "1 week", - "month" => "1 month", - _ => "1 days", - }; - - match state - .db_service - .fetch_most_viewed_series(period_str, params.limit) - .await - { - Ok(series) => (StatusCode::OK, Json(series)).into_response(), - Err(e) => { - eprintln!("Error fetching most viewed series: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve most viewed series."})), - ).into_response() - } - } -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct SeriesDataResponse { - series: Series, - chapters: Vec, - authors: Vec, - category_tags: Vec, -} - -// Fetch all details for a single series -pub async fn fetch_series_details_by_id_handler( - State(state): State, - Path(id): Path, -) -> Response { - let db = &state.db_service; - - let series = match db.get_series_by_id(id).await { - Ok(Some(s)) => s, - Ok(None) => { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({"status": "error", "message": "Series not found."})), - ) - .into_response(); - } - Err(e) => { - error!("Error fetching series details: {}", e); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve series details."})), - ) - .into_response(); - } - }; - - let series_id = series.id; - - // Fetch authors, chapters, and categories tag in parallel - let (authors_result, chapters_result, categories_result) = tokio::join!( - db.get_authors_by_series_id(series_id), - db.get_chapters_by_series_id(series_id), - db.get_category_tag_by_series_id(series_id), - ); - - let response_data = SeriesDataResponse { - series, - authors: authors_result.unwrap_or_default(), - chapters: chapters_result.unwrap_or_default(), - category_tags: categories_result.unwrap_or_default(), - }; - - (StatusCode::OK, Json(response_data)).into_response() -} - -pub async fn fetch_new_series_handler( - State(state): State, -) -> Response { - match state - .db_service - .get_public_series_paginated(1, 20, SeriesOrderBy::CreatedAt) - .await - { - Ok(series) => (StatusCode::OK, Json(series)).into_response(), - Err(e) => { - 
eprintln!("Error fetching new series: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve new series."})), - ).into_response() - } - } -} - -#[derive(Deserialize)] -pub struct PaginationParams { - #[serde(default = "default_page")] - page: u32, - #[serde(default = "default_pagesize")] - page_size: u32, - #[serde(default)] - search: Option, -} - -fn default_page() -> u32 { - 1 -} -fn default_pagesize() -> u32 { - 50 -} - -pub async fn fetch_updated_series_chapter_handler( - State(state): State, - Query(params): Query, -) -> Response { - match state - .db_service - .get_latest_release_series_chapter_paginated( - params.page, - params.page_size, - ) - .await - { - Ok(paginated_result) => { - (StatusCode::OK, Json(paginated_result)).into_response() - } - Err(e) => { - error!("Error fetching updated series: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve updated series."})), - ) - .into_response() - } - } -} - -#[derive(Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ChapterDetailsResponse { - series_title: String, - chapter_title: Option, - chapter_id: i32, - chapter_number: f32, - pages: Vec, - all_chapters: Vec, - prev_chapter_number: Option, - next_chapter_number: Option, -} - -pub async fn fetch_chapter_details_handler( - State(state): State, - Path((series_id, chapter_number)): Path<(i32, f32)>, -) -> Response { - println!( - "->> {:<12} - fetch_chapter_images - series_id: {}, chapter: {}", - "HANDLER", series_id, chapter_number - ); - - let db = &state.db_service; - let base_url = state.storage_client.domain_cdn_url(); - - let (series_result, all_chapters_result, images_result) = tokio::join!( - db.get_series_by_id(series_id), - db.get_chapters_by_series_id(series_id), - db.get_images_urls_for_chapter_series(series_id, chapter_number), - ); - - // Get series title - let series = match series_result { - Ok(Some(s)) => s, - _ => return (StatusCode::NOT_FOUND, Json(serde_json::json!({"status": "error", "message": "Series not found."})), - ).into_response(), - }; - - // Get chapter images list - let object_keys = match images_result { - Ok(img_chap) => img_chap, - Err(e) => { - error!("Error fetching chapter images: {}", e); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve chapter images."})), - ).into_response(); - } - }; - - let pages = object_keys - .into_iter() - .map(|key| format!("{}/{}", base_url, key)) - .collect(); - - // Get all chapters for the series and find current, next and previous chapters - let all_chapters = match all_chapters_result { - Ok(mut chaps) => { - chaps.sort_by(|a, b| { - a.chapter_number.partial_cmp(&b.chapter_number).unwrap() - }); - chaps - } - Err(e) => { - error!("Error fetching chapter list: {}", e); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve chapter list."})), - ).into_response(); - } - }; - - let current_chapter_idx = all_chapters - .iter() - .position(|c| c.chapter_number == chapter_number); - - let current_chapter_index = match current_chapter_idx { - Some(index) => index, - None => { - return ( - StatusCode::NOT_FOUND, - Json(serde_json::json!({"status": "error", "message": "Chapter not found in this series."})), - ).into_response(); - } - }; - - let current_chapter = &all_chapters[current_chapter_index]; - - let prev_chapter_number = if 
current_chapter_index > 0 { - all_chapters - .get(current_chapter_index - 1) - .map(|c| c.chapter_number) - } else { - None - }; - - let next_chapter_number = all_chapters - .get(current_chapter_index + 1) - .map(|c| c.chapter_number); - - let response_data = ChapterDetailsResponse { - series_title: series.title, - chapter_title: current_chapter.title.clone(), - chapter_id: current_chapter.id, - chapter_number, - pages, - all_chapters, - prev_chapter_number, - next_chapter_number, - }; - - (StatusCode::OK, Json(response_data)).into_response() -} - -pub async fn record_series_view_handler( - State(state): State, - Path(series_id): Path, -) -> Response { - println!( - "->> {:<12} - record_series_view - series_id: {}", - "HANDLER", series_id - ); - - let db = &state.db_service; - - match db.record_series_view(series_id).await { - Ok(_) => ( - StatusCode::OK, - Json(serde_json::json!({"status": "success", "message": "View Recorded."})), - ) - .into_response(), - Err(e) => { - error!("Error recording series view for id {}: {}", series_id, e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not record series view."})), - ) - .into_response() - } - } -} - -#[derive(Deserialize)] -pub struct RateSeriesPayload { - rating: i16, -} -#[derive(Serialize)] -pub struct RateSeriesResponse { - message: String, - new_total_score: i64, - new_total_count: i32, -} - -pub async fn rate_series_handler( - State(state): State, - user: AuthenticatedUser, - Path(series_id): Path, - Json(payload): Json, -) -> Response { - // validate rating value - if !(1..=5).contains(&payload.rating) { - return ( - StatusCode::BAD_REQUEST, - Json( - serde_json::json!({"error": "Rating must be between 1 and 5"}), - ), - ) - .into_response(); - } - - match state - .db_service - .add_or_update_series_rating(series_id, payload.rating, user.id) - .await - { - Ok(_) => { - match state.db_service.get_series_by_id(series_id).await { - Ok(Some(series)) => { - let response = RateSeriesResponse { - message: "Rating submitted".to_string(), - new_total_score: series.total_rating_score, - new_total_count: series.total_ratings_count - }; - (StatusCode::OK, Json(response)).into_response() - } - _ => ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed for retrieve updated series data"})), - ).into_response(), - } - } - Err(e) => { - error!("Failed to proess rating: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": format!("Failed to update rating: {}", e)})), - ).into_response() - } - } -} - -// Fetch series comments -pub async fn get_series_comment_handler( - State(state): State, - Path(series_id): Path, - user: OptionalAuthenticatedUser, -) -> Response { - let user_id = user.0.map(|u| u.id); - match state - .db_service - .get_comments(CommentEntityType::Series, series_id, user_id) - .await - { - Ok(mut comments) => { - let base_url = state.storage_client.domain_cdn_url(); - - let mut stack: Vec<&mut Comment> = comments.iter_mut().collect(); - while let Some(comment) = stack.pop() { - if let Some(urls) = &mut comment.attachment_urls { - // Iterate over each URL string mutably. - for url in urls.iter_mut() { - // Prepend the base_url to the existing URL string. 
- *url = format!("{}/{}", base_url, url); - } - } - - stack.extend(comment.replies.iter_mut()); - } - - (StatusCode::OK, Json(comments)).into_response() - } - Err(e) => { - error!( - "[SERIES] Failed to get comments for series {}: {}", - series_id, e - ); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed to get comments for series"})), - ).into_response() - } - } -} - -// Fetch chapter comments -pub async fn get_chapter_comment_handler( - State(state): State, - Path(chapter_id): Path, - user: OptionalAuthenticatedUser, -) -> Response { - let user_id = user.0.map(|u| u.id); - match state - .db_service - .get_comments(CommentEntityType::SeriesChapters, chapter_id, user_id) - .await - { - Ok(mut comments) => { - let base_url = state.storage_client.domain_cdn_url(); - - let mut stack: Vec<&mut Comment> = comments.iter_mut().collect(); - while let Some(comment) = stack.pop() { - if let Some(urls) = &mut comment.attachment_urls { - for url in urls.iter_mut() { - *url = format!("{}{}", base_url, url); - } - } - stack.extend(comment.replies.iter_mut()); - } - (StatusCode::OK, Json(comments)).into_response() - } - Err(e) => { - error!( - "[CHAPTERS] Failed to get comments for chapter {}: {}", - chapter_id, e - ); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed to get comment for chapter"})), - ).into_response() - } - } -} - -// Helper function for post new comment handler -async fn new_comment_submission_handler( - state: AppState, - user: AuthenticatedUser, - mut multipart: Multipart, - entity_type: CommentEntityType, - entity_id: i32, -) -> Response { - let mut content_markdown = None; - let mut parent_id: Option = None; - let mut attachment_data: Vec<(Vec, String, String)> = Vec::new(); - - while let Ok(Some(field)) = multipart.next_field().await { - if let Some(field_name) = field.name() { - match field_name { - "content_markdown" => { - content_markdown = field.text().await.ok() - } - "parent_id" => { - if let Ok(text) = field.text().await { - parent_id = text.parse::().ok(); - } - } - "images" => { - let file_name = field.file_name().unwrap_or("").to_string(); - let content_type = field - .content_type() - .unwrap_or("application/octet-stream") - .to_string(); - if let Ok(data) = field.bytes().await { - if data.len() > 5 * 1024 * 1024 { - return (StatusCode::BAD_REQUEST, Json(serde_json::json!({"message": "File size exceeds 5MB"}))).into_response(); - } - attachment_data.push(( - data.to_vec(), - file_name, - content_type, - )); - } - } - _ => (), - } - } - } - - // Validation - let content_markdown_str = content_markdown.unwrap_or_default(); - if content_markdown_str.is_empty() && attachment_data.is_empty() { - return (StatusCode::BAD_REQUEST, Json(serde_json::json!({"message": "Comment must have content or an attachment."}))).into_response(); - } - - // Upload file if any - let mut attachment_keys: Vec = Vec::new(); - for (file_data, file_name, content_type) in attachment_data { - let file_extension = std::path::Path::new(&file_name) - .extension() - .and_then(std::ffi::OsStr::to_str) - .unwrap_or(""); - - let unique_key = format!( - "comments/{}/{}.{}", - user.id, - Uuid::new_v4(), - file_extension - ); - - if let Err(e) = state - .storage_client - .upload_image_file(file_data, &unique_key, &content_type) - .await - { - error!("Failed to upload comment attachment: {}", e); - return (StatusCode::INTERNAL_SERVER_ERROR, Json(serde_json::json!({"error": "Failed to upload file attachment"}))).into_response(); - } - 
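Aside: the upload path here derives a collision-free object key per user. Extracted as a standalone helper (the helper name is hypothetical; the key format is taken verbatim from the code), the scheme is:

```rust
// Sketch of the object-key scheme used for comment attachments;
// assumes the `uuid` crate with the v4 feature.
use uuid::Uuid;

fn unique_attachment_key(user_id: i32, file_name: &str) -> String {
    let ext = std::path::Path::new(file_name)
        .extension()
        .and_then(std::ffi::OsStr::to_str)
        .unwrap_or("");
    // e.g. "comments/42/550e8400-e29b-41d4-a716-446655440000.png"
    format!("comments/{}/{}.{}", user_id, Uuid::new_v4(), ext)
}
```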
attachment_keys.push(unique_key); - } - - // Create the new comment using the provided entity_type and entity_id - let new_comment_id = match state - .db_service - .create_new_comment( - user.id, - entity_type, - entity_id, - &content_markdown_str, - parent_id, - &attachment_keys, - ) - .await - { - Ok(id) => id, - Err(e) => { - error!( - "Failed to create comment for entity type {:?} and ID {}: {}", - entity_type, entity_id, e - ); - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed to create comment for series"})), - ) - .into_response(); - } - }; - - // After creating, fetch the full data for the new comment - match state - .db_service - .get_comment_by_id(new_comment_id, Some(user.id)) - .await - { - // If fetch is successful, return the full comment object - Ok(Some(mut new_comment)) => { - if let Some(urls) = &mut new_comment.attachment_urls { - let base_url = state.storage_client.domain_cdn_url(); - for url in urls.iter_mut() { - *url = format!("{}{}", base_url, url); - } - } - (StatusCode::OK, Json(new_comment)).into_response() - }, - // Handle cases where the comment couldn't be fetched right after creation - Ok(None) | Err(_) => ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Comment created but failed to retrieve its data"})), - ) - .into_response(), - } -} - -// Post new comment to a specific series page -pub async fn post_series_comment_handler( - State(state): State, - user: AuthenticatedUser, - Path(series_id): Path, - multipart: Multipart, -) -> Response { - println!( - "->> {:<12} - record_series_view - series_id: {:?}", - "HANDLER", series_id - ); - - new_comment_submission_handler( - state, - user, - multipart, - CommentEntityType::Series, - series_id, - ) - .await -} - -// Post a new comment to a specific chapter -pub async fn post_chapter_comment_handler( - State(state): State, - user: AuthenticatedUser, - Path(chapter_id): Path, - multipart: Multipart, -) -> Response { - println!( - "->> {:<12} - record_series_view - series_id: {:?}", - "HANDLER", chapter_id - ); - - new_comment_submission_handler( - state, - user, - multipart, - CommentEntityType::SeriesChapters, - chapter_id, - ) - .await -} - -pub async fn upload_comment_attachments_handler( - State(state): State, - user: AuthenticatedUser, - mut multipart: Multipart, -) -> Response { - if let Ok(Some(field)) = multipart.next_field().await { - let content_type = field - .content_type() - .unwrap_or("application/octet-stream") - .to_string(); - let file_name = field.file_name().unwrap_or("").to_string(); - - let file_data = match field.bytes().await { - Ok(bytes) => bytes.to_vec(), - Err(e) => { - return ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"message": format!("Failed to read file: {}", e)})), - ) - .into_response(); - } - }; - - const MAX_FILE_SIZE: usize = 5 * 1024 * 1024; - if file_data.len() > MAX_FILE_SIZE { - return ( - StatusCode::BAD_REQUEST, - Json(serde_json::json!({"message": "File size cannot exceed 5MB"})), - ) - .into_response(); - } - - let file_extension = std::path::Path::new(&file_name) - .extension() - .and_then(std::ffi::OsStr::to_str) - .unwrap_or(""); - - let unique_image_key = format!( - "comments/{}/{}.{}", - user.id, - Uuid::new_v4(), - file_extension - ); - - match state - .storage_client - .upload_image_file(file_data, &unique_image_key, &content_type) - .await - { - Ok(url) => (StatusCode::OK, Json(serde_json::json!({"url": url}))) - .into_response(), - Err(e) => { - error!("Failed to upload 
comment attachment: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed to upload file"})), - ) - .into_response() - } - } - } else { - ( - StatusCode::BAD_REQUEST, - Json( - serde_json::json!({"message": "No file found in the request."}), - ), - ) - .into_response() - } -} - -#[derive(Deserialize)] -pub struct UpdateCommentPayload { - pub content_markdown: String, -} - -pub async fn update_existing_comment_handler( - State(state): State, - user: AuthenticatedUser, - Path(comment_id): Path, - Json(payload): Json, -) -> Response { - match state - .db_service - .update_existing_comment(comment_id, user.id, &payload.content_markdown) - .await - { - Ok(Some(_)) => { - match state - .db_service - .get_comment_by_id(comment_id, Some(user.id)) - .await - { - Ok(Some(updated_comment)) => { - ( - StatusCode::OK, - Json(updated_comment), - ).into_response() - } - _ => ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"message": "Comment updated but failed to retrieve new data"})), - ) - .into_response(), - } - } - Ok(None) => { - (StatusCode::NOT_FOUND, Json(serde_json::json!({"message": "Comment not found or permission denied"}))).into_response() - } - Err(e) => { - error!( - "Failed to update existing comment with id {}: {}", - comment_id, e - ); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Could not update comment"})), - ) - .into_response() - } - } -} - -pub async fn vote_on_comment_handler( - State(state): State, - user: AuthenticatedUser, - Path(comment_id): Path, - Json(payload): Json, -) -> Response { - match state - .db_service - .vote_on_comment(comment_id, user.id, payload.vote_type) - .await - { - Ok(response_data) => { - (StatusCode::OK, Json(response_data)).into_response() - } - Err(e) => { - error!("Failed to vote on comment {}: {}", comment_id, e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"error": "Failed to vote on comment {}"})), - ) - .into_response() - } - } -} - -pub async fn get_all_categories_handler( - State(state): State, -) -> Response { - match state.db_service.get_list_all_categories().await { - Ok(categories) => (StatusCode::OK, Json(categories)).into_response(), - Err(e) => { - error!("Failed to get list of all categories: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve categories."})), - ) - .into_response() - } - } -} - -fn deserialize_i32_vec<'de, D>( - deserializer: D, -) -> Result>, D::Error> -where - D: Deserializer<'de>, -{ - let s: Option = Option::deserialize(deserializer)?; - match s { - Some(s) if !s.is_empty() => { - let result: Result, _> = s - .split(',') - .map(str::trim) - .filter(|part| !part.is_empty()) - .map(str::parse) - .collect(); - - match result { - Ok(v) if !v.is_empty() => Ok(Some(v)), - Ok(_) => Ok(None), - Err(e) => Err(Error::custom(e)), - } - } - _ => Ok(None), - } -} - -#[derive(Debug, Deserialize)] -pub struct BrowseParams { - #[serde(default = "default_page")] - page: u32, - #[serde(default = "default_pagesize")] - page_size: u32, - order_by: Option, - #[serde(default, deserialize_with = "deserialize_i32_vec")] - include: Option>, - #[serde(default, deserialize_with = "deserialize_i32_vec")] - exclude: Option>, -} - -pub async fn browse_series_handler( - State(state): State, - Query(params): Query, -) -> Response { - let order_by = match params.order_by.as_deref() { - Some("new") => SeriesOrderBy::CreatedAt, - Some("updated") => 
SeriesOrderBy::UpdatedAt, - Some("views") => SeriesOrderBy::ViewsCount, - Some("ratings") => SeriesOrderBy::Rating, - _ => SeriesOrderBy::UpdatedAt, - }; - - let include_ids = params.include.as_deref().unwrap_or(&[]); - let exclude_ids = params.exclude.as_deref().unwrap_or(&[]); - - match state - .db_service - .browse_series_paginated_with_filters( - params.page, - params.page_size, - order_by, - include_ids, - exclude_ids, - ) - .await - { - Ok(paginated_result) => { - (StatusCode::OK, Json(paginated_result)).into_response() - } - Err(e) => { - error!("Failed to browse_series: {}", e); - ( - StatusCode::INTERNAL_SERVER_ERROR, - Json(serde_json::json!({"status": "error", "message": "Could not retrieve series."})), - ).into_response() - } - } -} diff --git a/backend/src/builder/config_sites_watcher.rs b/backend/src/builder/config_sites_watcher.rs index 73698d0..e54cef6 100644 --- a/backend/src/builder/config_sites_watcher.rs +++ b/backend/src/builder/config_sites_watcher.rs @@ -1,16 +1,15 @@ -use crate::scraping::model::SitesConfig; +use std::path::Path; +use std::sync::Arc; +use std::time::Duration; + use arc_swap::ArcSwap; use notify::Error; use notify_debouncer_full::notify::{EventKind, RecursiveMode}; use notify_debouncer_full::{DebouncedEvent, new_debouncer}; -use std::path::Path; -use std::sync::Arc; -use std::time::Duration; -pub async fn config_sites_watcher( - config_path: String, - config_swap: Arc<ArcSwap<SitesConfig>>, -) { +use crate::scraping::model::SitesConfig; + +pub async fn config_sites_watcher(config_path: String, config_swap: Arc<ArcSwap<SitesConfig>>) { println!("[CONFIG-WATCHER] Watch file {}", config_path); let path = Path::new(&config_path); @@ -32,9 +31,7 @@ pub async fn config_sites_watcher( match SitesConfig::load(&clone_config_path) { Ok(new_config) => { config_swap.store(Arc::new(new_config)); - println!( - "[CONFIG-WATCHER] Config store and reloaded successfully" - ); + println!("[CONFIG-WATCHER] Config stored and reloaded successfully"); } Err(e) => { eprintln!( diff --git a/backend/src/builder/startup.rs b/backend/src/builder/startup.rs index e1e052f..98abb04 100644 --- a/backend/src/builder/startup.rs +++ b/backend/src/builder/startup.rs @@ -1,22 +1,25 @@ -use crate::api; -use crate::builder::config_sites_watcher::config_sites_watcher; -use crate::database::DatabaseService; -use crate::database::storage::StorageClient; -use crate::scraping::model::SitesConfig; -use crate::task_workers::channels::{OnDemandChannels, setup_worker_channels}; -use arc_swap::ArcSwap; -use axum::http::{HeaderValue, Method, header}; -use axum::{Router, serve}; -use lettre::AsyncSmtpTransport; -use reqwest::Client; use std::env; use std::sync::Arc; use std::time::Duration; + +use arc_swap::ArcSwap; +use axum::http::{header, HeaderValue, Method}; +use axum::{serve, Router}; +use lettre::AsyncSmtpTransport; +use reqwest::Client; use tokio::net::TcpListener; use tokio::signal; use tower::ServiceBuilder; +use tower_http::compression::CompressionLayer; +use tower_http::cors::CorsLayer; use tower_http::timeout::TimeoutLayer; -use tower_http::{compression::CompressionLayer, cors::CorsLayer}; + +use crate::api; +use crate::builder::config_sites_watcher::config_sites_watcher; +use crate::database::storage::StorageClient; +use crate::database::DatabaseService; +use crate::scraping::model::SitesConfig; +use crate::task_workers::channels::{setup_worker_channels, OnDemandChannels}; // Type definition for Mailer pub type Mailer = AsyncSmtpTransport; @@ -69,8 +72,8 @@ pub async fn run( }; // CORS Configuration - let frontend_origin =
env::var("FRONTEND_ORIGIN") - .unwrap_or_else(|_| "http://localhost:1998".to_string()); + let frontend_origin = + env::var("FRONTEND_ORIGIN").unwrap_or_else(|_err| "http://localhost:1998".to_string()); let cors = CorsLayer::new() .allow_methods([ @@ -97,7 +100,7 @@ pub async fn run( // Setup App router // Initialize the router and attach the authentication routes let app = Router::new() - .merge(api::routes::routes()) + .merge(api::routes::merged_routes()) .layer( ServiceBuilder::new() .layer(CompressionLayer::new()) diff --git a/backend/src/common/dynamic_proxy.rs b/backend/src/common/dynamic_proxy.rs index f740e08..1a246be 100644 --- a/backend/src/common/dynamic_proxy.rs +++ b/backend/src/common/dynamic_proxy.rs @@ -1,7 +1,8 @@ +use std::time::Duration; + use anyhow::{Context, Result}; use rand::seq::IndexedRandom; use reqwest::{Client, Error as ReqwestError, Proxy}; -use std::time::Duration; // List of User-Agent strings to be chosen randomly. const USER_AGENT: &[&str] = &[ @@ -18,16 +19,13 @@ const USER_AGENT: &[&str] = &[ "Mozilla/5.0 (Linux; Android 16; SM-A102U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.7204.46 Mobile Safari/537.36", ]; -/// Returns a randomly selected User-Agent string from the `USER_AGENTS` list. pub fn get_random_user_agent() -> &'static str { - // Fallback to the first agent if choose fails (should not happen with a non-empty list). USER_AGENT .choose(&mut rand::rng()) .unwrap_or(&USER_AGENT[0]) } -/// Configuration for using proxies. -/// Currently, this is designed for future use as load_dynamic_proxy returns an empty list. +/// Configuration for using proxies (future needs) pub struct ProxyConfig { pub proxies: Vec, } @@ -38,8 +36,6 @@ impl ProxyConfig { Self { proxies } } - /// Returns a randomly selected proxy string from the list. - /// Returns `None` if the proxy list is empty. pub fn get_random_proxy_str(&self) -> Option<&String> { if self.proxies.is_empty() { None @@ -48,21 +44,17 @@ impl ProxyConfig { } } - /// Attempts to create a `reqwest::Proxy` object from a randomly selected proxy string. - /// Returns `None` if no proxy string is available. - /// Returns `Some(Result)` to indicate success or failure in creating the Proxy object. pub fn get_proxy_object(&self) -> Option> { self.get_random_proxy_str().map(|proxy_str| { - println!("[PROXY] Trying to use proxy from the list"); // Log which proxy is being attempted - Proxy::all(proxy_str) // This can return an error if the proxy string format is invalid + println!("[PROXY] Trying to use proxy from the list"); + Proxy::all(proxy_str) }) } } -/// Loads a list of proxy strings. -/// Currently, returns an empty vector indicating that dynamic proxies are not yet implemented. +/// Loads a list of proxy strings fn load_dynamic_proxy() -> Vec { - let proxies = vec![]; // Initialize as empty + let proxies = vec![]; // [INFO] Placeholder for future implementation of dynamic proxy loading. // Example: @@ -92,8 +84,8 @@ fn build_configured_http_client( let user_agent = get_random_user_agent(); let mut client_builder = Client::builder() .user_agent(user_agent) - .timeout(Duration::from_secs(30)) // General request timeout - .connect_timeout(Duration::from_secs(20)); // Connection establishment timeout + .timeout(Duration::from_secs(30)) + .connect_timeout(Duration::from_secs(20)); // [INFO] This is used for future dynamic proxy configuration. 
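Aside: the CORS wiring above reduces to roughly the following sketch; only the FRONTEND_ORIGIN fallback is taken from the diff, while the method and header lists are assumptions:

```rust
// Hedged sketch of the CorsLayer built in startup.rs.
use axum::http::{header, HeaderValue, Method};
use tower_http::cors::CorsLayer;

fn cors_layer() -> CorsLayer {
    let origin = std::env::var("FRONTEND_ORIGIN")
        .unwrap_or_else(|_err| "http://localhost:1998".to_string());
    CorsLayer::new()
        .allow_methods([Method::GET, Method::POST, Method::PATCH, Method::DELETE])
        .allow_headers([header::CONTENT_TYPE])
        .allow_credentials(true)
        .allow_origin(
            origin
                .parse::<HeaderValue>()
                .expect("FRONTEND_ORIGIN must be a valid header value"),
        )
}
```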
let mut using_proxy = false; @@ -105,8 +97,6 @@ fn build_configured_http_client( using_proxy = true; } Err(e) => { - // Log the error but continue without this specific proxy. - // The client will be built without a proxy or might use system proxy. eprintln!( "[HTTP Client Internal] Failed to configure proxy: {}. Continuing without a proxy.", e @@ -114,7 +104,6 @@ fn build_configured_http_client( } } else { - // No proxy string was available from ProxyConfig. println!("[HTTP CLIENT] No proxy string provided by ProxyConfig.") } } @@ -130,22 +119,19 @@ fn build_configured_http_client( user_agent ) } - client_builder.build() // This can also return ReqwestError + client_builder.build() } -/// Initializes and returns a reqwest::Client. /// This is the main public function for obtaining a configured HTTP client pub fn init_client() -> Result { println!("[HTTP Client] Initializing HTTP client..."); - let proxy_list = load_dynamic_proxy(); // Currently returns empty + let proxy_list = load_dynamic_proxy(); - // Only create ProxyConfig if proxy_list is not empty. let proxy_config = if !proxy_list.is_empty() { Some(ProxyConfig::new(proxy_list)) } else { None }; - build_configured_http_client(proxy_config.as_ref()) // Pass Option<&ProxyConfig> - .context("Failed to initialize HTTP client") // Context from anyhow for better error reporting + build_configured_http_client(proxy_config.as_ref()).context("Failed to initialize HTTP client") } diff --git a/backend/src/common/email_service.rs b/backend/src/common/email_service.rs index 60294d2..86cc3ed 100644 --- a/backend/src/common/email_service.rs +++ b/backend/src/common/email_service.rs @@ -1,8 +1,10 @@ -use crate::builder::startup::Mailer; +use std::env; + use anyhow::Context; use lettre::message::Mailbox; use lettre::{AsyncTransport, Message}; -use std::env; + +use crate::builder::startup::Mailer; // Function to send the password reset email pub async fn send_password_reset_email( @@ -11,8 +13,7 @@ pub async fn send_password_reset_email( recipient_username: &str, token: &str, ) -> anyhow::Result<()> { - let frontend_url = - env::var("FRONTEND_URL").expect("FRONTEND_URL must be set"); + let frontend_url = env::var("FRONTEND_URL").expect("FRONTEND_URL must be set"); let app_email = env::var("SMTP_USERNAME").expect("APP_EMAIL must be set"); // Construct the password reset link diff --git a/backend/src/common/error.rs b/backend/src/common/error.rs index 83471d3..b34047a 100644 --- a/backend/src/common/error.rs +++ b/backend/src/common/error.rs @@ -1,4 +1,5 @@ -use axum::{Json, http::StatusCode}; +use axum::http::StatusCode; +use axum::Json; use axum_core::response::{IntoResponse, Response}; // Custom error type definition @@ -6,7 +7,6 @@ pub enum AuthError { InvalidToken, WrongCredentials, MissingCredentials, - InvalidCredentials, TokenCreation, InvalidRefreshToken, InvalidCharacter(String), @@ -18,24 +18,13 @@ impl IntoResponse for AuthError { fn into_response(self) -> Response { let (status, error_message) = match self { - AuthError::WrongCredentials => { - (StatusCode::UNAUTHORIZED, "Invalid credentials") - } - AuthError::MissingCredentials => { - (StatusCode::BAD_REQUEST, "Missing credentials") - } - AuthError::InvalidCredentials => { - (StatusCode::BAD_REQUEST, "Missing credentials") - } + AuthError::WrongCredentials => (StatusCode::UNAUTHORIZED, "Invalid credentials"), + AuthError::MissingCredentials => (StatusCode::BAD_REQUEST, "Missing credentials"), AuthError::TokenCreation => { (StatusCode::INTERNAL_SERVER_ERROR, "Failed
to create token") } - AuthError::InvalidToken => { - (StatusCode::UNAUTHORIZED, "Invalid token") - } - AuthError::InvalidRefreshToken => { - (StatusCode::UNAUTHORIZED, "Invalid refresh token") - } + AuthError::InvalidToken => (StatusCode::UNAUTHORIZED, "Invalid token"), + AuthError::InvalidRefreshToken => (StatusCode::UNAUTHORIZED, "Invalid refresh token"), AuthError::InternalServerError => { (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error") } @@ -43,7 +32,7 @@ impl IntoResponse for AuthError { return ( StatusCode::CONFLICT, Json(serde_json::json!({"message": format!("{} Already exists", field) })), - ) + ) .into_response(); } AuthError::InvalidCharacter(message) => { diff --git a/backend/src/common/hashing.rs b/backend/src/common/hashing.rs index 201d77b..ce2bc73 100644 --- a/backend/src/common/hashing.rs +++ b/backend/src/common/hashing.rs @@ -1,6 +1,6 @@ use anyhow::Result; -use argon2::password_hash::SaltString; -use argon2::password_hash::{Error as PwHashError, rand_core::OsRng}; +use argon2::password_hash::rand_core::OsRng; +use argon2::password_hash::{Error as PwHashError, SaltString}; use argon2::{Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier}; /// Configuration for Argon2 parameters @@ -20,16 +20,13 @@ impl Default for ArgonConfig { } } -/// Hashes a password using Argon2id. -/// Returns the full hash string which includes the salt and parameters. +/// Hashes a password +/// Returns the full hash string which includes the salt and parameters pub fn hash_password(password: &str) -> Result { - // Get password bytes let password_bytes = password.as_bytes(); - // Generate random salt let salt = SaltString::generate(&mut OsRng); - // Define the argon2 parameters let config = ArgonConfig::default(); let params = Params::new( @@ -38,33 +35,20 @@ pub fn hash_password(password: &str) -> Result { config.parallelism, None, ) - .map_err(|_| PwHashError::ParamNameInvalid)?; + .map_err(|_err| PwHashError::ParamNameInvalid)?; - // Create the Argon2 instance with parameters - let argon2 = Argon2::new( - argon2::Algorithm::Argon2id, - argon2::Version::V0x13, - params, - ); + let argon2 = Argon2::new(argon2::Algorithm::Argon2id, argon2::Version::V0x13, params); - // Hash the password and return the resulting string Ok(argon2.hash_password(password_bytes, &salt)?.to_string()) } /// Verifies a password against a stored Argon2 hash -pub fn verify_password( - password: &str, - stored_hash: &str, -) -> Result { - // Get password bytes +pub fn verify_password(password: &str, stored_hash: &str) -> Result { let password_bytes = password.as_bytes(); - // Parse the hash string from the database let parsed_hash = PasswordHash::new(stored_hash)?; - // Verify the password. 
- let verification_result = - Argon2::default().verify_password(password_bytes, &parsed_hash); + let verification_result = Argon2::default().verify_password(password_bytes, &parsed_hash); match verification_result { Ok(()) => Ok(true), @@ -87,10 +71,10 @@ mod tests { // Ensure the hashing process was successful assert!(hash_result.is_ok()); - let hash = hash_result.unwrap(); + let hash = hash_result.unwrap_or_default(); println!("\nGenerated hash for '{}': {}\n", password, hash); - let is_valid = verify_password(password, &hash).unwrap(); + let is_valid = verify_password(password, &hash).unwrap_or_default(); assert!(is_valid); } } diff --git a/backend/src/common/jwt.rs b/backend/src/common/jwt.rs index e670b17..72e4c1b 100644 --- a/backend/src/common/jwt.rs +++ b/backend/src/common/jwt.rs @@ -1,18 +1,18 @@ -use crate::common::error::AuthError; -use axum::{extract::FromRequestParts, http::request::Parts}; +use std::env; +use std::sync::LazyLock; + +use axum::extract::FromRequestParts; +use axum::http::request::Parts; use axum_extra::extract::CookieJar; use chrono::{Duration, Utc}; -use jsonwebtoken::{ - Algorithm, DecodingKey, EncodingKey, Header, Validation, decode, encode, -}; +use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey, Header, Validation, decode, encode}; use serde::{Deserialize, Serialize}; -use std::env; -use std::sync::LazyLock; + +use crate::common::error::AuthError; // Secret key for JWT signing and encryption static KEYS: LazyLock = LazyLock::new(|| { - let secret_key = - env::var("JWT_SECRET_KEY").expect("JWT_SECRET_KEY must be set"); + let secret_key = env::var("JWT_SECRET_KEY").expect("JWT_SECRET_KEY must be set"); Keys::new(secret_key.as_bytes()) }); @@ -54,12 +54,11 @@ where S: Send + Sync, { type Rejection = AuthError; - async fn from_request_parts( - parts: &mut Parts, - state: &S, - ) -> Result { + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { // Extract cookie jar from request - let jar = CookieJar::from_request_parts(parts, state).await.unwrap(); + let jar = CookieJar::from_request_parts(parts, state) + .await + .map_err(|_err| AuthError::InvalidToken)?; // Get a Cookie named "token" let token_cookie = jar.get("token").ok_or(AuthError::InvalidToken)?; @@ -70,7 +69,7 @@ where validation.algorithms = vec![Algorithm::HS512]; let token_data = decode::(token, &KEYS.decoding, &validation) - .map_err(|_| AuthError::InvalidToken)?; + .map_err(|_err| AuthError::InvalidToken)?; Ok(token_data.claims) } @@ -82,33 +81,27 @@ where S: Send + Sync, { type Rejection = AuthError; - async fn from_request_parts( - parts: &mut Parts, - state: &S, - ) -> Result { - let jar = CookieJar::from_request_parts(parts, state).await.unwrap(); - - let refresh_token_cookie = - jar.get("refresh-token").ok_or(AuthError::InvalidToken)?; + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let jar = CookieJar::from_request_parts(parts, state) + .await + .map_err(|_err| AuthError::InvalidToken)?; + + let refresh_token_cookie = jar.get("refresh-token").ok_or(AuthError::InvalidToken)?; let refresh_token = refresh_token_cookie.value(); // Decode token with HS512 Algorithm let mut validation = Validation::default(); validation.algorithms = vec![Algorithm::HS512]; - let token_data = - decode::(refresh_token, &KEYS.decoding, &validation) - .map_err(|_| AuthError::InvalidToken)?; + let token_data = decode::(refresh_token, &KEYS.decoding, &validation) + .map_err(|_err| AuthError::InvalidToken)?; Ok(token_data.claims) } } /// Create jwt token for a given user 
ID and role (access token) -pub fn create_access_jwt( - user_id: String, - role: String, -) -> Result { +pub fn create_access_jwt(user_id: String, role: String) -> Result { let now = Utc::now(); let iat = now.timestamp() as usize; @@ -124,7 +117,7 @@ pub fn create_access_jwt( // Specify HS512 algorithm in the header encode(&Header::new(Algorithm::HS512), &claims, &KEYS.encoding) - .map_err(|_| AuthError::TokenCreation) + .map_err(|_err| AuthError::TokenCreation) } /// Create refresh jwt token for a given user ID (refresh token) @@ -142,5 +135,5 @@ pub fn create_refresh_jwt(user_id: String) -> Result { }; encode(&Header::new(Algorithm::HS512), &claims, &KEYS.encoding) - .map_err(|_| AuthError::TokenCreation) + .map_err(|_err| AuthError::TokenCreation) } diff --git a/backend/src/common/utils.rs b/backend/src/common/utils.rs index 21b4c0d..77f4a15 100644 --- a/backend/src/common/utils.rs +++ b/backend/src/common/utils.rs @@ -1,14 +1,12 @@ +use std::time::Duration; + use anyhow::{Context, Result}; use rand::Rng; -use std::time::Duration; use tokio::time::sleep; use url::Url; // Converts a relative URL string to an absolute URL string, given a base URL. -pub fn to_absolute_url( - base_url_str: &str, - relative_url_str: &str, -) -> Result { +pub fn to_absolute_url(base_url_str: &str, relative_url_str: &str) -> Result { let base_url = Url::parse(base_url_str) .with_context(|| format!("Base URL not valid: {}", base_url_str))?; @@ -19,11 +17,10 @@ pub fn to_absolute_url( ) })?; - Ok(absolute_url.into()) // .into() is equivalent to .to_string() for Url + Ok(absolute_url.into()) } -/// Pauses execution asynchronously for a random duration between `min_secs` and `max_secs`. -/// If `min_secs` is greater than or equal to `max_secs`, it sleeps for `min_secs`. +/// If `min_secs` is greater than or equal to `max_secs`, it sleeps for `min_secs` pub async fn random_sleep_time(min_secs: u64, max_secs: u64) { let sleep_duration_seconds = if min_secs >= max_secs { min_secs diff --git a/backend/src/database/auth.rs b/backend/src/database/auth.rs index 6ae97e0..109b00d 100644 --- a/backend/src/database/auth.rs +++ b/backend/src/database/auth.rs @@ -12,36 +12,27 @@ use super::*; /// For queries returning a single value (one row, one column). /// Highly efficient for this purpose. 
impl DatabaseService { - pub async fn get_role_id_by_name( - &self, - role_name: &str, - ) -> AnyhowResult> { - let role_id = sqlx::query_scalar!( - "SELECT id FROM roles WHERE role_name = $1", - role_name, - ) - .fetch_optional(&self.pool) - .await - .context("Failed to get role ID by name")?; + /// Fetch user role id by name + pub async fn get_role_id_by_name(&self, role_name: &str) -> AnyhowResult> { + let role_id = sqlx::query_scalar!("SELECT id FROM roles WHERE role_name = $1", role_name,) + .fetch_optional(&self.pool) + .await + .context("Failed to get role ID by name")?; Ok(role_id) } - pub async fn get_role_name_by_id( - &self, - role_id: i32, - ) -> AnyhowResult> { - let role_name = sqlx::query_scalar!( - "SELECT role_name FROM roles WHERE id = $1", - role_id, - ) - .fetch_optional(&self.pool) - .await - .context("Failed to get role name by ID")?; + /// Fetch user role name by id + pub async fn get_role_name_by_id(&self, role_id: i32) -> AnyhowResult> { + let role_name = sqlx::query_scalar!("SELECT role_name FROM roles WHERE id = $1", role_id,) + .fetch_optional(&self.pool) + .await + .context("Failed to get role name by ID")?; Ok(role_name) } + /// Create user password reset token pub async fn create_password_reset_token( &self, user_id: i32, @@ -54,25 +45,30 @@ impl DatabaseService { token, expires_at ) + .execute(&self.pool) + .await + .context("Failed to create password reset token")?; + + Ok(()) + } + + /// Delete user password reset token + pub async fn delete_password_reset_token(&self, token: &str) -> AnyhowResult<()> { + sqlx::query!("DELETE FROM password_reset_tokens WHERE token = $1", token) .execute(&self.pool) .await - .context("Failed to create password reset token")?; + .context("Failed to delete password reset token")?; Ok(()) } - pub async fn delete_password_reset_token( - &self, - token: &str, - ) -> AnyhowResult<()> { - sqlx::query!( - "DELETE FROM password_reset_tokens WHERE token = $1", - token - ) - .execute(&self.pool) - .await - .context("Failed to delete password reset token")?; + /// Cleanup expired password reset token + pub async fn cleanup_password_reset_token(&self) -> AnyhowResult { + let result = sqlx::query!("DELETE FROM password_reset_tokens WHERE expires_at < NOW()") + .execute(&self.pool) + .await + .context("Failed to cleanup password reset token")?; - Ok(()) + Ok(result.rows_affected()) } } diff --git a/backend/src/database/chapters.rs b/backend/src/database/chapters.rs index 3c4a629..13d06d5 100644 --- a/backend/src/database/chapters.rs +++ b/backend/src/database/chapters.rs @@ -31,9 +31,9 @@ impl DatabaseService { title, source_url, ) - .fetch_one(&self.pool) - .await - .context("Failed to add chapter with sqlx")?; + .fetch_one(&self.pool) + .await + .context("Failed to add chapter with sqlx")?; Ok(new_id) } @@ -42,14 +42,17 @@ impl DatabaseService { &self, chapter_id: i32, image_order: i32, - image_url: &str, // This will be the R2/CDN Url + image_url: &str, ) -> AnyhowResult { let new_id = sqlx::query_scalar!( - "INSERT INTO chapter_images (chapter_id, image_order, image_url) VALUES ($1, $2, $3) RETURNING id", + "INSERT INTO chapter_images (chapter_id, image_order, image_url) VALUES ($1, $2, $3) RETURNING id", chapter_id, image_order, image_url, - ).fetch_one(&self.pool).await.context("Failed to add chapter image with sqlx")?; + ) + .fetch_one(&self.pool) + .await + .context("Failed to add chapter image with sqlx")?; Ok(new_id) } @@ -71,9 +74,9 @@ impl DatabaseService { series_id, chapter_number, ) - .fetch_optional(&mut *tx) // Run query 
inside transaction - .await - .context("Failed to get chapter ID to delete")?; + .fetch_optional(&mut *tx) // Run query inside transaction + .await + .context("Failed to get chapter ID to delete")?; if let Some(chapter_id) = chapter_id_to_delete { sqlx::query!( @@ -84,13 +87,10 @@ impl DatabaseService { .await .context("Failed to delete chapter images")?; - let result = sqlx::query!( - "DELETE FROM series_chapters WHERE id = $1", - chapter_id - ) - .execute(&mut *tx) - .await - .context("Failed to delete chapter")?; + let result = sqlx::query!("DELETE FROM series_chapters WHERE id = $1", chapter_id) + .execute(&mut *tx) + .await + .context("Failed to delete chapter")?; // If transaction was successful, commit it tx.commit().await.context("Failed to commit transaction")?; @@ -123,8 +123,7 @@ impl DatabaseService { chapter_id, e ); // Return the error so the ? operator keeps working - Err(anyhow::anyhow!(e) - .context("Failed to update status chapter")) + Err(anyhow::anyhow!(e).context("Failed to update status chapter")) } } } diff --git a/backend/src/database/comments.rs b/backend/src/database/comments.rs index ca504e8..54e1baa 100644 --- a/backend/src/database/comments.rs +++ b/backend/src/database/comments.rs @@ -1,77 +1,189 @@ -use super::*; +use std::collections::{HashMap, HashSet}; + use ammonia::Builder; use anyhow::anyhow; use once_cell::sync::Lazy; -use pulldown_cmark::{Options, Parser, html}; +use pulldown_cmark::{html, Options, Parser}; use regex::Regex; -use std::collections::{HashMap, HashSet}; + +use super::*; static SPOILER_REGEX: Lazy<Regex> = - Lazy::new(|| Regex::new(r"\|\|(.*?)\|\|").unwrap()); + Lazy::new(|| Regex::new(r"\|\|(.*?)\|\|").expect("Spoiler regex pattern failed to compile")); impl DatabaseService { + fn map_flat_row(row: CommentFlatRow) -> Comment { + Comment { + id: row.id, + parent_id: row.parent_id, + content_html: row.content_html, + content_markdown: row.content_markdown, + created_at: row.created_at, + updated_at: row.updated_at, + user: CommentUser { + id: row.user_id, + username: row.user_username, + avatar_url: row.user_avatar_url, + }, + upvotes: row.upvotes, + downvotes: row.downvotes, + is_deleted: row.is_deleted, + current_user_vote: row.current_user_vote, + replies: Vec::new(), + attachment_urls: row.attachment_urls, + } + } + // Helper function to transform a flat list of comments into a nested tree structure. - fn nested_comment_tree(&self, rows: Vec<CommentFlatRow>) -> Vec<Comment> { + fn nested_comment_tree( + &self, + rows: Vec<CommentFlatRow>, + sort_by: CommentSort, + thread_root_id: Option<i64>, + ) -> Vec<Comment> { if rows.is_empty() { return Vec::new(); } // A map to hold all child comments, grouped by their parent's ID for efficient lookup. - let mut children_map: HashMap<i64, Vec<Comment>> = - HashMap::with_capacity(rows.len()); + let mut children_map: HashMap<i64, Vec<Comment>> = HashMap::with_capacity(rows.len()); // A vector to store the root-level comments. - let mut root_comments: Vec<Comment> = Vec::new(); + let mut root_comments: Vec<Comment> = Vec::new(); for row in rows { - let comment: Comment = row.into(); + let comment = Self::map_flat_row(row); - if let Some(parent_id) = comment.parent_id { + let is_parent_root = comment.parent_id.is_none(); + let is_thread_root = thread_root_id == Some(comment.id); + + if is_thread_root || is_parent_root { // If it's a reply, add it to the children_map, keyed by its parent's ID.
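Aside: the tree assembly continuing below buckets replies by parent id and then attaches them depth-first with an explicit stack instead of recursion. A standalone sketch of the technique, with a simplified node type standing in for the much richer Comment struct:

```rust
// Simplified illustration of nested_comment_tree's bucketing approach.
use std::collections::HashMap;

struct Node {
    id: i64,
    parent_id: Option<i64>,
    replies: Vec<Node>,
}

fn build_tree(rows: Vec<Node>) -> Vec<Node> {
    let mut children: HashMap<i64, Vec<Node>> = HashMap::new();
    let mut roots = Vec::new();
    for node in rows {
        match node.parent_id {
            Some(pid) => children.entry(pid).or_default().push(node),
            None => roots.push(node),
        }
    }
    // Depth-first attachment without recursion.
    let mut stack: Vec<&mut Node> = roots.iter_mut().collect();
    while let Some(parent) = stack.pop() {
        if let Some(kids) = children.remove(&parent.id) {
            parent.replies = kids;
            stack.extend(parent.replies.iter_mut());
        }
    }
    roots
}
```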
- children_map.entry(parent_id).or_default().push(comment); - } else { root_comments.push(comment); + } else if let Some(parent_id) = comment.parent_id { + children_map.entry(parent_id).or_default().push(comment); } } + // Sort root comments + match sort_by { + CommentSort::Newest => { + root_comments + .sort_unstable_by(|a, b| b.created_at.cmp(&a.created_at).then(b.id.cmp(&a.id))); + } + CommentSort::Oldest => { + root_comments + .sort_unstable_by(|a, b| a.created_at.cmp(&b.created_at).then(a.id.cmp(&b.id))); + } + CommentSort::TopVote => root_comments.sort_unstable_by(|a, b| { + let a_score = a.upvotes - a.downvotes; + let b_score = b.upvotes - b.downvotes; + b_score + .cmp(&a_score) + .then_with(|| b.created_at.cmp(&a.created_at)) + }), + } + let mut stack: Vec<&mut Comment> = root_comments.iter_mut().collect(); while let Some(parent) = stack.pop() { if let Some(mut children) = children_map.remove(&parent.id) { // Sort by creation date - children.sort_by_key(|c| c.created_at); + children + .sort_unstable_by(|a, b| a.created_at.cmp(&b.created_at).then(a.id.cmp(&b.id))); parent.replies = children; + // Push children to stack for child in &mut parent.replies { stack.push(child); } } } - root_comments.sort_by_key(|c| c.created_at); root_comments } + fn process_comment_markdown(&self, markdown: &str) -> AnyhowResult { + // Process spoiler markdown ||spoiler|| to + let processed_spoiler_markdown = + SPOILER_REGEX.replace_all(markdown, r#"$1"#); + + let mut options = Options::empty(); + options.insert(Options::ENABLE_STRIKETHROUGH); + + let parser = Parser::new_ext(&processed_spoiler_markdown, options); + + let mut unsafe_html = String::new(); + html::push_html(&mut unsafe_html, parser); + + // Replace with more efficient + let unsafe_html = unsafe_html + .replace("", "") + .replace("", ""); + + let allowed_tags = HashSet::from([ + "p", + "b", + "strong", + "em", + "a", + "code", + "pre", + "blockquote", + "ul", + "ol", + "li", + "h1", + "h2", + "h3", + "span", + ]); + + let mut allowed_classes = HashMap::new(); + // Allow class "spoiler" for tag + allowed_classes.insert("span", HashSet::from(["spoiler-hook"])); + + let sanitized_html = Builder::new() + .tags(allowed_tags) + .add_tag_attributes("a", &["href"]) + .allowed_classes(allowed_classes) + .link_rel(Some("nofollow noopener noreferrer")) + .clean(&unsafe_html) + .to_string(); + + Ok(sanitized_html) + } + pub async fn get_comments( &self, entity_type: CommentEntityType, entity_id: i32, current_user_id: Option, + sort_by: CommentSort, + thread_root_id: Option, ) -> AnyhowResult> { - let flat_comments: Vec = sqlx::query_as!( + println!("\n--- DEBUG COMMENT FETCH ---"); + println!("Entity Type: {:?}", entity_type); + println!("Entity ID: {}", entity_id); + println!("Thread Root ID (Target): {:?}", thread_root_id); + + let comments: Vec = sqlx::query_as!( CommentFlatRow, r#" WITH RECURSIVE comment_thread AS ( -- Anchor member: top-level comments SELECT * FROM comments - WHERE comments_type = $1 AND comments_id = $2 AND parent_id IS NULL AND deleted_at IS NULL + WHERE + CASE + WHEN $4::bigint IS NOT NULL THEN id = $4 + ELSE comments_type = $1 AND comments_id = $2 AND parent_id IS NULL + END UNION ALL -- Recursive member: replies to comments already in the thread SELECT c.* FROM comments c JOIN comment_thread ct ON c.parent_id = ct.id - WHERE c.deleted_at IS NULL ), vote_summary AS ( SELECT @@ -86,7 +198,7 @@ impl DatabaseService { -- Aggregate all attachment URLs for each comment into a JSON array SELECT comment_id, - json_agg(file_url) as 
attachment_urls + array_agg(file_url) as attachment_urls FROM comment_attachments WHERE comment_id IN (SELECT id FROM comment_thread) GROUP BY comment_id @@ -103,76 +215,27 @@ impl DatabaseService { up.avatar_url as "user_avatar_url", COALESCE(vs.upvotes, 0) as "upvotes!", COALESCE(vs.downvotes, 0) as "downvotes!", - cv.vote_type as "current_user_vote: _", - ats.attachment_urls as "attachment_urls: _" + (ct.deleted_at IS NOT NULL ) as "is_deleted!", + cv.vote_type as "current_user_vote", + ats.attachment_urls as "attachment_urls" FROM comment_thread ct JOIN users u ON ct.user_id = u.id LEFT JOIN user_profiles up ON u.id = up.user_id LEFT JOIN vote_summary vs ON ct.id = vs.comment_vote_id LEFT JOIN comment_votes cv ON ct.id = cv.comment_vote_id AND cv.user_id = $3 LEFT JOIN attachments_summary ats ON ct.id = ats.comment_id - ORDER BY ct.created_at ASC + -- ORDER BY ct.created_at ASC "#, entity_type as _, entity_id, - current_user_id + current_user_id, + thread_root_id ) - .fetch_all(&self.pool) - .await - .context("Failed to fetch comment thread")?; - - Ok(self.nested_comment_tree(flat_comments)) - } - - fn process_comment_markdown(&self, markdown: &str) -> AnyhowResult { - // Process spoiler markdown ||spoiler|| to - let processed_spoiler_markdown = SPOILER_REGEX - .replace_all(markdown, r#"$1"#); - - let mut options = Options::empty(); - options.insert(Options::ENABLE_STRIKETHROUGH); - - let parser = Parser::new_ext(&processed_spoiler_markdown, options); - - let mut unsafe_html = String::new(); - html::push_html(&mut unsafe_html, parser); - - // Replace with more efficient - let unsafe_html = unsafe_html - .replace("", "") - .replace("", ""); - - let allowed_tags = HashSet::from([ - "p", - "b", - "strong", - "em", - "a", - "code", - "pre", - "blockquote", - "ul", - "ol", - "li", - "h1", - "h2", - "h3", - "span", - ]); - - let mut allowed_classes = HashMap::new(); - // Allow class "spoiler" for tag - allowed_classes.insert("span", HashSet::from(["spoiler-hook"])); - - let sanitized_html = Builder::new() - .tags(allowed_tags) - .add_tag_attributes("a", &["href"]) - .allowed_classes(allowed_classes) - .link_rel(Some("nofollow noopener noreferrer")) - .clean(&unsafe_html) - .to_string(); + .fetch_all(&self.pool) + .await + .context("Failed to fetch comment thread")?; - Ok(sanitized_html) + Ok(self.nested_comment_tree(comments, sort_by, thread_root_id)) } // Function to add new comment on existing comment tree list @@ -181,7 +244,7 @@ impl DatabaseService { comment_id: i64, current_user_id: Option, ) -> AnyhowResult> { - let comment_row:Option = sqlx::query_as!( + let comment_row: Option = sqlx::query_as!( CommentFlatRow, r#" WITH vote_summary AS ( @@ -196,7 +259,7 @@ impl DatabaseService { attachments_summary AS ( SELECT comment_id, - json_agg(file_url) as attachment_urls + array_agg(file_url) as attachment_urls FROM comment_attachments WHERE comment_id = $1 GROUP BY comment_id @@ -213,8 +276,9 @@ impl DatabaseService { up.avatar_url as "user_avatar_url", COALESCE(vs.upvotes, 0) as "upvotes!", COALESCE(vs.downvotes, 0) as "downvotes!", - cv.vote_type as "current_user_vote: _", - ats.attachment_urls as "attachment_urls: _" + (c.deleted_at IS NOT NULL ) as "is_deleted!", + cv.vote_type as "current_user_vote?", + ats.attachment_urls as "attachment_urls?" 
FROM comments c JOIN users u ON c.user_id = u.id LEFT JOIN user_profiles up ON u.id = up.user_id @@ -226,11 +290,11 @@ impl DatabaseService { comment_id, current_user_id ) - .fetch_optional(&self.pool) - .await - .context("Failed to fetch comment by its id")?; + .fetch_optional(&self.pool) + .await + .context("Failed to fetch comment by its id")?; - Ok(comment_row.map(Comment::from)) + Ok(comment_row.map(Self::map_flat_row)) } pub async fn create_new_comment( @@ -270,13 +334,13 @@ impl DatabaseService { if !attachment_keys.is_empty() { for key in attachment_keys { sqlx::query!( - "INSERT INTO comment_attachments (comment_id, file_url) VALUES ($1, $2)", - new_comment_id, - key - ) - .execute(&mut *tx) - .await - .context("Failed to insert comment attachment")?; + "INSERT INTO comment_attachments (comment_id, file_url) VALUES ($1, $2)", + new_comment_id, + key + ) + .execute(&mut *tx) + .await + .context("Failed to insert comment attachment")?; } } @@ -290,19 +354,20 @@ impl DatabaseService { comment_id: i64, user_id: i32, new_content_markdown: &str, - ) -> AnyhowResult<Option<String>> { - let new_content_html = - self.process_comment_markdown(new_content_markdown)?; + ) -> AnyhowResult<Option<UpdateCommentResponse>> { + let new_content_html = self.process_comment_markdown(new_content_markdown)?; - let updated_html = sqlx::query_scalar!( + let updated_html = sqlx::query_as!( + UpdateCommentResponse, r#" UPDATE comments SET content_user_markdown = $1, content_html = $2, updated_at = NOW() WHERE id = $3 AND user_id = $4 - RETURNING content_html + RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as "is_deleted!" "#, new_content_markdown, new_content_html, @@ -316,6 +381,113 @@ impl DatabaseService { Ok(updated_html) } + pub async fn delete_comment( + &self, + comment_id: i64, + user_id: i32, + ) -> AnyhowResult<DeleteCommentResult> { + let mut tx = self + .pool + .begin() + .await + .context("Failed to start transaction")?; + + // Fetch all attachment keys associated with this comment *before* deletion. + let attachment_object_key: Vec<String> = sqlx::query_scalar!( + "SELECT file_url FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .fetch_all(&mut *tx) + .await + .context("Failed to fetch attachment keys")?; + + // Check if comment has replies + let has_replies: bool = sqlx::query_scalar!( + r#" + SELECT EXISTS( + SELECT 1 FROM comments WHERE parent_id = $1 AND deleted_at IS NULL + ) + "#, + comment_id + ) + .fetch_one(&mut *tx) + .await + .context("Failed to check for replies")? + .context("EXISTS query returned NULL, which should not happen")?; + + let rows_affected: u64; + + if has_replies { + let soft_delete_result = sqlx::query_as!( + UpdateCommentResponse, + r#" + UPDATE comments + SET + content_user_markdown = '', + content_html = '
<p>[Deleted]</p>
', -- Or any placeholder + deleted_at = NOW(), + updated_at = NOW() + WHERE id = $1 AND user_id = $2 + RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as "is_deleted!" + "#, + comment_id, + user_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to soft delete comment")?; + + if let Some(updated_comment) = soft_delete_result { + // Since this is an UPDATE, ON DELETE CASCADE won't trigger + // Manually delete the attachments + if let Err(e) = sqlx::query!( + "DELETE FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .execute(&mut *tx) + .await + { + tx.rollback().await.context("Failed to roll back transaction")?; + return Err(anyhow!(e).context("Failed to delete comment attachments")); + } + tx.commit().await.context("Failed to commit transaction")?; + Ok(DeleteCommentResult::SoftDeleted( + updated_comment, + attachment_object_key, + )) + } else { + tx.rollback().await.context("Failed to roll back transaction")?; + Ok(DeleteCommentResult::NotFound) + } + } else { + // No replies. We can safely delete the comment row. + // Attachments will be deleted automatically by `ON DELETE CASCADE` + let hard_delete_result = sqlx::query!( + r#" + DELETE FROM comments + WHERE id = $1 AND user_id = $2 + "#, + comment_id, + user_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete comment")?; + + rows_affected = hard_delete_result.rows_affected(); + + if rows_affected == 0 { + tx.rollback().await.context("Failed to roll back transaction")?; + return Ok(DeleteCommentResult::NotFound); + } + + tx.commit().await.context("Failed to commit transaction")?; + + Ok(DeleteCommentResult::HardDeleted(attachment_object_key)) + } + } + + // Vote on comment pub async fn vote_on_comment( &self, comment_id: i64, @@ -334,9 +506,9 @@ impl DatabaseService { comment_id, user_id ) - .fetch_optional(&mut *tx) - .await - .context("Failed to fetch comment vote")?; + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch comment vote")?; let mut final_user_vote: Option = Some(vote_type); @@ -348,9 +520,9 @@ impl DatabaseService { comment_id, user_id ) - .execute(&mut *tx) - .await - .context("Failed to delete comment")?; + .execute(&mut *tx) + .await + .context("Failed to delete comment vote")?; final_user_vote = None; } else { // New vote or changing vote @@ -364,9 +536,9 @@ impl DatabaseService { user_id, vote_type ) - .execute(&mut *tx) - .await - .context("Failed to insert comment")?; + .execute(&mut *tx) + .await + .context("Failed to insert comment vote")?; } // Recalculate new total votes for comment @@ -392,4 +564,127 @@ impl DatabaseService { current_user_vote: final_user_vote, }) } + + // Delete comment as admin + pub async fn admin_delete_comment( + &self, + comment_id: i64, + requestor_role_id: i32, + ) -> AnyhowResult<DeleteCommentResult> { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + let target_info = sqlx::query!( + r#" + SELECT + u.role_id, + c.user_id + FROM comments c + JOIN users u ON c.user_id = u.id + WHERE c.id = $1 + "#, + comment_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch comment info")?; + + let target_user_role_id = match target_info { + Some(record) => record.role_id, + None => { + return Ok(DeleteCommentResult::NotFound); + } + }; + + // Tiered permission validation + // Role: User=0, Mod=1, Admin=2, SuperAdmin=3 + let is_super_admin = requestor_role_id == 3; + + if !is_super_admin && requestor_role_id <= target_user_role_id { + tx.rollback().await?; + return
Ok(DeleteCommentResult::InsufficientPermissions); + } + + let attachment_object_key: Vec<String> = sqlx::query_scalar!( + "SELECT file_url FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .fetch_all(&mut *tx) + .await + .context("Failed to fetch attachment keys")?; + + let has_replies: bool = sqlx::query_scalar!( + r#" + SELECT EXISTS( + SELECT 1 FROM comments WHERE parent_id = $1 AND deleted_at IS NULL + ) + "#, + comment_id + ) + .fetch_one(&mut *tx) + .await + .context("Failed to check for replies")? + .context("EXISTS query returned NULL, which should not happen")?; + + let row_affected: u64; + + if has_replies { + let soft_delete_result = sqlx::query_as!( + UpdateCommentResponse, + r#" + UPDATE comments + SET + content_user_markdown = '', + content_html = '

[Removed by Mod]

', + deleted_at = NOW(), + updated_at = NOW() + WHERE id = $1 + RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as "is_deleted!" + "#, + comment_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to soft delete comment")?; + + if let Some(updated_comment) = soft_delete_result { + sqlx::query!( + "DELETE FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete comment attachments")?; + + tx.commit().await.context("Failed to comment deletion")?; + + Ok(DeleteCommentResult::SoftDeleted( + updated_comment, + attachment_object_key, + )) + } else { + tx.rollback().await.context("Failed to comment deletion")?; + Ok(DeleteCommentResult::NotFound) + } + } else { + let hard_delete_result = sqlx::query!("DELETE FROM comments WHERE id = $1", comment_id) + .execute(&mut *tx) + .await + .context("Failed to delete comment")?; + + row_affected = hard_delete_result.rows_affected(); + + if row_affected == 0 { + tx.rollback().await.context("Failed to delete comment")?; + return Ok(DeleteCommentResult::NotFound); + } + + tx.commit().await.context("Failed to commit transaction")?; + + Ok(DeleteCommentResult::HardDeleted(attachment_object_key)) + } + } } diff --git a/backend/src/database/mod.rs b/backend/src/database/mod.rs index 4fc0f70..bb7f62b 100644 --- a/backend/src/database/mod.rs +++ b/backend/src/database/mod.rs @@ -1,9 +1,10 @@ +use std::fmt; + use anyhow::{Context, Result as AnyhowResult}; use chrono::{DateTime, Utc}; use rand::prelude::*; use serde::{Deserialize, Serialize}; use sqlx::{FromRow, PgPool, Type}; -use std::fmt; use url::Url; pub mod auth; @@ -90,7 +91,7 @@ pub struct Series { } #[derive(Debug, Clone, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)] -#[sqlx(type_name = "series_status", rename_all = "PascalCase")] +#[sqlx(type_name = "chapter_status", rename_all = "PascalCase")] pub enum ChapterStatus { Processing, Available, @@ -110,33 +111,6 @@ pub struct SeriesChapter { pub created_at: DateTime, } -/// Strcuct represents a user record fetched from the database -#[derive(Debug, FromRow)] -pub struct Users { - pub id: i32, - pub username: String, - pub email: String, - pub password_hash: String, - pub role_id: i32, -} - -#[derive(Debug, FromRow, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct UserProfileDetails { - pub username: String, - pub email: String, - pub display_name: Option, - pub avatar_url: Option, -} - -#[derive(Debug, FromRow, Serialize)] -pub struct UserWithRole { - pub id: i32, - pub username: String, - pub email: String, - pub role_name: String, -} - #[derive(Debug)] pub struct NewSeriesData<'a> { pub title: &'a str, @@ -201,6 +175,12 @@ pub enum SeriesOrderBy { Rating, } +#[derive(Debug, FromRow, Serialize, Deserialize)] +pub struct CategoryTag { + pub id: i32, + pub name: String, +} + // Pagination parameters for fetching series list. 
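The tiered check in admin_delete_comment above reduces to one rule: a super admin may act on anyone, while everyone else must strictly outrank the comment's author. A minimal runnable sketch of that rule, assuming the role ids from the comment (User=0, Mod=1, Admin=2, SuperAdmin=3); the function name is illustrative, not part of this codebase:

fn can_admin_delete(requestor_role_id: i32, target_role_id: i32) -> bool {
    // A super admin bypasses the rank comparison entirely
    let is_super_admin = requestor_role_id == 3;
    is_super_admin || requestor_role_id > target_role_id
}

fn main() {
    assert!(can_admin_delete(3, 3)); // SuperAdmin may act even on an equal rank
    assert!(!can_admin_delete(2, 2)); // Admin vs Admin: equal rank is rejected
    assert!(can_admin_delete(2, 1)); // Admin vs Mod: strictly higher rank passes
}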
// Pagination parameters for fetching series list. #[derive(Debug, Serialize, Deserialize)] pub struct PaginatedResult { @@ -208,19 +188,32 @@ pub total_items: i64, } -#[derive(Debug, FromRow, Serialize, Deserialize)] -pub struct CategoryTag { +// User Search Paginated Series Struct +#[derive(Debug, Serialize, Deserialize, FromRow)] +pub struct UserSearchPaginatedSeries { pub id: i32, - pub name: String, + pub title: String, + pub original_title: Option<String>, + pub cover_image_url: String, + pub last_chapter_found_in_storage: Option, + pub updated_at: DateTime<Utc>, + #[sqlx(json)] + pub authors: serde_json::Value, } -// Most viewed series data for the public API. -#[derive(Debug, FromRow, Serialize)] -pub struct MostViewedSeries { +// Latest Release Series Struct +#[derive(Debug, Serialize, FromRow)] +pub struct LatestReleaseSeries { pub id: i32, pub title: String, + pub original_title: Option<String>, + #[sqlx(json)] + pub authors: serde_json::Value, pub cover_image_url: String, - pub view_count: Option, + pub description: String, + pub last_chapter_found_in_storage: Option, + pub updated_at: DateTime<Utc>, + pub chapter_title: Option<String>, } #[derive(Debug, FromRow, Serialize)] @@ -238,18 +231,17 @@ pub struct BrowseSeriesSearchResult { pub categories: serde_json::Value, } +// Most viewed series data for the public API. #[derive(Debug, FromRow, Serialize)] -pub struct BookmarkedSeries { +pub struct MostViewedSeries { pub id: i32, pub title: String, pub cover_image_url: String, - pub last_chapter_found_in_storage: Option, - pub updated_at: DateTime<Utc>, - pub chapter_title: Option<String>, + pub view_count: Option, } -#[derive(Debug, Serialize, FromRow)] -pub struct LatestReleaseSeries { +#[derive(Debug, FromRow, Serialize)] +pub struct BookmarkedSeries { pub id: i32, pub title: String, pub cover_image_url: String, @@ -258,6 +250,36 @@ pub struct LatestReleaseSeries { pub chapter_title: Option<String>, } +/// Struct represents a user record fetched from the database +#[derive(Debug, FromRow)] +pub struct Users { + pub id: i32, + pub username: String, + pub email: String, + pub password_hash: String, + pub role_id: i32, +} + +#[derive(Debug, FromRow, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UserProfileDetails { + pub username: String, + pub email: String, + pub display_name: Option<String>, + pub avatar_url: Option<String>, +} + +#[derive(Debug, FromRow, Serialize)] +pub struct UserWithRole { + pub id: i32, + pub username: String, + pub email: String, + pub role_name: String, + pub role_id: i32, + pub is_active: bool, +} + +// Comment struct #[derive(Debug, FromRow, Serialize, Clone)] pub struct Comment { pub id: i64, @@ -269,6 +291,7 @@ pub user: CommentUser, pub upvotes: i64, pub downvotes: i64, + pub is_deleted: bool, #[serde(skip_serializing_if = "Option::is_none")] pub current_user_vote: Option, #[serde(skip_serializing_if = "Vec::is_empty")] @@ -279,7 +302,7 @@ // Helper struct to map the flat comment result #[derive(Debug, FromRow)] -struct CommentFlatRow { +pub struct CommentFlatRow { id: i64, parent_id: Option<i64>, content_html: String, @@ -291,33 +314,9 @@ user_avatar_url: Option<String>, upvotes: i64, downvotes: i64, + is_deleted: bool, current_user_vote: Option, - attachment_urls: Option<serde_json::Value>, -} - -impl From<CommentFlatRow> for Comment { - fn from(row: CommentFlatRow) -> Self { - Comment { - id: row.id, - parent_id: row.parent_id, - content_html: row.content_html, - content_markdown: row.content_markdown, - created_at: row.created_at, - updated_at: row.updated_at, - user: CommentUser { - id: row.user_id, -
username: row.user_username, - avatar_url: row.user_avatar_url, - }, - upvotes: row.upvotes, - downvotes: row.downvotes, - current_user_vote: row.current_user_vote, - replies: Vec::new(), - attachment_urls: row - .attachment_urls - .and_then(|v| serde_json::from_value(v).ok()), - } - } + attachment_urls: Option>, } #[derive(Debug, FromRow, Serialize, Clone)] @@ -334,6 +333,14 @@ pub enum CommentEntityType { SeriesChapters, } +#[derive(Debug, Clone, Deserialize, Copy, Default)] +pub enum CommentSort { + #[default] + Newest, + Oldest, + TopVote, +} + #[derive(Debug, Deserialize)] pub struct NewCommentPayload { pub content_markdown: String, @@ -341,6 +348,22 @@ pub struct NewCommentPayload { pub attachments: Option>, } +#[derive(FromRow, Serialize, Debug)] +pub struct UpdateCommentResponse { + pub id: i64, + pub content_user_markdown: String, + pub content_html: String, + pub updated_at: DateTime, + pub is_deleted: bool, +} + +pub enum DeleteCommentResult { + HardDeleted(Vec), + SoftDeleted(UpdateCommentResponse, Vec), + NotFound, + InsufficientPermissions, +} + // Payload for voting on a comment. #[derive(Debug, Deserialize)] pub struct VotePayload { diff --git a/backend/src/database/series.rs b/backend/src/database/series.rs index 844ef89..971a5c9 100644 --- a/backend/src/database/series.rs +++ b/backend/src/database/series.rs @@ -1,9 +1,10 @@ +use anyhow::{anyhow, Context}; + use super::*; -use anyhow::{Context, anyhow}; /// Macros `sqlx::query!` /// For DML operations (INSERT, UPDATE, DELETE) or SELECTs, -/// where you're manually processing generic `sqlx::Row`s (anonymous struct). +/// where you're manually processing generic `sqlx::Row` (anonymous struct). /// /// Macros `sqlx::query_as!` /// For mapping SELECT results directly to a defined rust struct (`#[derive(FromRow)]`), @@ -13,10 +14,7 @@ use anyhow::{Context, anyhow}; /// For queries returning a single value (one row, one column). /// Highly efficient for this purpose. 
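A quick illustration of the split that the doc comment above describes — a sketch only, assuming a `pool: sqlx::PgPool` and a `user_id: i32` in scope; the `Users` struct is the one defined in mod.rs above:

// query!: DML, or SELECTs handled as an anonymous record
let rows = sqlx::query!("UPDATE users SET updated_at = NOW() WHERE id = $1", user_id)
    .execute(&pool)
    .await?
    .rows_affected();

// query_as!: map a SELECT directly onto a defined struct
let user = sqlx::query_as!(
    Users,
    "SELECT id, username, email, password_hash, role_id FROM users WHERE id = $1",
    user_id
)
.fetch_one(&pool)
.await?;

// query_scalar!: one row, one column (COUNT is inferred as a nullable i64)
let total: Option<i64> = sqlx::query_scalar!("SELECT COUNT(*) FROM users")
    .fetch_one(&pool)
    .await?;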
impl DatabaseService { - pub async fn add_new_series( - &self, - data: &NewSeriesData<'_>, - ) -> AnyhowResult { + pub async fn add_new_series(&self, data: &NewSeriesData<'_>) -> AnyhowResult { let mut tx = self .pool .begin() @@ -26,7 +24,8 @@ impl DatabaseService { let host = get_host_from_url(Some(data.source_url)); let new_series_id = sqlx::query_scalar!( - r#"INSERT INTO series + r#" + INSERT INTO series (title, original_title, description, cover_image_url, current_source_url, source_website_host, check_interval_minutes) VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING id"#, @@ -131,13 +130,10 @@ impl DatabaseService { .context("Failed to update series with sqlx")?; if let Some(author_names) = data.authors { - sqlx::query!( - "DELETE FROM series_authors WHERE series_id = $1", - series_id - ) - .execute(&mut *tx) - .await - .context("Failed to delete existing authors for series")?; + sqlx::query!("DELETE FROM series_authors WHERE series_id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete existing authors for series")?; for name in author_names { let author_id = sqlx::query_scalar!( @@ -156,10 +152,7 @@ impl DatabaseService { ) .fetch_one(&mut *tx) .await - .context(format!( - "Failed to find or create author: {}", - name - ))?; + .context(format!("Failed to find or create author: {}", name))?; sqlx::query!( "INSERT INTO series_authors (series_id, author_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", @@ -200,36 +193,34 @@ impl DatabaseService { Ok(result.rows_affected()) } - pub async fn get_series_by_id( - &self, - id: i32, - ) -> AnyhowResult> { + pub async fn get_series_by_id(&self, id: i32) -> AnyhowResult> { let series = sqlx::query_as!( Series, - r#"SELECT id, title, original_title, description, cover_image_url, current_source_url, - source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage, - processing_status as "processing_status: SeriesStatus", - check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at - FROM series WHERE id = $1"#, + r#" + SELECT id, title, original_title, description, cover_image_url, current_source_url, + source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, + last_chapter_found_in_storage, processing_status as "processing_status: SeriesStatus", + check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at + FROM series WHERE id = $1 + "#, id ) .fetch_optional(&self.pool) .await - .context("Failed to query series by ID with sqlx")?; // Handles cases where no row is found + .context("Failed to query series by ID with sqlx")?; Ok(series) } - pub async fn get_series_by_title( - &self, - title: &str, - ) -> AnyhowResult> { + pub async fn get_series_by_title(&self, title: &str) -> AnyhowResult> { let series = sqlx::query_as!( Series, - r#"SELECT id, title, original_title, description, cover_image_url, current_source_url, - source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage, - processing_status as "processing_status: SeriesStatus", + r#" + SELECT id, title, original_title, description, cover_image_url, current_source_url, + source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, + last_chapter_found_in_storage, processing_status as "processing_status: SeriesStatus", check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at - FROM series WHERE title = $1"#, + FROM 
series WHERE title = $1 + "#, title ) .fetch_optional(&self.pool) @@ -239,10 +230,7 @@ } // Get authors for a specific series - pub async fn get_authors_by_series_id( - &self, - series_id: i32, - ) -> AnyhowResult<Vec<String>> { + pub async fn get_authors_by_series_id(&self, series_id: i32) -> AnyhowResult<Vec<String>> { let authors_name = sqlx::query_scalar!( r#"SELECT a.name FROM authors a JOIN series_authors sa ON a.id = sa.author_id @@ -265,7 +253,8 @@ r#" SELECT c.id, c.name FROM categories c JOIN series_categories sc ON c.id = sc.category_id - WHERE sc.series_id = $1"#, + WHERE sc.series_id = $1 + "#, series_id ) .fetch_all(&self.pool) @@ -275,7 +264,7 @@ Ok(categories) } - // Get paginated series list for admin + // Get series search list for admin panel pub async fn get_admin_paginated_series( &self, page: u32, @@ -304,45 +293,102 @@ total_items: Option<i64>, } - let record_list = sqlx::query_as!( - QueryResult, - r#" - SELECT - sr.id, - sr.title, - sr.original_title, - sr.description, - sr.cover_image_url, - sr.current_source_url, - sr.updated_at, - sr.processing_status as "processing_status: SeriesStatus", - COALESCE( - json_agg(a.name) FILTER (WHERE a.id IS NOT NULL), - '[]'::json - ) as "authors!", - COUNT(*) OVER () as total_items - FROM - series sr - LEFT JOIN - series_authors sa ON sr.id = sa.series_id - LEFT JOIN - authors a ON sa.author_id = a.id - WHERE - ($3::TEXT IS NULL OR sr.title_tsv @@ plainto_tsquery('english', $3)) - GROUP BY - sr.id - ORDER BY - sr.updated_at DESC - LIMIT $1 - OFFSET $2 - "#, - limit, - offset, - search_query - ) - .fetch_all(&self.pool) - .await - .context("Failed to query all series")?; + let record_list = match search_query.filter(|q| !q.trim().is_empty()) { + Some(search_match) => { + let search_match = search_match.trim(); + let similarity_threshold = 0.20_f32; + + sqlx::query_as!( + QueryResult, + r#" + WITH base_search AS ( + SELECT + s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.current_source_url, s.updated_at, s.processing_status, + -- Calculate similarity score for ranking + similarity(s.title, $3) as sim_score + FROM series s + WHERE + s.title ILIKE '%' || $3 || '%' + OR + (s.title % $3 AND similarity(s.title, $3) >= $4) + ), + ranked_results AS ( + SELECT + *, + CASE + WHEN title ILIKE $3 THEN 10 + WHEN title ILIKE $3 || '%' THEN 8 + WHEN title ILIKE '%' || $3 || '%' THEN 6 + ELSE 4 + END as search_rank + FROM base_search + ), + total_count AS ( + SELECT COUNT(*) AS total FROM ranked_results + ) + SELECT + rr.id, rr.title, rr.original_title, rr.description, + rr.cover_image_url, rr.current_source_url, rr.updated_at, + rr.processing_status as "processing_status: SeriesStatus", + -- Aggregate author names into a JSON array for each series + COALESCE( + json_agg(a.name) FILTER (WHERE a.id IS NOT NULL), + '[]'::json + ) AS "authors!", + tc.total as total_items + FROM ranked_results rr + CROSS JOIN total_count tc + LEFT JOIN series_authors sa ON rr.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + GROUP BY + rr.id, rr.title, rr.original_title, rr.description, rr.cover_image_url, + rr.current_source_url, rr.updated_at, rr.processing_status, + rr.search_rank, rr.sim_score, tc.total + -- Order by the best rank, then by similarity, then by ID for stable sorting + ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC + LIMIT $1 + OFFSET $2 + "#, + limit, + offset, + search_match, + similarity_threshold, + ) + .fetch_all(&self.pool) +
.await + .context("Failed to query all series") + } + None => { + // No search - simple pagination + sqlx::query_as!( + QueryResult, + r#" + SELECT + s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.current_source_url, s.updated_at, + s.processing_status as "processing_status: SeriesStatus", + COALESCE( + json_agg(a.name) FILTER (WHERE a.id IS NOT NULL), + '[]'::json + ) as "authors!", + COUNT(*) OVER () as total_items + FROM + series s + LEFT JOIN series_authors sa ON s.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + GROUP BY s.id + ORDER BY s.updated_at DESC + LIMIT $1 OFFSET $2 + "#, + limit, + offset + ) + .fetch_all(&self.pool) + .await + .context("Failed to get paginated series without search") + } + }?; let total_items = record_list .first() @@ -381,18 +427,15 @@ impl DatabaseService { new_status as _, series_id, ) - .execute(&self.pool) - .await - .context("Failed to update series processing status with sqlx")?; + .execute(&self.pool) + .await + .context("Failed to update series processing status with sqlx")?; Ok(result.rows_affected()) } // Called only if there's new valid content (new chapter) - pub async fn update_series_new_content_timestamp( - &self, - series_id: i32, - ) -> AnyhowResult { + pub async fn update_series_new_content_timestamp(&self, series_id: i32) -> AnyhowResult { let result = sqlx::query!( "UPDATE series SET updated_at = NOW() WHERE id = $1", series_id, @@ -412,13 +455,10 @@ impl DatabaseService { new_next_checked_at: Option>, ) -> AnyhowResult { // First, get the series data asynchronously. - let series = - self.get_series_by_id(series_id).await?.ok_or_else(|| { - anyhow!( - "Series with id {} not found for schedule update", - series_id - ) - })?; + let series = self + .get_series_by_id(series_id) + .await? 
+ .ok_or_else(|| anyhow!("Series with id {} not found for schedule update", series_id))?; // Calculate the next check time if not provided let final_next_checked_at = new_next_checked_at.unwrap_or_else(|| { @@ -427,8 +467,7 @@ impl DatabaseService { // Add a random +- 5 minutes jitter to avoid all series checking at the exact same time let random_jitter = rng.random_range(-300..=300); let actual_interval_secs = (base_interval * 60) + random_jitter; - Utc::now() - + chrono::Duration::seconds(actual_interval_secs.max(300)) + Utc::now() + chrono::Duration::seconds(actual_interval_secs.max(300)) }); let final_status = new_status.unwrap_or(series.processing_status); @@ -459,10 +498,7 @@ impl DatabaseService { Ok(result.rows_affected()) } - pub async fn get_series_chapters_count( - &self, - series_id: i32, - ) -> AnyhowResult { + pub async fn get_series_chapters_count(&self, series_id: i32) -> AnyhowResult { let count = sqlx::query_scalar!( "SELECT COUNT(*) FROM series_chapters WHERE series_id = $1", series_id @@ -511,10 +547,7 @@ impl DatabaseService { })) } - pub async fn delete_series_by_id( - &self, - series_id: i32, - ) -> AnyhowResult { + pub async fn delete_series_by_id(&self, series_id: i32) -> AnyhowResult { let mut tx = self .pool .begin() @@ -550,19 +583,15 @@ impl DatabaseService { .context("Failed to delete series chapters")?; // Delete all author link records - sqlx::query!( - "DELETE FROM series_authors WHERE series_id = $1", - series_id - ) - .execute(&mut *tx) - .await - .context("Failed to delete series-authors links")?; + sqlx::query!("DELETE FROM series_authors WHERE series_id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete series-authors links")?; - let result = - sqlx::query!("DELETE FROM series WHERE id = $1", series_id) - .execute(&mut *tx) - .await - .context("Failed to delete series")?; + let result = sqlx::query!("DELETE FROM series WHERE id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete series")?; tx.commit() .await @@ -571,10 +600,7 @@ impl DatabaseService { Ok(result.rows_affected()) } - pub async fn mark_series_for_deletion( - &self, - series_id: i32, - ) -> AnyhowResult { + pub async fn mark_series_for_deletion(&self, series_id: i32) -> AnyhowResult { let result = sqlx::query!( "UPDATE series SET processing_status = $1, updated_at = NOW() WHERE id = $2 AND processing_status NOT IN ($3, $4)", @@ -583,16 +609,14 @@ impl DatabaseService { SeriesStatus::PendingDeletion as _, SeriesStatus::Deleting as _, ) - .execute(&self.pool) - .await - .context("Failed to mark series for deletion with sqlx")?; + .execute(&self.pool) + .await + .context("Failed to mark series for deletion with sqlx")?; Ok(result.rows_affected()) } - pub async fn find_and_lock_series_for_check( - &self, - ) -> AnyhowResult> { + pub async fn find_and_lock_series_for_check(&self, limit: i64) -> AnyhowResult> { let series = sqlx::query_as!( Series, r#" @@ -602,31 +626,30 @@ impl DatabaseService { processing_status = $1 AND next_checked_at <= NOW() ORDER BY next_checked_at ASC - LIMIT 1 + LIMIT $2 FOR UPDATE SKIP LOCKED ) UPDATE series - SET processing_status = $2 - WHERE id = (SELECT id FROM candidate) + SET processing_status = $3 + WHERE id IN (SELECT id FROM candidate) RETURNING - id, title, original_title, description, cover_image_url, current_source_url, - source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage, + id, title, original_title, description, cover_image_url, 
current_source_url, source_website_host, + views_count, bookmarks_count, total_rating_score, total_ratings_count, last_chapter_found_in_storage, processing_status as "processing_status: SeriesStatus", check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at "#, SeriesStatus::Ongoing as _, + limit, SeriesStatus::Processing as _, ) - .fetch_optional(&self.pool) + .fetch_all(&self.pool) .await .context("Failed to find and lock series for check with sqlx")?; Ok(series) } - pub async fn find_and_lock_series_for_job_deletion( - &self, - ) -> AnyhowResult> { + pub async fn find_and_lock_series_for_job_deletion(&self) -> AnyhowResult> { // If the row is already locked by another transaction, // it will skip it and look for the next row. let series = sqlx::query_as!( @@ -657,10 +680,7 @@ impl DatabaseService { Ok(series) } - pub async fn create_category_tag( - &self, - name: &str, - ) -> AnyhowResult { + pub async fn create_category_tag(&self, name: &str) -> AnyhowResult { let category = sqlx::query_as!( CategoryTag, "INSERT INTO categories (name) VALUES ($1) RETURNING id, name", @@ -682,14 +702,11 @@ impl DatabaseService { Ok(result.rows_affected()) } - pub async fn get_list_all_categories( - &self, - ) -> AnyhowResult> { - let categories = - sqlx::query_as!(CategoryTag, "SELECT id, name FROM categories") - .fetch_all(&self.pool) - .await - .context("Failed to list all categories with sqlx")?; + pub async fn get_list_all_categories(&self) -> AnyhowResult> { + let categories = sqlx::query_as!(CategoryTag, "SELECT id, name FROM categories") + .fetch_all(&self.pool) + .await + .context("Failed to list all categories with sqlx")?; Ok(categories) } diff --git a/backend/src/database/series_user_actions.rs b/backend/src/database/series_user_actions.rs index 26a7b18..d939d3d 100644 --- a/backend/src/database/series_user_actions.rs +++ b/backend/src/database/series_user_actions.rs @@ -1,9 +1,11 @@ -use super::*; use anyhow::Context; use sqlx::postgres::types::PgInterval; +use sqlx::QueryBuilder; + +use super::*; /// Macros `sqlx::query!` -/// For DML operations (INSERT, UPDATE, DELETE) or SELECTs, +/// For DML operations (INSERT, UPDATE, DELETE) or SELECT, /// where you're manually processing generic `sqlx::Row`s (anonymous struct). 
/// /// Macros `sqlx::query_as!` @@ -33,6 +35,7 @@ impl DatabaseService { Ok(()) } + // fetch most viewed series pub async fn fetch_most_viewed_series( &self, period: &str, @@ -97,11 +100,7 @@ impl DatabaseService { Ok(series_list) } - pub async fn add_bookmarked_series( - &self, - user_id: i32, - series_id: i32, - ) -> AnyhowResult<()> { + pub async fn add_bookmarked_series(&self, user_id: i32, series_id: i32) -> AnyhowResult<()> { let mut tx = self .pool .begin() @@ -123,20 +122,16 @@ impl DatabaseService { "UPDATE series SET bookmarks_count = bookmarks_count + 1 WHERE id = $1", series_id ) - .execute(&mut *tx) - .await - .context("Failed to update bookmarked series view with sqlx")?; + .execute(&mut *tx) + .await + .context("Failed to update bookmarked series view with sqlx")?; tx.commit().await.context("Failed to commit transaction.")?; Ok(()) } - pub async fn delete_bookmarked_series( - &self, - user_id: i32, - series_id: i32, - ) -> AnyhowResult<()> { + pub async fn delete_bookmarked_series(&self, user_id: i32, series_id: i32) -> AnyhowResult<()> { let mut tx = self .pool .begin() @@ -161,7 +156,7 @@ impl DatabaseService { ) .execute(&mut *tx) .await - .context("Failed to decrement serues bookmark count with sqlx")?; + .context("Failed to decrement series bookmark count with sqlx")?; } tx.commit().await.context("Failed to commit transaction.")?; @@ -169,20 +164,16 @@ impl DatabaseService { Ok(()) } - pub async fn is_series_bookmarked( - &self, - user_id: i32, - series_id: i32, - ) -> AnyhowResult { + pub async fn is_series_bookmarked(&self, user_id: i32, series_id: i32) -> AnyhowResult { // Query to check for existence of a bookmark entry let exist = sqlx::query_scalar!( "SELECT EXISTS(SELECT 1 FROM user_bookmarks WHERE user_id = $1 AND series_id = $2)", user_id, series_id ) - .fetch_one(&self.pool) - .await - .context("Failed to check bookmarked series view with sqlx")?; + .fetch_one(&self.pool) + .await + .context("Failed to check bookmarked series view with sqlx")?; Ok(exist.unwrap_or(false)) } @@ -239,20 +230,21 @@ impl DatabaseService { user_id, series_id ) - .fetch_optional(&mut *tx) - .await?; + .fetch_optional(&mut *tx) + .await?; sqlx::query!( - r#"INSERT INTO series_ratings (series_id, user_id, rating) VALUES ($1, $2, $3) - ON CONFLICT (user_id, series_id) DO UPDATE SET rating = $3, updated_at = NOW() + r#" + INSERT INTO series_ratings (series_id, user_id, rating) VALUES ($1, $2, $3) + ON CONFLICT (user_id, series_id) DO UPDATE SET rating = $3, updated_at = NOW() "#, series_id, user_id, rating, ) - .execute(&mut *tx) - .await - .context("Failed to update series rating")?; + .execute(&mut *tx) + .await + .context("Failed to update series rating")?; match old_rating { Some(old_score) => { @@ -263,9 +255,9 @@ impl DatabaseService { rating_diff, series_id, ) - .execute(&mut *tx) - .await - .context("Failed to update user series rating")?; + .execute(&mut *tx) + .await + .context("Failed to update user series rating")?; } None => { // New rating @@ -356,7 +348,11 @@ impl DatabaseService { struct QueryResult { id: i32, title: String, + original_title: Option, + #[sqlx(json)] + authors: serde_json::Value, cover_image_url: String, + description: String, last_chapter_found_in_storage: Option, updated_at: DateTime, chapter_title: Option, @@ -369,18 +365,28 @@ impl DatabaseService { SELECT s.id, s.title, + s.original_title, + s.description, s.cover_image_url, s.updated_at, s.last_chapter_found_in_storage, sc.title as chapter_title, + COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) 
FILTER (WHERE a.id IS NOT NULL), + '[]'::json) as authors, COUNT(*) OVER () as total_items FROM series s LEFT JOIN series_chapters sc ON s.id = sc.series_id AND s.last_chapter_found_in_storage = sc.chapter_number + LEFT JOIN + series_authors sa ON s.id = sa.series_id + LEFT JOIN + authors a ON sa.author_id = a.id WHERE s.updated_at >= NOW() - interval '7 days' + GROUP BY + s.id, sc.title ORDER BY s.updated_at DESC LIMIT $1 @@ -402,6 +408,9 @@ impl DatabaseService { .map(|r| LatestReleaseSeries { id: r.id, title: r.title, + original_title: r.original_title, + authors: r.authors, + description: r.description, cover_image_url: r.cover_image_url, last_chapter_found_in_storage: r.last_chapter_found_in_storage, updated_at: r.updated_at, @@ -415,6 +424,7 @@ impl DatabaseService { }) } + // Paginated fetching of browse series with/without filters pub async fn browse_series_paginated_with_filters( &self, page: u32, @@ -422,6 +432,7 @@ impl DatabaseService { order_by: SeriesOrderBy, include_category_ids: &[i32], exclude_category_ids: &[i32], + search_query: Option<&str>, ) -> AnyhowResult> { let page_size = page_size.min(100); let limit = page_size as i64; @@ -437,6 +448,72 @@ impl DatabaseService { let has_include_filters = !include_category_ids.is_empty(); let has_exclude_filters = !exclude_category_ids.is_empty(); + const SIMILARITY_THRESHOLD: f32 = 0.20; + let trimmed_search_query = search_query + .filter(|s| !s.trim().is_empty()) + .map(|s| s.trim()); + + // Define common parts of the query + const SELECT_FIELD: &str = r#" + SELECT + s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.updated_at, s.last_chapter_found_in_storage, + COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), '[]'::json) as authors, + COALESCE(json_agg(DISTINCT c.name ORDER BY c.name) FILTER (WHERE c.id IS NOT NULL), '[]'::json) as categories, + COUNT(*) OVER () AS total_items + "#; + + // Join table + const JOIN_LOGIC: &str = r#" + LEFT JOIN series_authors sa ON s.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + LEFT JOIN series_categories sc ON s.id = sc.series_id + LEFT JOIN categories c ON sc.category_id = c.id + "#; + + // This GROUP BY must include all non-aggregated columns from SELECT_FIELDS + const GROUP_BY_LOGIC: &str = r#" + GROUP BY s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.updated_at, s.last_chapter_found_in_storage, s.views_count, + s.total_rating_score, s.created_at + "#; + + // Define the search snippet + fn build_search_query<'a>( + query_builder: &mut QueryBuilder<'a, sqlx::Postgres>, + query_str: &'a str, + threshold: f32, + ) { + query_builder.push(" ("); // Start of search query + + // Title search: (s.title ILIKE ... OR (s.title % ... AND similarity(...) 
>= ...)) + query_builder.push(" (s.title ILIKE '%' || "); + query_builder.push_bind(query_str); // Binds value for ILIKE + query_builder.push(" || '%' OR (s.title % "); + query_builder.push_bind(query_str); // Binds value for trigram + query_builder.push(" AND similarity(s.title,"); + query_builder.push_bind(query_str); // Binds value for similarity + query_builder.push(") >= "); + query_builder.push_bind(threshold); // Binds threshold + query_builder.push("))"); + + // Original title search: OR (s.original_title IS NOT NULL AND (...)) + query_builder.push(" OR (s.original_title IS NOT NULL AND ("); + query_builder.push(" s.original_title ILIKE '%' || "); + query_builder.push_bind(query_str); // Binds value for ILIKE + query_builder.push(" || '%'"); + query_builder.push(" OR (s.original_title % "); + query_builder.push_bind(query_str); // Binds value for trigram + query_builder.push(" AND similarity(s.original_title, "); + query_builder.push_bind(query_str); // Binds value for similarity + query_builder.push(") >= "); + query_builder.push_bind(threshold); // Binds threshold + query_builder.push(")"); + query_builder.push(" ))"); + + query_builder.push(" )"); // End of search query + } + #[derive(Debug, FromRow)] struct QueryDefaultResult { id: i32, @@ -453,200 +530,92 @@ impl DatabaseService { total_items: Option, } - let record_list: Vec = match ( - has_include_filters, - has_exclude_filters, - ) { - (true, true) => { - // If both included and excluded filters - let query_string = format!( - r#" - WITH filtered_series AS ( - SELECT s.id - FROM series s - WHERE s.id IN ( - SELECT series_id - FROM series_categories - WHERE category_id = ANY($1) - GROUP BY series_id - HAVING COUNT(DISTINCT category_id) = $2 - ) - AND s.id NOT IN ( - SELECT series_id - FROM series_categories - WHERE category_id = ANY($3) - ) - ) - SELECT - s.id, - s.title, - s.original_title, - s.description, - s.cover_image_url, - s.updated_at, - s.last_chapter_found_in_storage, - COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), - '[]'::json) as authors, - COALESCE(json_agg(DISTINCT c.name ORDER BY c.name) FILTER (WHERE c.id IS NOT NULL), - '[]'::json) as categories, - COUNT(*) OVER () AS total_items - FROM - filtered_series fs - JOIN series s ON fs.id = s.id - LEFT JOIN series_authors sa ON s.id = sa.series_id - LEFT JOIN authors a ON sa.author_id = a.id - LEFT JOIN series_categories sc ON s.id = sc.series_id - LEFT JOIN categories c ON sc.category_id = c.id - GROUP BY s.id, s.title, s.original_title, s.description, s.cover_image_url, - s.updated_at, s.last_chapter_found_in_storage, s.views_count, s.total_rating_score, s.created_at - ORDER BY {} - LIMIT $4 - OFFSET $5 - "#, - order_by_clause - ); - sqlx::query_as::<_, QueryDefaultResult>(&query_string) - .bind(include_category_ids) - .bind(include_category_ids.len() as i64) - .bind(exclude_category_ids) - .bind(limit) - .bind(offset) - .fetch_all(&self.pool) - .await - .context("Failed to query series with both include and exclude filters")? 
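The removed match arms above duplicated one query four times; the replacement below assembles it once with sqlx's QueryBuilder. The property the new code relies on: push() appends raw SQL verbatim, while push_bind() appends the next $n placeholder and records its value, so fragments compose without manual parameter numbering. A minimal sketch of the pattern (the Row struct and search function here are illustrative, not part of this diff):

use sqlx::{FromRow, PgPool, Postgres, QueryBuilder};

#[derive(FromRow)]
struct Row {
    id: i32,
    title: String,
}

async fn search(pool: &PgPool, term: Option<&str>) -> sqlx::Result<Vec<Row>> {
    let mut qb: QueryBuilder<Postgres> = QueryBuilder::new("SELECT id, title FROM series");
    if let Some(t) = term {
        // push() = raw SQL fragment; push_bind() = the next $n placeholder plus its value
        qb.push(" WHERE title ILIKE '%' || ").push_bind(t).push(" || '%'");
    }
    qb.push(" LIMIT ").push_bind(25_i64);
    qb.build_query_as::<Row>().fetch_all(pool).await
}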
+ let mut query_builder = QueryBuilder::new(""); + let mut where_condition_added = false; + + // If any filter included, use the CTE + if has_include_filters { + query_builder + .push("WITH filtered_series AS ( SELECT s.id FROM series s WHERE s.id IN ("); + + // Subquery for include + query_builder.push("SELECT series_id FROM series_categories WHERE category_id = ANY("); + query_builder.push_bind(include_category_ids); // $1 + query_builder.push(") GROUP BY series_id HAVING COUNT(DISTINCT category_id) = "); + query_builder.push_bind(include_category_ids.len() as i64); // $2 + query_builder.push(")"); // Close IN (...) + + // Exclude logic inside the CTE + if has_exclude_filters { + query_builder.push(" AND s.id NOT IN (SELECT series_id FROM series_categories WHERE category_id = ANY("); + query_builder.push_bind(exclude_category_ids); // $3 + query_builder.push("))"); } - (true, false) => { - // Only include filters - let query_string = format!( - r#" - WITH filtered_series AS ( - SELECT series_id as id - FROM series_categories - WHERE category_id = ANY($1) - GROUP BY series_id - HAVING COUNT(DISTINCT category_id) = $2 - ) - SELECT - s.id, - s.title, - s.original_title, - s.description, - s.cover_image_url, - s.updated_at, - s.last_chapter_found_in_storage, - COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), - '[]'::json) as authors, - COALESCE(json_agg(DISTINCT c.name ORDER BY c.name) FILTER (WHERE c.id IS NOT NULL), - '[]'::json) as categories, - COUNT(*) OVER () AS total_items - FROM - filtered_series fs - JOIN series s ON fs.id = s.id - LEFT JOIN series_authors sa ON s.id = sa.series_id - LEFT JOIN authors a ON sa.author_id = a.id - LEFT JOIN series_categories sc ON s.id = sc.series_id - LEFT JOIN categories c ON sc.category_id = c.id - GROUP BY s.id, s.title, s.original_title, s.description, s.cover_image_url, - s.updated_at, s.last_chapter_found_in_storage, s.views_count, s.total_rating_score, s.created_at - ORDER BY {} - LIMIT $3 - OFFSET $4 - "#, - order_by_clause - ); - sqlx::query_as::<_, QueryDefaultResult>(&query_string) - .bind(include_category_ids) - .bind(include_category_ids.len() as i64) - .bind(limit) - .bind(offset) - .fetch_all(&self.pool) - .await - .context("Failed to query series with include filters")? 
+ + // Search logic inside the CTE for performance + if let Some(query_str) = trimmed_search_query { + query_builder.push(" AND "); + + build_search_query(&mut query_builder, query_str, SIMILARITY_THRESHOLD); } - (false, true) => { - // Only exclude filters - let query_string = format!( - r#" - SELECT - s.id, - s.title, - s.original_title, - s.description, - s.cover_image_url, - s.updated_at, - s.last_chapter_found_in_storage, - COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), - '[]'::json) as authors, - COALESCE(json_agg(DISTINCT c.name ORDER BY c.name) FILTER (WHERE c.id IS NOT NULL), - '[]'::json) as categories, - COUNT(*) OVER () as total_items - FROM - series s - LEFT JOIN series_authors sa ON s.id = sa.series_id - LEFT JOIN authors a ON sa.author_id = a.id - LEFT JOIN series_categories sc ON s.id = sc.series_id - LEFT JOIN categories c ON sc.category_id = c.id - WHERE NOT EXISTS ( - SELECT 1 - FROM series_categories sc_exclude - WHERE sc_exclude.series_id = s.id - AND sc_exclude.category_id = ANY($1) - ) - GROUP BY s.id, s.title, s.original_title, s.description, s.cover_image_url, - s.updated_at, s.last_chapter_found_in_storage, s.views_count, s.total_rating_score, s.created_at - ORDER BY {} - LIMIT $2 - OFFSET $3 - "#, - order_by_clause - ); - sqlx::query_as::<_, QueryDefaultResult>(&query_string) - .bind(exclude_category_ids) - .bind(limit) - .bind(offset) - .fetch_all(&self.pool) - .await - .context("Failed to query series with exclude filters")? + + // Close CTE and build the main query + query_builder.push(" ) "); + query_builder.push(SELECT_FIELD); + query_builder.push(" FROM filtered_series fs JOIN series s ON fs.id = s.id"); + query_builder.push(JOIN_LOGIC); + } else { + query_builder.push(SELECT_FIELD); + query_builder.push(" FROM series s "); + query_builder.push(JOIN_LOGIC); + + // Exclude logic in the WHERE clause + if has_exclude_filters { + query_builder.push(" WHERE NOT EXISTS (SELECT 1 FROM series_categories sc_exclude WHERE sc_exclude.series_id = s.id AND sc_exclude.category_id = ANY("); + query_builder.push_bind(exclude_category_ids); // $1 + query_builder.push(")) "); + + where_condition_added = true; } - (false, false) => { - // No filters - let query_string = format!( - r#" - SELECT - s.id, - s.title, - s.original_title, - s.description, - s.cover_image_url, - s.updated_at, - s.last_chapter_found_in_storage, - COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), - '[]'::json) as authors, - COALESCE(json_agg(DISTINCT c.name ORDER BY c.name) FILTER (WHERE c.id IS NOT NULL), - '[]'::json) as categories, - COUNT(*) OVER () as total_items - FROM - series s - LEFT JOIN series_authors sa ON s.id = sa.series_id - LEFT JOIN authors a ON sa.author_id = a.id - LEFT JOIN series_categories sc ON s.id = sc.series_id - LEFT JOIN categories c ON sc.category_id = c.id - GROUP BY s.id, s.title, s.original_title, s.description, s.cover_image_url, - s.updated_at, s.last_chapter_found_in_storage, s.views_count, s.total_rating_score, s.created_at - ORDER BY {} - LIMIT $1 - OFFSET $2 - "#, - order_by_clause - ); - sqlx::query_as::<_, QueryDefaultResult>(&query_string) - .bind(limit) - .bind(offset) - .fetch_all(&self.pool) - .await - .context("Failed to query default without filters")? 
+ + // Search logic in the WHERE clause + if let Some(query_str) = trimmed_search_query { + if where_condition_added { + query_builder.push(" AND "); + } else { + query_builder.push(" WHERE "); + } + + build_search_query(&mut query_builder, query_str, SIMILARITY_THRESHOLD); + + // where_condition_added = true; } - }; + } + + // Final Assembly (Common to all logic paths) + query_builder.push(GROUP_BY_LOGIC); + + // Add ORDER BY + query_builder.push(" ORDER BY "); + query_builder.push(order_by_clause); + + // Add LIMIT and OFFSET + query_builder.push(" LIMIT "); + query_builder.push_bind(limit); // $... (last) + query_builder.push(" OFFSET "); + query_builder.push_bind(offset); // $... (very last) + + println!( + "Executing SQL for Browse Series Paginated: {}", + query_builder.sql() + ); + + // Execute the built query + let record_list = query_builder + .build_query_as::<QueryDefaultResult>() + .fetch_all(&self.pool) + .await + .context("Failed to dynamically build and execute series browse query")?; let total_items = record_list .first() @@ -673,10 +642,70 @@ }) } + + // Paginated fetching of user search series + pub async fn user_search_paginated_series( + &self, + search_query: &str, + ) -> AnyhowResult<Vec<UserSearchPaginatedSeries>> { + let trimmed_query = search_query.trim(); + + if trimmed_query.is_empty() { + return Ok(Vec::new()); + } + + const LIMIT: i64 = 25; + const SIMILARITY_THRESHOLD: f32 = 0.20; + + let search_list = sqlx::query_as!( + UserSearchPaginatedSeries, + r#" + SELECT + s.id, + s.title, + s.original_title, + s.cover_image_url, + s.last_chapter_found_in_storage, + s.updated_at, + COALESCE(json_agg(DISTINCT a.name ORDER BY a.name) FILTER (WHERE a.id IS NOT NULL), + '[]'::json) as authors + FROM series s + LEFT JOIN series_authors sa ON s.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + WHERE + ( + s.title ILIKE '%' || $1 || '%' + OR (s.title % $1 AND similarity(s.title, $1) >= $2) + ) + OR + ( + s.original_title IS NOT NULL AND ( + s.original_title ILIKE '%' || $1 || '%' + OR (s.original_title % $1 AND similarity(s.original_title, $1) >= $2) + ) + ) + GROUP BY s.id + ORDER BY GREATEST( + similarity(s.title, $1), + similarity(COALESCE(s.original_title, ''), $1) + ) DESC + LIMIT $3 + "#, + trimmed_query, + SIMILARITY_THRESHOLD, + LIMIT, + ) + .fetch_all(&self.pool) + .await + .context("Failed to execute user series search")?; + + Ok(search_list) + } + + // Query helper for deleting old view logs pub async fn cleanup_old_view_logs(&self) -> AnyhowResult<u64> { let retention_interval = PgInterval { - months: 1, - days: 0, + months: 0, + days: 35, microseconds: 0, }; @@ -684,9 +713,9 @@ "DELETE FROM series_view_log WHERE viewed_at < NOW() - $1::interval", retention_interval as _ ) - .execute(&self.pool) - .await - .context("Failed to cleanup old view logs with sqlx")?; + .execute(&self.pool) + .await + .context("Failed to clean up old view logs with sqlx")?; Ok(result.rows_affected()) } diff --git a/backend/src/database/storage.rs b/backend/src/database/storage.rs index 029ba99..8578080 100644 --- a/backend/src/database/storage.rs +++ b/backend/src/database/storage.rs @@ -1,9 +1,10 @@ +use std::env; + use anyhow::{Context, Result, anyhow}; use aws_sdk_s3::Client; use aws_sdk_s3::config::{Credentials, Region}; use aws_sdk_s3::primitives::ByteStream; use aws_sdk_s3::types::{Delete, Error as S3Error, ObjectIdentifier}; -use std::env; /// A client for interacting with an S3-compatible object storage like Cloudflare R2.
#[derive(Clone)] @@ -22,10 +23,10 @@ impl StorageClient { /// - `R2_SECRET_ACCESS_KEY`: Your R2 secret access key. /// - `R2_DOMAIN_CDN_URL`: The public URL of your bucket (https://pub-xxxxxxxx.r2.dev or your custom domain). pub async fn new_from_env() -> Result { - let bucket_name = env::var("R2_BUCKET_NAME") - .context("Environment variable R2_BUCKET_NAME is not set")?; - let account_id = env::var("R2_ACCOUNT_ID") - .context("Environment variable R2_ACCOUNT_ID is not set")?; + let bucket_name = + env::var("R2_BUCKET_NAME").context("Environment variable R2_BUCKET_NAME is not set")?; + let account_id = + env::var("R2_ACCOUNT_ID").context("Environment variable R2_ACCOUNT_ID is not set")?; let access_key_id = env::var("R2_ACCESS_KEY_ID") .context("Environment variable R2_ACCESS_KEY_ID is not set")?; let secret_access_key = env::var("R2_SECRET_ACCESS_KEY") @@ -36,8 +37,7 @@ impl StorageClient { let domain_cdn_url = domain_cdn_url.trim_end_matches('/').to_string(); // Construct the S3 endpoint URL for Cloudflare R2 - let endpoint_url = - format!("https://{account_id}.r2.cloudflarestorage.com"); + let endpoint_url = format!("https://{account_id}.r2.cloudflarestorage.com"); // Create a static credentials provider let credentials = Credentials::new( @@ -98,12 +98,7 @@ impl StorageClient { .content_type(content_type) .send() .await - .with_context(|| { - format!( - "Failed to upload object with key '{}' to R2 bucket", - key - ) - })?; + .with_context(|| format!("Failed to upload object with key '{}' to R2 bucket", key))?; // Construct the public URL let public_url = format!("{}/{}", self.domain_cdn_url, key); @@ -132,9 +127,7 @@ impl StorageClient { let delete_payload = Delete::builder() .set_objects(Some(objects_to_delete)) .build() - .map_err(|e| { - anyhow::anyhow!("Failed to build Delete payload: {}", e) - })?; + .map_err(|e| anyhow::anyhow!("Failed to build Delete payload: {}", e))?; // Send the delete_objects request. let result = self @@ -144,9 +137,7 @@ impl StorageClient { .delete(delete_payload) .send() .await - .with_context( - || "Failed to send delete_objects request to R2 bucket", - )?; + .with_context(|| "Failed to send delete_objects request to R2 bucket")?; // Check for "real" errors. Ignore "NoSuchKey" as it means the object is already gone. if let Some(errors) = result.errors { diff --git a/backend/src/database/users.rs b/backend/src/database/users.rs index 16b27bf..6b4322b 100644 --- a/backend/src/database/users.rs +++ b/backend/src/database/users.rs @@ -1,4 +1,5 @@ use super::*; +use crate::api::extractor::Role; /// Macros `sqlx::query!` /// For DML operations (INSERT, UPDATE, DELETE) or SELECTs, @@ -12,19 +13,17 @@ use super::*; /// For queries returning a single value (one row, one column). /// Highly efficient for this purpose. impl DatabaseService { - pub async fn get_user_by_identifier( - &self, - identifier: &str, - ) -> AnyhowResult> { + /// Fetch user by username or email + pub async fn get_user_by_identifier(&self, identifier: &str) -> AnyhowResult> { let user = sqlx::query_as!( Users, - // Check both column email and username "SELECT id, username, email, password_hash, role_id FROM users WHERE email = $1 OR username = $1", identifier, ).fetch_optional(&self.pool).await.context("Failed to get user by identifier")?; Ok(user) } + /// Create new user pub async fn create_user( &self, username: &str, @@ -46,42 +45,6 @@ impl DatabaseService { Ok(new_user_id) } - // Update password hash for a given user ID. 
- pub async fn update_user_password_hash_after_reset_password( - &self, - user_id: i32, - new_password_hash: &str, - ) -> AnyhowResult<()> { - sqlx::query!( - "UPDATE users SET password_hash = $1, updated_at = NOW() WHERE id = $2", - new_password_hash, - user_id - ) - .execute(&self.pool) - .await - .context("Failed to update user password hash")?; - - Ok(()) - } - - // Retrieves a user's ID and token expiration time by the reset token. - // Returns a tuple of (user_id, expires_at) if token is found - pub async fn get_user_by_reset_token( - &self, - token: &str, - ) -> AnyhowResult)>> { - let record = sqlx::query!( - "SELECT user_id, expires_at FROM password_reset_tokens WHERE token = $1", - token - ) - .fetch_optional(&self.pool) - .await - .context("Failed to get user by reset token")? - .map(|row| (row.user_id, row.expires_at)); - - Ok(record) - } - // Fetch user profiles data by id pub async fn get_user_profile_details( &self, @@ -130,8 +93,8 @@ impl DatabaseService { user_id, name ) - .execute(&mut *tx) - .await + .execute(&mut *tx) + .await .context("Failed to update user profile")?; } @@ -152,11 +115,7 @@ impl DatabaseService { } // Update user avatar - pub async fn update_user_avatar( - &self, - user_id: i32, - avatar_key: &str, - ) -> AnyhowResult<()> { + pub async fn update_user_avatar(&self, user_id: i32, avatar_key: &str) -> AnyhowResult<()> { sqlx::query!( r#" INSERT INTO user_profiles (user_id, avatar_url) VALUES ($1, $2) @@ -183,14 +142,15 @@ impl DatabaseService { new_password_hash, user_id ) - .execute(&self.pool) + .execute(&self.pool) .await .context("Failed to update user profile")?; Ok(()) } - pub async fn get_paginated_user( + // Get paginated user search list for admin panel + pub async fn get_admin_paginated_user( &self, page: u32, page_size: u32, @@ -203,48 +163,46 @@ impl DatabaseService { match search_query.filter(|q| !q.trim().is_empty()) { Some(search_match) => { let search_match = search_match.trim(); - - let fts_query = search_match - .split_whitespace() - .filter(|word| !word.is_empty()) - .map(|word| format!("{}:*", word)) - .collect::>() - .join(" & "); + let similarity_threshold = 0.20_f32; let records = sqlx::query!( r#" - WITH search_results AS ( + WITH base_search AS ( SELECT u.id, u.username, u.email, r.role_name, - u.user_tsv + -- Calculate similarity score for ranking + similarity(u.username || ' ' || u.email, $3) AS sim_score FROM users u JOIN roles r ON u.role_id = r.id WHERE -- ILIKE for substring matches - u.username ILIKE '%' || $3 || '%' - OR u.email ILIKE '%' || $3 || '%' - -- FTS for whole-word/prefix matches - OR u.user_tsv @@ to_tsquery('simple', $4) - -- fuzzy match filtering - OR (u.username || ' ' || u.email) % $3 + (u.username ILIKE '%' || $3 || '%') + OR + (u.email ILIKE '%' || $3 || '%') + -- fuzzy match trigram filtering + OR + ( + (u.username || ' ' || u.email) % $3 + AND + similarity(u.username || ' ' || u.email, $3) >= $4 + ) ), ranked_results AS ( SELECT *, CASE - WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 10 - WHEN user_tsv @@ to_tsquery('simple', $4) THEN 8 + WHEN username ILIKE $3 OR email ILIKE $3 THEN 10 + WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 8 + -- WHEN user_tsv @@ to_tsquery('simple', $4) THEN 8 ELSE 6 - END as search_rank, - -- Calculate similarity score for ranking - similarity(username || ' ' || email, $3) as sim_score - FROM search_results + END as search_rank + FROM base_search ), total_count AS ( - SELECT COUNT(*) AS total FROM ranked_results WHERE 
search_rank > 0 + SELECT COUNT(*) AS total FROM ranked_results ) SELECT rr.id, @@ -254,14 +212,15 @@ tc.total as total_items FROM ranked_results rr CROSS JOIN total_count tc - WHERE rr.search_rank > 0 + -- Order by the best rank, then by similarity, then by ID for stable sorting ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC - LIMIT $1 OFFSET $2 + LIMIT $1 + OFFSET $2 "#, limit, offset, search_match, - fts_query + similarity_threshold ) .fetch_all(&self.pool) .await @@ -329,4 +288,189 @@ } } } + + // Retrieves a user's ID and token expiration time by the reset token. + // Returns a tuple of (user_id, expires_at) if token is found + pub async fn get_user_by_reset_token( + &self, + token: &str, + ) -> AnyhowResult<Option<(i32, DateTime<Utc>)>> { + let record = sqlx::query!( + "SELECT user_id, expires_at FROM password_reset_tokens WHERE token = $1", + token + ) + .fetch_optional(&self.pool) + .await + .context("Failed to get user by reset token")? + .map(|row| (row.user_id, row.expires_at)); + + Ok(record) + } + + // Update password hash for a given user ID + pub async fn update_user_password_hash_after_reset_password( + &self, + user_id: i32, + new_password_hash: &str, + ) -> AnyhowResult<()> { + sqlx::query!( + "UPDATE users SET password_hash = $1, updated_at = NOW() WHERE id = $2", + new_password_hash, + user_id + ) + .execute(&self.pool) + .await + .context("Failed to update user password hash")?; + + Ok(()) + } + + /// Partially update user details (admin) + /// This function updates only the provided fields using a "Fetch-Merge-Update" pattern + /// It returns the updated user data or None if the user was not found + pub async fn admin_update_user( + &self, + user_id: i32, + username: Option<&str>, + email: Option<&str>, + role_id: Option<i32>, + is_active: Option<bool>, + actor_role: Role, + ) -> AnyhowResult<Option<UserWithRole>> { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + // Fetch the current user data (and lock the row for update) + let current_user = sqlx::query!( + "SELECT + u.username, u.email, u.role_id, u.is_active, r.role_name + FROM users u + JOIN roles r ON u.role_id = r.id + WHERE u.id = $1 + FOR UPDATE", + user_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch user")?; + + // If the user doesn't exist, roll back and return None + let Some(current_user) = current_user else { + tx.rollback().await.context("Failed to roll back transaction")?; + return Ok(None); + }; + + // Get target role enum + let target_role = Role::from_name(&current_user.role_name).unwrap_or(Role::User); + + // Check if actor has permission to modify target user + // Admin (2) CANNOT change SuperAdmin (3) -> 2 <= 3 (Failed) + // Admin (2) CANNOT change Admin (2) -> 2 <= 2 (Failed) + // Admin (2) CAN change Moderator (1) -> 2 <= 1 (Pass) + if actor_role <= target_role { + tx.rollback().await.context("Failed to roll back transaction")?; + anyhow::bail!( + "FORBIDDEN: You do not have permission to modify a user with an equal or higher role." + ); + } + + if let Some(new_role_id) = role_id { + let new_role_name = + sqlx::query_scalar!("SELECT role_name FROM roles WHERE id = $1", new_role_id) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch role_id")?
+ .ok_or_else(|| anyhow::anyhow!("Invalid role_id: {}", new_role_id))?; + + let new_role_enum = Role::from_name(&new_role_name).unwrap_or(Role::User); + + if new_role_enum >= actor_role { + tx.rollback().await.context("Failed to roll back transaction")?; + anyhow::bail!("FORBIDDEN: You cannot assign a role equal to or higher than your own."); + } + } + + // Merge: Use new value if Some, otherwise keep the current value + let new_username = username.unwrap_or(&current_user.username); + let new_email = email.unwrap_or(&current_user.email); + let new_role_id = role_id.unwrap_or(current_user.role_id); + let new_is_active = is_active.or(current_user.is_active); + + // Check for conflicts (username or email) with *other* users + // Only check if username or email is actually changing + if username.is_some() || email.is_some() { + let conflict = sqlx::query_scalar!( + "SELECT 1 FROM users WHERE (username = $1 OR email = $2) AND id != $3 LIMIT 1", + new_username, + new_email, + user_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to check for username/email conflict")?; + + if conflict.is_some() { + tx.rollback().await.context("Failed to roll back transaction")?; + // Return a specific error message that the handler can catch + anyhow::bail!( + "Username or email already exists for another user with id {}", + user_id + ); + } + } + + // Update the user with merged data + sqlx::query!( + r#" + UPDATE users + SET username = $1, email = $2, role_id = $3, is_active = $4, updated_at = NOW() + WHERE id = $5 + "#, + new_username, + new_email, + new_role_id, + new_is_active, + user_id + ) + .execute(&mut *tx) + .await + .context("Failed to update user")?; + + // Fetch the updated user data to return + // (select role_id and is_active as well, so the query matches every UserWithRole field) + let updated_user = sqlx::query_as!( + UserWithRole, + r#" + SELECT + u.id, + u.username, + u.email, + u.role_id, + u.is_active as "is_active!", + r.role_name + FROM users u + JOIN roles r ON u.role_id = r.id + WHERE u.id = $1 + "#, + user_id + ) + .fetch_one(&mut *tx) + .await + .context("Failed to fetch updated user")?; + + // Commit the transaction + tx.commit().await.context("Failed to commit transaction")?; + + Ok(Some(updated_user)) + } + + // Delete user by ID (admin) + pub async fn admin_delete_user(&self, user_id: i32) -> AnyhowResult<u64> { + let result = sqlx::query!("DELETE FROM users WHERE id = $1", user_id) + .execute(&self.pool) + .await + .context("Failed to delete user")?; + + Ok(result.rows_affected()) + } } diff --git a/backend/src/encoding/mod.rs b/backend/src/encoding/mod.rs deleted file mode 100644 index 3f6640b..0000000 --- a/backend/src/encoding/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod image_encoding; diff --git a/backend/src/main.rs b/backend/src/main.rs index c9df055..17a1663 100644 --- a/backend/src/main.rs +++ b/backend/src/main.rs @@ -1,24 +1,34 @@ -#![allow(clippy::uninlined_format_args)] +#![deny( + clippy::panic, + clippy::unwrap_used, + clippy::panicking_unwrap, + clippy::redundant_clone, + clippy::implicit_clone, + clippy::perf +)] +// clippy::expect_used extern crate core; + mod api; -mod app; mod builder; mod common; mod database; -mod encoding; +mod processing; mod scraping; mod task_workers; -use crate::builder::startup; -use crate::builder::startup::Mailer; -use crate::common::dynamic_proxy; +use std::env; +use std::net::SocketAddr; +use std::time::Duration; + use anyhow::{Context, Result}; use dotenvy::dotenv; use lettre::transport::smtp::authentication::Credentials; use sqlx::postgres::PgPoolOptions; -use std::env; -use std::net::SocketAddr; -use std::time::Duration; + +use crate::builder::startup; +use crate::builder::startup::Mailer;
+use crate::common::dynamic_proxy; // Main entry point for the application #[tokio::main] @@ -29,8 +39,7 @@ async fn main() -> Result<()> { println!("[MAIN] App Starting..."); // Initialize database external resources - let db_url = - env::var("DATABASE_URL").context("[MAIN] DATABASE_URL must be set")?; + let db_url = env::var("DATABASE_URL").context("[MAIN] DATABASE_URL must be set")?; let db_pool = PgPoolOptions::new() .max_connections(4) .min_connections(2) @@ -43,13 +52,16 @@ async fn main() -> Result<()> { println!("[MAIN] Database pool created."); + /* + let migrator = Migrator::new(Path::new("./migrations")).await?; + migrator.run(&db_pool).await?; + println!("Migrations applied successfully!"); + */ + // Initialize Mailer service external resources - let smtp_host = - env::var("SMTP_HOST").context("[MAIN] SMTP_HOST must be set")?; - let smtp_username = env::var("SMTP_USERNAME") - .context("[MAIN] SMTP_USERNAME must be set")?; - let smtp_password = env::var("SMTP_PASSWORD") - .context("[MAIN] SMTP_PASSWORD must be set")?; + let smtp_host = env::var("SMTP_HOST").context("[MAIN] SMTP_HOST must be set")?; + let smtp_username = env::var("SMTP_USERNAME").context("[MAIN] SMTP_USERNAME must be set")?; + let smtp_password = env::var("SMTP_PASSWORD").context("[MAIN] SMTP_PASSWORD must be set")?; let creds = Credentials::new(smtp_username, smtp_password); let mailer = Mailer::starttls_relay(&smtp_host) @@ -60,14 +72,14 @@ async fn main() -> Result<()> { println!("[MAIN] Mailer service initialized."); // Initialize HTTP Client - let http_client = dynamic_proxy::init_client() - .context("[MAIN] Failed to initialize HTTP client")?; + let http_client = + dynamic_proxy::init_client().context("[MAIN] Failed to initialize HTTP client")?; println!("[MAIN] HTTP Client created."); // Define the server address, port and listeners let addr = SocketAddr::from(([127, 0, 0, 1], 8000)); let listener = tokio::net::TcpListener::bind(addr).await?; - println!("[MAIN] Server listening on https://{addr}"); + println!("[MAIN] Server listening on https://{}", addr); // Start the builder startup::run(listener, db_pool, mailer, http_client).await?; diff --git a/backend/src/app/coordinator.rs b/backend/src/processing/coordinator.rs similarity index 85% rename from backend/src/app/coordinator.rs rename to backend/src/processing/coordinator.rs index 2d39ae2..b913cf0 100644 --- a/backend/src/app/coordinator.rs +++ b/backend/src/processing/coordinator.rs @@ -1,14 +1,15 @@ +use std::sync::Arc; + use anyhow::Result; use reqwest::Client; use slug::slugify; -use std::sync::Arc; use tokio::sync::Semaphore; use tokio::task; use crate::common::utils::random_sleep_time; use crate::database::storage::StorageClient; use crate::database::{ChapterStatus, DatabaseService, Series}; -use crate::encoding::image_encoding; +use crate::processing::image_encoding; use crate::scraping::model::SiteScrapingConfig; use crate::scraping::{fetcher, parser}; @@ -69,8 +70,7 @@ pub async fn process_single_chapter( config: &SiteScrapingConfig, db_service: &DatabaseService, ) -> Result> { - let convert_chapter_number = - chapter_info.number.to_string().replace('.', "-"); + let convert_chapter_number = chapter_info.number.to_string().replace('.', "-"); let consistent_title = format!("{}-eng", convert_chapter_number); @@ -92,17 +92,13 @@ pub async fn process_single_chapter( chapter_info.number, chapter_id ); - let html_content = - fetcher::fetch_html(http_client, &chapter_info.url).await?; + let html_content = fetcher::fetch_html(http_client, 
&chapter_info.url).await?; // Pause before start processing images random_sleep_time(1, 3).await; - let image_urls = parser::extract_image_urls_from_html_content( - &html_content, - &chapter_info.url, - config, - )?; + let image_urls = + parser::extract_image_urls_from_html_content(&html_content, &chapter_info.url, config)?; let total_image_found = image_urls.len(); @@ -128,18 +124,22 @@ pub async fn process_single_chapter( let task = tokio::spawn(async move { // This will wait until a permit is available from the semaphore - let _permit = permit_semaphore.acquire_owned().await.unwrap(); + let _permit = match permit_semaphore.acquire_owned().await { + Ok(permit) => permit, + Err(e) => { + eprintln!( + "[COORDINATOR-TASK][Ch:{}] Failed to acquire semaphore permit: {}. This indicates a critical logic error.", + chapter_number_str, e + ); + return (index, Err(anyhow::anyhow!("Failed to acquire semaphore"))); + } + }; // Pause random delay before task //random_sleep_time(1, 2).await; // The processing pipeline: fetch -> encode -> upload - let image_bytes = match fetcher::fetch_image_bytes( - &http_client, - &img_url, - ) - .await - { + let image_bytes = match fetcher::fetch_image_bytes(&http_client, &img_url).await { Ok(bytes) => bytes, Err(e) => { eprintln!( @@ -157,16 +157,10 @@ pub async fn process_single_chapter( { Ok(Ok(bytes)) => bytes, Ok(Err(e)) => { - return ( - index, - Err(anyhow::anyhow!("Encoding failed: {}", e)), - ); + return (index, Err(anyhow::anyhow!("Encoding failed: {}", e))); } Err(e) => { - return ( - index, - Err(anyhow::anyhow!("Encoding task panicked: {}", e)), - ); + return (index, Err(anyhow::anyhow!("Encoding task panicked: {}", e))); } }; @@ -179,11 +173,7 @@ pub async fn process_single_chapter( // Upload to R2 if let Err(e) = storage_client - .upload_image_series_objects( - &object_key, - avif_bytes, - "image/avif", - ) + .upload_image_series_objects(&object_key, avif_bytes, "image/avif") .await { eprintln!("[TASK] Failed to upload to R2: {}", e); @@ -211,10 +201,7 @@ pub async fn process_single_chapter( } // Task panicked Err(join_err) => { - eprintln!( - "[COORDINATOR] Processing task panicked: {}", - join_err - ); + eprintln!("[COORDINATOR] Processing task panicked: {}", join_err); } } } @@ -226,11 +213,7 @@ pub async fn process_single_chapter( for (original_index, key_to_save) in &successful_uploads { // Save CDN object key to the database if successful if db_service - .add_chapter_images( - chapter_id, - (*original_index + 1) as i32, - key_to_save, - ) + .add_chapter_images(chapter_id, (*original_index + 1) as i32, key_to_save) .await .is_err() { @@ -253,6 +236,7 @@ pub async fn process_single_chapter( total_image_found > 0, image_saved_count == total_image_found, ) { + // Complete chapter images (true, true) => { db_service .update_chapter_status(chapter_id, ChapterStatus::Available) diff --git a/backend/src/encoding/image_encoding.rs b/backend/src/processing/image_encoding.rs similarity index 85% rename from backend/src/encoding/image_encoding.rs rename to backend/src/processing/image_encoding.rs index 02b17d1..072d6a5 100644 --- a/backend/src/encoding/image_encoding.rs +++ b/backend/src/processing/image_encoding.rs @@ -1,10 +1,9 @@ use anyhow::{Context, Result}; -use image::{GenericImageView, load_from_memory}; +use image::{load_from_memory, GenericImageView}; use ravif::{Encoder, Img}; use rgb::FromSlice; -/// This function is CPU-intensive and is designed to be run in a blocking thread or parallel iwth rayon -/// [NOTE]: Using 
`tokio::task::spawn_blocking` to avoid blocking the async runtime? +/// This function is CPU-intensive and is designed to be run in a blocking thread or in parallel with rayon pub fn covert_image_bytes_to_avif(image_bytes: &[u8]) -> Result<Vec<u8>> { // Decode the image from memory let img = load_from_memory(image_bytes).with_context( @@ -36,8 +35,7 @@ pub fn covert_image_bytes_to_avif(image_bytes: &[u8]) -> Result<Vec<u8>> { encoder.encode_rgb(Img::new(pixels, width, height)) }; - let avif_data = - avif_result.with_context(|| "Failed to encode image to AVIF")?; + let avif_data = avif_result.with_context(|| "Failed to encode image to AVIF")?; println!( "[IMAGE ENCODING] Successfully converted image ({}x{}) to AVIF format. Size: {} bytes", diff --git a/backend/src/app/mod.rs b/backend/src/processing/mod.rs similarity index 64% rename from backend/src/app/mod.rs rename to backend/src/processing/mod.rs index 1b9edae..48ef2ec 100644 --- a/backend/src/app/mod.rs +++ b/backend/src/processing/mod.rs @@ -1,2 +1,3 @@ pub mod coordinator; +pub mod image_encoding; pub mod orchestrator;
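The Ok(Ok(..)) / Ok(Err(..)) / Err(..) match in the coordinator above is the shape you get when the CPU-heavy AVIF encode is awaited through `tokio::task::spawn_blocking`. A minimal sketch of that offloading pattern (the `encode` stub is a hypothetical stand-in for the real ravif pipeline):

```rust
use anyhow::Result;

// Hypothetical stand-in for processing::image_encoding::covert_image_bytes_to_avif.
fn encode(bytes: Vec<u8>) -> Result<Vec<u8>> {
    Ok(bytes) // the real function decodes, re-encodes to AVIF, and returns the new bytes
}

async fn encode_off_runtime(image_bytes: Vec<u8>) -> Result<Vec<u8>> {
    // spawn_blocking moves the CPU-bound encode onto the blocking thread pool so
    // async workers keep fetching; awaiting it yields Result<Result<_>, JoinError>,
    // which is exactly the nested Ok(Ok)/Ok(Err)/Err match seen in the coordinator.
    tokio::task::spawn_blocking(move || encode(image_bytes)).await?
}
```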
diff --git a/backend/src/app/orchestrator.rs b/backend/src/processing/orchestrator.rs similarity index 76% rename from backend/src/app/orchestrator.rs rename to backend/src/processing/orchestrator.rs index fb5c74f..e7c6e3b 100644 --- a/backend/src/app/orchestrator.rs +++ b/backend/src/processing/orchestrator.rs @@ -1,16 +1,18 @@ -use crate::app::coordinator; +use std::env; +use std::sync::Arc; + +use anyhow::{anyhow, Result}; +use reqwest::Client; +use url::Url; + use crate::common::utils::random_sleep_time; use crate::database::storage::StorageClient; use crate::database::{DatabaseService, Series}; +use crate::processing::coordinator; use crate::scraping::fetcher; use crate::scraping::model::SitesConfig; use crate::scraping::parser::{ChapterInfo, ChapterParser}; use crate::task_workers::repair_chapter_worker::RepairChapterMsg; -use anyhow::{Result, anyhow}; -use reqwest::Client; -use std::env; -use std::sync::Arc; -use url::Url; // The main "engine" for checking series and scraping tasks. // This function can be called from anywhere, including a background task. @@ -36,21 +38,16 @@ pub async fn run_series_check( series.current_source_url ); - let series_page_html = - fetcher::fetch_html(&http_client, &series.current_source_url).await?; + let series_page_html = fetcher::fetch_html(&http_client, &series.current_source_url).await?; random_sleep_time(2, 5).await; // [Quick Check] Get latest chapter println!("[SERIES CHECK] Performing quick check, get latest chapter."); let latest_site_chapter = chapter_parser - .quick_check_extract_latest_chapter_info( - &series_page_html, - &series.current_source_url, - )?; + .quick_check_extract_latest_chapter_info(&series_page_html, &series.current_source_url)?; - let last_db_chapter_number = - series.last_chapter_found_in_storage.unwrap_or(0.0); + let last_db_chapter_number = series.last_chapter_found_in_storage.unwrap_or(0.0); let mut chapters_to_scrape: Vec<ChapterInfo> = Vec::new(); let mut needs_full_scan = false; @@ -62,19 +59,13 @@ pub async fn run_series_check( // If latest chapter on site > latest in DB, we need a full scan. if latest_chapter.number > last_db_chapter_number { - println!( - "[SERIES CHECK] New chapter detected by Quick Check. Triggering full scan." - ); + println!("[SERIES CHECK] New chapter detected by Quick Check. Triggering full scan."); needs_full_scan = true; } else { // [Count Check] If no new chapter, check for backfills or deletions - println!( - "[SERIES CHECK] Quick Check passed. Performing Count Check" - ); - let site_chapter_count = - chapter_parser.count_chapter_links(&series_page_html)?; - let db_chapter_count = - db_service.get_series_chapters_count(series.id).await?; + println!("[SERIES CHECK] Quick Check passed. Performing Count Check"); + let site_chapter_count = chapter_parser.count_chapter_links(&series_page_html)?; + let db_chapter_count = db_service.get_series_chapters_count(series.id).await?; println!( "[SERIES CHECK] Chapters on site: {}, chapters in DB: {}", @@ -82,9 +73,7 @@ pub async fn run_series_check( ); if site_chapter_count != db_chapter_count as usize { - println!( - "[SERIES CHECK] Count missmatch. Trigger full scan for synchronization." - ); + println!("[SERIES CHECK] Count mismatch. Triggering full scan for synchronization."); needs_full_scan = true; } } @@ -97,10 +86,7 @@ pub async fn run_series_check( if needs_full_scan { println!("[SERIES CHECK] Run full scan"); let all_available_chapters = chapter_parser - .full_scan_extract_all_chapter_info( - &series_page_html, - &series.current_source_url, - )?; + .full_scan_extract_all_chapter_info(&series_page_html, &series.current_source_url)?; if all_available_chapters.is_empty() { println!( @@ -136,24 +122,20 @@ pub async fn run_series_check( ); // Start Scraping Process for Selected Chapters - let last_info_downloaded_chapter = - coordinator::process_series_chapters_from_list( - &series, - &chapters_to_scrape, - &http_client, - storage_client, - site_config, - db_service, - ) - .await?; + let last_info_downloaded_chapter = coordinator::process_series_chapters_from_list( + &series, + &chapters_to_scrape, + &http_client, + storage_client, + site_config, + db_service, + ) + .await?; // Update series metadata in the database if let Some(last_chapter_num) = last_info_downloaded_chapter { db_service - .update_series_last_chapter_found_in_storage( - series.id, - Some(last_chapter_num), - ) + .update_series_last_chapter_found_in_storage(series.id, Some(last_chapter_num)) .await?; println!( "[BULK SCRAPE] Updated last local chapter for '{}' to {}.", @@ -179,9 +161,7 @@ pub async fn repair_specific_chapter_series( let series = db_service .get_series_by_id(msg.series_id) .await? - .ok_or_else(|| { - anyhow!("Series with ID {} not found.", msg.series_id) - })? + .ok_or_else(|| anyhow!("Series with ID {} not found.", msg.series_id))?; let image_urls_to_delete = db_service .get_images_urls_for_chapter_series(msg.series_id, msg.chapter_number) @@ -213,9 +193,7 @@ pub async fn repair_specific_chapter_series( let new_host = Url::parse(&msg.new_chapter_url)? .host_str() - .ok_or_else(|| { - anyhow!("Invalid new chapter URL: {}", &msg.new_chapter_url) - })? + .ok_or_else(|| anyhow!("Invalid new chapter URL: {}", &msg.new_chapter_url))?
.to_string(); let site_config = sites_config @@ -223,7 +201,7 @@ pub async fn repair_specific_chapter_series( .ok_or_else(|| anyhow!("No scraping config for host: {}", new_host))?; let chapter_info_to_scrape = ChapterInfo { - url: msg.new_chapter_url.to_string(), + url: msg.new_chapter_url, number: msg.chapter_number, }; diff --git a/backend/src/scraping/fetcher.rs b/backend/src/scraping/fetcher.rs index 9dcc6c6..7549adf 100644 --- a/backend/src/scraping/fetcher.rs +++ b/backend/src/scraping/fetcher.rs @@ -1,45 +1,46 @@ use anyhow::{Context, Result}; -use backon::Retryable; -use backon::{BackoffBuilder, ExponentialBuilder}; +use backon::{BackoffBuilder, ExponentialBuilder, Retryable}; use bytes::Bytes; use reqwest::Client; -// Determines whether a network error should trigger a retry attempt. -// Retry Strategy: -// - Retry: Server errors (5xx), timeouts, connection issues, rate limits (429) -// - Don't retry: Client errors (4xx except 429), parsing errors, other failures -// Why this approach: -// - Server errors are often temporary (server restart, maintenance, etc.) -// - Timeouts might succeed on retry with better network conditions -// - Rate limits (429) usually resolve after waiting +/// Determines whether a network error should trigger a retry attempt +/// Retry Strategy: +/// - Retry: Server errors (5xx), timeouts, connection issues, rate limits (429) +/// - Don't retry: Client errors (4xx except 429), parsing errors, other failures +/// +/// We use this approach because: +/// - Server errors are often temporary (server restart, maintenance, etc.) +/// - Timeouts might succeed on retry with better network conditions +/// - Rate limits (429) usually resolve after waiting fn is_transient_error(e: &anyhow::Error) -> bool { - // Attempt to downcast the error to a reqwest::Error to inspect it. + // Attempt to downcast the error to a reqwest::Error to inspect it if let Some(req_err) = e.downcast_ref::<reqwest::Error>() { - // Retry if the request timed out or if there was a connection issue. + // Retry if the request timed out or if there was a connection issue if req_err.is_timeout() || req_err.is_connect() { return true; } - // Retry on specific status codes. + // Retry on specific status codes if let Some(status) = req_err.status() { // Retry on 5xx server errors or 429 Too Many Requests. return status.is_server_error() || status.as_u16() == 429; } } - // For all other errors, we don't retry. + // For all other errors, we don't retry false } -// Generic fetch function that handles the core logic of sending a request, -// This is the heart of our fetching system. It handles: -// 1. Making HTTP requests with retry logic -// 2. Status code validation -// 3. Exponential backoff between retries -// 4. Flexible response processing (HTML, bytes, JSON, etc.) -// Generic Parameters Explained: -// - `T`: The final return type (String, Bytes, etc.) -// - `F`: The processor function type -// - `Fut`: The Future returned by the processor function +/// Generic fetch function that handles the core logic of sending a request +/// This is the heart of our fetching system. It handles: +/// 1. Making HTTP requests with retry logic +/// 2. Status code validation +/// 3. Exponential backoff between retries +/// 4. Flexible response processing (HTML, bytes, JSON, etc.) +/// +/// Generic Parameters Explained: +/// - `T`: The final return type (String, Bytes, etc.)
+/// - `F`: The processor function type +/// - `Fut`: The Future returned by the processor function async fn fetch_with_retry<T, F, Fut>( client: &Client, url: &str, @@ -53,24 +54,25 @@ where Fut: Future<Output = Result<T>>, { // Configure exponential backoff - // [NOTE] Can customize it further here, e.g., .with_max_times(5) + // [NOTE] Can customize it further here, e.g. `.with_max_times(5)` let backoff = ExponentialBuilder::default().build(); // Define the operation we want to retry // This closure captures all the variables it needs (client, url, processor) let operation = || async { // This can fail due to: DNS resolution, connection refused, timeouts, etc. - let response = - client.get(url).send().await.with_context(|| { - format!("Failed to send request to {}", url) - })?; + let response = client + .get(url) + .send() + .await + .with_context(|| format!("Failed to send request to {}", url))?; // Check if HTTP status indicates success (2xx) `Ok` // `error_for_status()` will convert a 4xx or 5xx status code into an `Error`. // Why: HTTP request "succeeded" but server said "no" (404, 500, etc.) - let response = response.error_for_status().with_context(|| { - format!("Request to {} returned a non-success status", url) - })?; + let response = response + .error_for_status() + .with_context(|| format!("Request to {} returned a non-success status", url))?; println!("[FETCHER] HTML from {} fetched successfully", url); @@ -81,7 +83,7 @@ where operation .retry(backoff) .when(|e| { - // This closure decides whether to retry based on the error + // Decides whether to retry based on the error let should_retry = is_transient_error(e); if should_retry { println!( @@ -107,9 +109,10 @@ pub async fn fetch_html(client: &Client, url: &str) -> Result<String> { // Call generic fetch function with text-specific processor fetch_with_retry(client, url, |response| async { // This can fail if: response is not valid UTF-8, connection drops during read - response.text().await.with_context(|| { - format!("Failed to read response body from {}", url) - }) + response + .text() + .await + .with_context(|| format!("Failed to read response body from {}", url)) }) .await } @@ -121,9 +124,10 @@ pub async fn fetch_image_bytes(client: &Client, url: &str) -> Result<Bytes> { // Call generic fetch function with binary-specific processor fetch_with_retry(client, url, |response| async { // This preserves the exact binary data without any text conversion - response.bytes().await.with_context(|| { - format!("Failed to read bytes from response of {}", url) - }) + response + .bytes() + .await + .with_context(|| format!("Failed to read bytes from response of {}", url)) }) .await }
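The retry plumbing above pairs backon's `ExponentialBuilder` with a `.when(...)` predicate so only transient failures are retried. A reduced sketch of the same shape, with the predicate and the fallible operation as stand-ins:

```rust
use anyhow::{anyhow, Result};
use backon::{BackoffBuilder, ExponentialBuilder, Retryable};

// Stand-in for is_transient_error above; the real predicate downcasts to reqwest::Error.
fn is_transient(e: &anyhow::Error) -> bool {
    e.to_string().contains("timeout")
}

// Stand-in for the request-sending closure.
async fn flaky_fetch() -> Result<String> {
    Err(anyhow!("timeout")) // pretend the request timed out
}

async fn fetch_once_with_retry() -> Result<String> {
    // Same shape as fetch_with_retry: build an exponential backoff, retry while transient.
    let backoff = ExponentialBuilder::default().with_max_times(3).build();
    flaky_fetch.retry(backoff).when(is_transient).await
}
```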
diff --git a/backend/src/scraping/model.rs b/backend/src/scraping/model.rs index 5967b78..fb61c55 100644 --- a/backend/src/scraping/model.rs +++ b/backend/src/scraping/model.rs @@ -1,9 +1,10 @@ -use anyhow::{Context, Result}; -use serde::Deserialize; use std::collections::HashMap; use std::fs; use std::path::Path; +use anyhow::{Context, Result}; +use serde::Deserialize; + /// Configuration for scraping a specific website. #[derive(Deserialize, Clone, Debug)] pub struct SiteScrapingConfig { @@ -39,22 +40,16 @@ impl SitesConfig { )); } - let config_content = - fs::read_to_string(config_path).with_context(|| { - format!( - "[CONFIG] Failed to read file: {}", - config_path.display() - ) - })?; + let config_content = fs::read_to_string(config_path) + .with_context(|| format!("[CONFIG] Failed to read file: {}", config_path.display()))?; // Serde will automatically handle the TOML structure. - let app_config: SitesConfig = toml::from_str(&config_content) - .with_context(|| { - format!( - "[CONFIG] Failed to parse TOML configuration: {}", - config_path.display() - ) - })?; + let app_config: SitesConfig = toml::from_str(&config_content).with_context(|| { + format!( + "[CONFIG] Failed to parse TOML configuration: {}", + config_path.display() + ) + })?; println!( "[CONFIG] Configuration loaded successfully {} site(s) from {}", @@ -65,10 +60,7 @@ impl SitesConfig { } /// Retrieves site-specific scraping configuration based on hostname. - pub fn get_site_config( - &self, - host_name: &str, - ) -> Option<&SiteScrapingConfig> { + pub fn get_site_config(&self, host_name: &str) -> Option<&SiteScrapingConfig> { self.sites.get(host_name) } } diff --git a/backend/src/scraping/parser.rs b/backend/src/scraping/parser.rs index e67a8a8..1a0b81d 100644 --- a/backend/src/scraping/parser.rs +++ b/backend/src/scraping/parser.rs @@ -1,11 +1,13 @@ -use crate::common::utils; -use crate::scraping::model::SiteScrapingConfig; +use std::collections::HashMap; + use anyhow::Result; use regex::Regex; use scraper::{Element, ElementRef, Html, Selector}; -use std::collections::HashMap; use url::Url; +use crate::common::utils; +use crate::scraping::model::SiteScrapingConfig; + #[derive(Debug, Clone)] pub struct ChapterInfo { pub url: String, @@ -63,17 +65,13 @@ impl ChapterParser { return Ok(None); } - let abs_url = - utils::to_absolute_url(series_page_url, trimmed_href)?; - let title = - link_element.text().collect::<String>().trim().to_string(); + let abs_url = utils::to_absolute_url(series_page_url, trimmed_href)?; + let title = link_element.text().collect::<String>().trim().to_string(); // Find the chapter number using a prioritized strategy - if let Some(number) = self.find_chapter_number_with_strategies( - link_element, - &abs_url, - &title, - ) { + if let Some(number) = + self.find_chapter_number_with_strategies(link_element, &abs_url, &title) + { return Ok(Some(ChapterInfo { url: abs_url, number, @@ -84,11 +82,7 @@ impl ChapterParser { } // Helper function to extract number from regex match - fn extract_number_from_regex( - &self, - regex: &Regex, - input: &str, - ) -> Option<f32> { + fn extract_number_from_regex(&self, regex: &Regex, input: &str) -> Option<f32> { regex .captures(input) .and_then(|captures| captures.get(1)) @@ -111,8 +105,7 @@ impl ChapterParser { // Helper closure to parse a string capture into f32 let parse_match = |s: &str| s.parse::<f32>().ok(); - if let Some(attr_name) = - &self.config.chapter_number_data_attribute_on_parent + if let Some(attr_name) = &self.config.chapter_number_data_attribute_on_parent && !attr_name.is_empty() { // Simple loop on `parent_element` @@ -153,12 +146,11 @@ impl ChapterParser { let document = Html::parse_document(series_page_html); // Select first or last element depending on configured site chapter_order - let latest_chapter_element = - if self.config.chapter_order.eq_ignore_ascii_case("asc") { - document.select(&self.chapter_link_selector).next_back() - } else { - document.select(&self.chapter_link_selector).next() - }; + let latest_chapter_element = if self.config.chapter_order.eq_ignore_ascii_case("asc") { + document.select(&self.chapter_link_selector).next_back() + } else { + document.select(&self.chapter_link_selector).next() + }; if let Some(element) = latest_chapter_element { return self.process_link_element(element, series_page_url); @@ -187,27 +179,19 @@ impl ChapterParser { let mut chapter_map: HashMap<i32, ChapterInfo> = HashMap::new(); for link_element in
document.select(&self.chapter_link_selector) { - if let Some(info) = - self.process_link_element(link_element, series_page_url)? - { + if let Some(info) = self.process_link_element(link_element, series_page_url)? { let key = (info.number * 100.0) as i32; chapter_map.entry(key).or_insert(info); } } // Collect unique chapters from the map - let mut chapters: Vec<ChapterInfo> = - chapter_map.into_values().collect(); + let mut chapters: Vec<ChapterInfo> = chapter_map.into_values().collect(); // Sort chapters by their number to ensure correct processing order - chapters.sort_by(|a, b| { - a.number - .partial_cmp(&b.number) - .unwrap_or(std::cmp::Ordering::Equal) - }); + chapters.sort_by(|a, b| a.number.total_cmp(&b.number)); println!("[FULL SCAN] Found {} unique chapters", chapters.len()); - Ok(chapters) } } @@ -223,16 +207,14 @@ pub fn extract_image_urls_from_html_content( ); let document = Html::parse_document(html_content); - let image_element_selector = Selector::parse( - &config.image_selector_on_chapter_page, - ) - .map_err(|e| { - anyhow::anyhow!( - "Invalid CSS selector for image {}: {:?}", - &config.image_selector_on_chapter_page, - e - ) - })?; + let image_element_selector = + Selector::parse(&config.image_selector_on_chapter_page).map_err(|e| { + anyhow::anyhow!( + "Invalid CSS selector for image {}: {:?}", + &config.image_selector_on_chapter_page, + e + ) + })?; let mut image_urls = Vec::new(); @@ -244,24 +226,20 @@ pub fn extract_image_urls_from_html_content( } // Try to resolve as a relative URL first, then fall back to parsing as is utils::to_absolute_url(base_chapter_url_relative_path, trimmed_src) - .or_else(|_| Url::parse(trimmed_src).map(|u| u.to_string())) + .or_else(|_err| Url::parse(trimmed_src).map(|u| u.to_string())) .ok() }; for img_element in document.select(&image_element_selector) { // Try primary attribute first - let maybe_url = try_resolve_url( - img_element.value().attr(&config.image_url_attribute), - ) - // If primary fail, iterate through other fallback and use the one that works - .or_else(|| { - config - .image_url_fallback_attributes - .iter() - .find_map(|attr| { - try_resolve_url(img_element.value().attr(attr)) - }) - }); + let maybe_url = try_resolve_url(img_element.value().attr(&config.image_url_attribute)) + // If the primary fails, iterate through the fallback attributes and use the first that works + .or_else(|| { + config + .image_url_fallback_attributes + .iter() + .find_map(|attr| try_resolve_url(img_element.value().attr(attr))) + }); if let Some(url_to_add) = maybe_url { // Ensure no duplicate URLs are added. For images, order is important
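Two details in the full-scan hunk above are worth pinning down: duplicate chapter links are collapsed by scaling the f32 chapter number into an integer key ((number * 100.0) as i32, so 10.5 becomes 1050), and the new sort uses f32::total_cmp, which is a total order and needs no Ordering::Equal fallback for NaN. A self-contained illustration:

```rust
use std::collections::HashMap;

fn main() {
    // Chapter numbers as they might be parsed off a series page, with a duplicate.
    let numbers = [1.0_f32, 10.5, 1.0, 2.0];

    // Deduplicate by scaled integer key, keeping the first occurrence.
    let mut unique: HashMap<i32, f32> = HashMap::new();
    for n in numbers {
        unique.entry((n * 100.0) as i32).or_insert(n);
    }

    // total_cmp gives a total order over f32 (NaN included), unlike partial_cmp.
    let mut chapters: Vec<f32> = unique.into_values().collect();
    chapters.sort_by(|a, b| a.total_cmp(b));
    assert_eq!(chapters, vec![1.0, 2.0, 10.5]);
}
```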
diff --git a/backend/src/task_workers/channels.rs b/backend/src/task_workers/channels.rs index 79a3a4d..c5c3d4b 100644 --- a/backend/src/task_workers/channels.rs +++ b/backend/src/task_workers/channels.rs @@ -1,20 +1,19 @@ -use crate::database::DatabaseService; +use std::sync::Arc; + +use arc_swap::ArcSwap; +use reqwest::Client; +use tokio::sync::mpsc; + use crate::database::storage::StorageClient; +use crate::database::DatabaseService; use crate::scraping::model::SitesConfig; -use crate::task_workers::delete_series_worker::{ - run_deletion_scheduler, run_deletion_worker, -}; +use crate::task_workers::delete_password_reset_token_worker::run_cleanup_password_reset_token_worker; +use crate::task_workers::delete_series_worker::{run_deletion_scheduler, run_deletion_worker}; use crate::task_workers::log_view_cleanup_worker::run_log_view_cleanup_worker; -use crate::task_workers::repair_chapter_worker::{ - RepairChapterMsg, run_repair_chapter_worker, -}; +use crate::task_workers::repair_chapter_worker::{run_repair_chapter_worker, RepairChapterMsg}; use crate::task_workers::series_check_worker::{ - SeriesCheckJob, run_series_check_scheduler, run_series_check_worker, + run_series_check_scheduler, run_series_check_worker, SeriesCheckJob, }; -use arc_swap::ArcSwap; -use reqwest::Client; -use std::sync::Arc; -use tokio::sync::mpsc; #[derive(Clone)] pub struct OnDemandChannels { @@ -29,8 +28,7 @@ pub fn setup_worker_channels( sites_config: Arc<ArcSwap<SitesConfig>>, ) -> OnDemandChannels { // Check series worker channels - let (series_check_tx, series_check_rx) = - async_channel::bounded::<SeriesCheckJob>(16); + let (series_check_tx, series_check_rx) = async_channel::bounded::<SeriesCheckJob>(16); tokio::spawn(run_series_check_scheduler( db_service.clone(), @@ -50,7 +48,7 @@ pub fn setup_worker_channels( )); } - // Deletion worker channels + // Delete series worker channels let (deletion_tx, deletion_rx) = mpsc::channel(16); tokio::spawn(run_deletion_scheduler(db_service.clone(), deletion_tx)); @@ -61,19 +59,22 @@ pub fn setup_worker_channels( deletion_rx, )); - // Repair worker channels + // Repair chapter series worker channels let (repair_tx, repair_rx) = mpsc::channel::<RepairChapterMsg>(16); tokio::spawn(run_repair_chapter_worker( repair_rx, db_service.clone(), - storage_client.clone(), - http_client.clone(), - sites_config.clone(), + storage_client, + http_client, + sites_config, )); - // Log View Cleanup worker + // Series Log View cleanup worker tokio::spawn(run_log_view_cleanup_worker(db_service.clone())); + // Password reset token cleanup worker + tokio::spawn(run_cleanup_password_reset_token_worker(db_service)); + OnDemandChannels { repair_tx, series_check_tx, diff --git a/backend/src/task_workers/delete_password_reset_token_worker.rs b/backend/src/task_workers/delete_password_reset_token_worker.rs new file mode 100644 index 0000000..5f2b255 --- /dev/null +++ b/backend/src/task_workers/delete_password_reset_token_worker.rs @@ -0,0 +1,51 @@ +use tokio_cron_scheduler::{Job, JobScheduler}; + +use crate::database::DatabaseService; + +pub async fn run_cleanup_password_reset_token_worker(db_service: DatabaseService) { + let token_cleanup = async { + let scheduler = JobScheduler::new().await?; + let db_clone = db_service.clone(); + let cron_exp = "0 0 0,12 * * * *"; + + let cleanup_job = Job::new_async(cron_exp, move |_uuid, _locked| { + let db = db_clone.clone(); + + Box::pin(async move { + println!("[CRON] Starting password reset token cleanup..."); + + match db.cleanup_password_reset_token().await { + Ok(deleted_count) => { + if
deleted_count > 0 { + println!("[CRON] Deleted {} expired password reset tokens.", deleted_count); + } else { + println!("[CRON] No expired tokens found."); + } + } + Err(error) => eprintln!("Failed to cleanup password reset token: {}", error), + } + }) + })?; + + scheduler.add(cleanup_job).await?; + scheduler.start().await?; + + Ok::<JobScheduler, anyhow::Error>(scheduler) + } + .await; + + match token_cleanup { + Ok(_scheduler) => { + // CRITICAL: Hold this task so that the scheduler doesn't get dropped. + // The variable '_scheduler' is owned by this scope. While pending() waits, + // the scheduler remains alive and cron jobs continue to run in the background. + std::future::pending::<()>().await; + } + Err(e) => { + eprintln!( + "[FATAL PASSWORD RESET TOKEN CLEANUP ERROR] Password Reset Token Cleanup Scheduler died: {}", + e + ); + } + } +} diff --git a/backend/src/task_workers/delete_series_worker.rs b/backend/src/task_workers/delete_series_worker.rs index 67c285b..ebb6a37 100644 --- a/backend/src/task_workers/delete_series_worker.rs +++ b/backend/src/task_workers/delete_series_worker.rs @@ -1,11 +1,13 @@ -use crate::database::storage::StorageClient; -use crate::database::{DatabaseService, Series, SeriesStatus}; -use anyhow::Context; -use backon::{BackoffBuilder, Retryable}; use std::sync::Arc; use std::time::Duration; + +use anyhow::Context; +use backon::{BackoffBuilder, Retryable}; use tokio::sync::mpsc; +use crate::database::storage::StorageClient; +use crate::database::{DatabaseService, Series, SeriesStatus}; + #[derive(Debug, Clone)] pub struct DeletionJob { series: Series, @@ -89,9 +91,7 @@ pub async fn run_deletion_worker( async move { execute_full_deletion(series_id, &db_attempt, storage_attempt) .await - .with_context(|| { - format!("Attempt for series {} failed", series_id) - }) + .with_context(|| format!("Attempt for series {} failed", series_id)) } }; @@ -112,10 +112,7 @@ pub async fn run_deletion_worker( ); if let Err(e_update) = db_service - .update_series_processing_status( - series_id, - SeriesStatus::DeletionFailed, - ) + .update_series_processing_status(series_id, SeriesStatus::DeletionFailed) .await { eprintln!(
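Both cron workers in this patch (the password-reset token cleanup above and the view-log cleanup below) depend on the same keepalive trick called out in their CRITICAL comments: the JobScheduler must stay owned by a live task, so after start() the task parks itself forever. A stripped-down sketch of that skeleton (cron expression and job body are placeholders):

```rust
use tokio_cron_scheduler::{Job, JobScheduler};

async fn run_noop_cron_worker() -> anyhow::Result<()> {
    let scheduler = JobScheduler::new().await?;
    scheduler
        .add(Job::new_async("0 0 3 * * * *", |_uuid, _lock| {
            Box::pin(async { println!("[CRON] tick"); })
        })?)
        .await?;
    scheduler.start().await?;
    // Park forever: dropping `scheduler` would silently stop every registered job.
    std::future::pending::<()>().await;
    Ok(())
}
```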
diff --git a/backend/src/task_workers/log_view_cleanup_worker.rs b/backend/src/task_workers/log_view_cleanup_worker.rs index b7819bc..d634b1e 100644 --- a/backend/src/task_workers/log_view_cleanup_worker.rs +++ b/backend/src/task_workers/log_view_cleanup_worker.rs @@ -1,33 +1,50 @@ +use tokio_cron_scheduler::{Job, JobScheduler}; + use crate::database::DatabaseService; -use std::time::Duration; pub async fn run_log_view_cleanup_worker(db_service: DatabaseService) { - println!("[WORKER] Log view cleanup worker started"); + let log_cleanup = async { + let scheduler = JobScheduler::new().await?; + let db_clone = db_service.clone(); + let cron_exp = "0 0 2 * * * *"; - let mut interval = tokio::time::interval(Duration::from_secs(24 * 60 * 60)); + let cleanup_job = Job::new_async(cron_exp, move |_uuid, _locked| { + let db = db_clone.clone(); + Box::pin(async move { + println!("[CRON] Starting daily view log cleanup (Pruning > 35 days)..."); - // The first tick from `interval` fires immediately - // Skip it to ensure the first cleanup run after 24 hours - interval.tick().await; + match db.cleanup_old_view_logs().await { + Ok(deleted) => { + if deleted > 0 { + println!("[CRON] Cleanup success. Pruned {} old rows.", deleted); + } else { + println!("[CRON] Cleanup ran. Database is clean (no rows > 35 days)."); + } + } + Err(e) => eprintln!("[CRON] Cleanup failed: {}", e), + } + }) + })?; - loop { - // Wait next tick in interval - interval.tick().await; + scheduler.add(cleanup_job).await?; + scheduler.start().await?; - match db_service.cleanup_old_view_logs().await { - Ok(deleted_rows) => { - if deleted_rows > 0 { - println!( - "[WORKER] Cleaned up {} old log view entries", - deleted_rows - ); - } else { - println!("[WORKER] No old log view entries to clean up"); - } - } - Err(e) => { - eprintln!("[WORKER] Error cleaning up log view entries: {}", e); - } + Ok::<JobScheduler, anyhow::Error>(scheduler) + } + .await; + + match log_cleanup { + Ok(_scheduler) => { + // CRITICAL: Hold this task so that the scheduler doesn't get dropped. + // The variable '_scheduler' is owned by this scope. While pending() waits, + // the scheduler remains alive and cron jobs continue to run in the background. + std::future::pending::<()>().await; + } + Err(e) => { + eprintln!( + "[FATAL LOG VIEW CLEANUP WORKER ERROR] Log View Cleanup Scheduler died: {}", + e ); } } } diff --git a/backend/src/task_workers/mod.rs b/backend/src/task_workers/mod.rs index 9cd9501..152848c 100644 --- a/backend/src/task_workers/mod.rs +++ b/backend/src/task_workers/mod.rs @@ -1,4 +1,5 @@ pub mod channels; +pub mod delete_password_reset_token_worker; pub mod delete_series_worker; pub mod log_view_cleanup_worker; pub mod repair_chapter_worker; diff --git a/backend/src/task_workers/repair_chapter_worker.rs b/backend/src/task_workers/repair_chapter_worker.rs index f446870..b399ca8 100644 --- a/backend/src/task_workers/repair_chapter_worker.rs +++ b/backend/src/task_workers/repair_chapter_worker.rs @@ -1,12 +1,14 @@ -use crate::app::orchestrator::repair_specific_chapter_series; -use crate::database::DatabaseService; -use crate::database::storage::StorageClient; -use crate::scraping::model::SitesConfig; +use std::sync::Arc; + use arc_swap::ArcSwap; use reqwest::Client; -use std::sync::Arc; use tokio::sync::mpsc; +use crate::database::DatabaseService; +use crate::database::storage::StorageClient; +use crate::processing::orchestrator::repair_specific_chapter_series; +use crate::scraping::model::SitesConfig; + #[derive(Debug)] pub struct RepairChapterMsg { pub series_id: i32, diff --git a/backend/src/task_workers/series_check_worker.rs b/backend/src/task_workers/series_check_worker.rs index f37704c..ff1f02a 100644 --- a/backend/src/task_workers/series_check_worker.rs +++ b/backend/src/task_workers/series_check_worker.rs @@ -1,11 +1,14 @@ -use crate::app::orchestrator; +use std::sync::Arc; +use std::time::Duration; + +use arc_swap::ArcSwap; +use reqwest::Client; +use tokio::time::MissedTickBehavior; + use crate::database::storage::StorageClient; use crate::database::{DatabaseService, Series, SeriesStatus}; +use crate::processing::orchestrator; use crate::scraping::model::SitesConfig; -use arc_swap::ArcSwap; -use reqwest::Client; -use std::sync::Arc; -use std::time::Duration; #[derive(Debug)] pub struct SeriesCheckJob { @@ -20,37 +22,38 @@ pub async fn run_series_check_scheduler( println!("[SERIES-SCHEDULER] Starting..."); // Interval to check db for job - let mut interval = tokio::time::interval(Duration::from_secs(60)); + let mut interval = tokio::time::interval(Duration::from_secs(30)); - // Skip first tick - interval.tick().await; + // Skip missed ticks instead of firing a burst to catch up + interval.set_missed_tick_behavior(MissedTickBehavior::Skip); loop { interval.tick().await; loop { - match db_service.find_and_lock_series_for_check().await { - Ok(Some(series)) => { + match db_service.find_and_lock_series_for_check(20).await {
+ Ok(series_list) => { + if series_list.is_empty() { + // If there are no jobs, wait for the next interval tick + break; + } + println!( - "[SERIES-SCHEDULER] Found series for check {}, id {}", - series.title, series.id + "[SERIES-SCHEDULER] Found batch of {} series to check", + series_list.len() ); - let job = SeriesCheckJob { series }; - if job_sender.send(job).await.is_err() { - eprintln!( - "[SERIES-SCHEDULER] CRITICAL: Receiver channel closed. Shutting down." - ); - return; + + for series in series_list { + let job = SeriesCheckJob { series }; + // Send worker queue + // If queue full, will wait (backpressure) until worker empty + if job_sender.send(job).await.is_err() { + eprintln!("[SERIES-SCHEDULER] CRITICAL: Channel closed."); + return; + } + } } - Ok(None) => { - // No job found, wait for next tick - break; - } Err(e) => { - eprintln!( - "[SERIES-SCHEDULER] Error finding {}. Retrying later", - e - ); + eprintln!("[SERIES-SCHEDULER] Error finding series: {}. Retrying later", e); break; } } @@ -101,11 +104,7 @@ pub async fn run_series_check_worker( }; if let Err(e) = db_service - .update_series_check_schedule( - series.id, - Some(final_status), - next_check_time, - ) + .update_series_check_schedule(series.id, Some(final_status), next_check_time) .await { eprintln!(
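The scheduler loop above relies on bounded-channel backpressure, as its comment notes: `send().await` suspends the scheduler whenever the 16-slot queue is full, so a slow worker throttles how quickly new series checks are claimed. A minimal sketch of that producer/worker shape (sizes and job type are illustrative):

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel::<u32>(16);

    // Worker: drains jobs at its own pace.
    tokio::spawn(async move {
        while let Some(job) = rx.recv().await {
            println!("worker handling job {job}");
        }
    });

    // Scheduler: send().await parks here whenever the 16-slot buffer is full.
    for job in 0..32 {
        if tx.send(job).await.is_err() {
            eprintln!("worker gone, stopping scheduler");
            return;
        }
    }
}
```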
diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e013bb5 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,63 @@ +version: '3.8' + +name: manga_app +services: + backend: + container_name: backend_rust + build: + context: ./backend + dockerfile: Dockerfile + ports: + - "8000:8000" + env_file: + - .env + environment: + - DATABASE_URL=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@database:5432/${POSTGRES_DB} + depends_on: + database: + condition: service_healthy + command: sh -c "sqlx migrate run && ./manga_app" + + frontend: + container_name: frontend_svelte + build: + context: ./frontend + dockerfile: Dockerfile + ports: + - "80:80" + depends_on: + - backend + + database: + image: postgres:17.7-alpine + restart: always + container_name: postgres_db + expose: + - 5432 + env_file: + - .env + volumes: + - postgres_data:/var/lib/postgresql/data/ + healthcheck: + test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}" ] + interval: 10s + timeout: 5s + retries: 5 + + migrate: + build: + context: ./backend + dockerfile: Dockerfile + container_name: rust_migrator + env_file: + - .env + environment: + DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@database:5432/${POSTGRES_DB}" + depends_on: + database: + condition: service_healthy + # Run the migration command and exit + command: sqlx migrate run + +volumes: + postgres_data: \ No newline at end of file diff --git a/nginx/Dockerfile b/nginx/Dockerfile deleted file mode 100644 index c0f93e9..0000000 --- a/nginx/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM frontend AS frontend_build - -FROM nginx:stable-alpine3.21-slim \ No newline at end of file diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 8967712..beeeb43 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -#channel = "nightly" -channel = "stable" +channel = "nightly" +#channel = "stable" components = ["rustfmt"] \ No newline at end of file diff --git a/rustfmt.toml b/rustfmt.toml index 0a74c3c..0b25950 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1,3 +1,8 @@ -max_width = 80 +max_width = 100 tab_spaces = 4 -edition = "2024" \ No newline at end of file +edition = "2024" +style_edition = "2024" +comment_width = 100 +imports_granularity = "Module" +group_imports = "StdExternalCrate" +unstable_features = true \ No newline at end of file