diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index cabc485..b8a9296 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -14,26 +14,25 @@ jobs: runs-on: blacksmith-2vcpu-ubuntu-2404 steps: - uses: actions/checkout@v6 + - name: Rust toolchain - uses: dtolnay/rust-toolchain@master - #uses: actions-rust-lang/setup-rust-toolchain@v1 + uses: actions-rust-lang/setup-rust-toolchain@v1 with: components: clippy - toolchain: nightly + cache: true + cache-on-failure: true + - name: Install and Cache Apt packages uses: awalsh128/cache-apt-pkgs-action@latest with: packages: nasm version: 1.0 - - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2 - with: - cache-on-failure: true + - name: Run linter (clippy action) - #env: SQLX_OFFLINE: true + env: + SQLX_OFFLINE: true uses: clechasseur/rs-clippy-check@v5 with: - #token: ${{ secrets.GITHUB_TOKEN }} args: --workspace # args: --verbose -- -W warnings @@ -41,11 +40,14 @@ jobs: runs-on: blacksmith-2vcpu-ubuntu-2404 steps: - uses: actions/checkout@v6 + - name: Rustfmt Toolchain uses: actions-rust-lang/setup-rust-toolchain@v1 with: - toolchain: nightly components: rustfmt + cache: true + cache-on-failure: true + - name: Rust fmt check uses: actions-rust-lang/rustfmt@v1 diff --git a/.sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json b/.sqlx/query-1b7182155d6f6ef9f582355c657ad82c2e5ce1fc7e4efd2122c3d57dbb84cde0.json similarity index 61% rename from .sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json rename to .sqlx/query-1b7182155d6f6ef9f582355c657ad82c2e5ce1fc7e4efd2122c3d57dbb84cde0.json index a1ebb55..8c57d60 100644 --- a/.sqlx/query-731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6.json +++ b/.sqlx/query-1b7182155d6f6ef9f582355c657ad82c2e5ce1fc7e4efd2122c3d57dbb84cde0.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT\n r.id,\n u.username as reporter_username,\n r.reporter_id,\n 
r.created_at,\n r.reason as \"reason!: ReportReason\",\n sc.id as chapter_id,\n sc.chapter_number as chapter_number,\n sc.title as chapter_series_title,\n c.id as comment_id,\n SUBSTRING(c.content_html, 1, 50) as comment_preview,\n COUNT(*) OVER() as total_items\n FROM reports r\n INNER JOIN users u ON r.reporter_id = u.id\n LEFT JOIN series_chapters sc ON r.chapter_id = sc.id\n LEFT JOIN series s ON sc.series_id = s.id\n LEFT JOIN comments c ON r.comment_id = c.id\n ORDER BY r.created_at DESC\n LIMIT $1\n OFFSET $2\n ", + "query": "\n SELECT\n r.id,\n u.username as reporter_username,\n r.reporter_id,\n r.created_at,\n r.reason as \"reason: ReportReason\",\n sc.id as chapter_id,\n sc.chapter_number as chapter_number,\n s.title as chapter_series_title,\n c.id as comment_id,\n SUBSTRING(c.content_html, 1, 50) as comment_preview,\n COUNT(*) OVER() as total_items\n FROM reports r\n INNER JOIN users u ON r.reporter_id = u.id\n LEFT JOIN series_chapters sc ON r.chapter_id = sc.id\n LEFT JOIN series s ON sc.series_id = s.id\n LEFT JOIN comments c ON r.comment_id = c.id\n ORDER BY r.created_at DESC\n LIMIT $1\n OFFSET $2\n ", "describe": { "columns": [ { @@ -25,7 +25,7 @@ }, { "ordinal": 4, - "name": "reason!: ReportReason", + "name": "reason: ReportReason", "type_info": { "Custom": { "name": "report_reason", @@ -92,11 +92,11 @@ false, false, false, - true, + false, false, null, null ] }, - "hash": "731032aa92d1405c18f5d6534092dfa6afa93aef6c1e2eb0506dad4d389e44e6" + "hash": "1b7182155d6f6ef9f582355c657ad82c2e5ce1fc7e4efd2122c3d57dbb84cde0" } diff --git a/.sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json b/.sqlx/query-3934adbd11086a12afeb81cacf1fc4d9ef7b9a80962efb7107f0c9e10472fe20.json similarity index 50% rename from .sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json rename to .sqlx/query-3934adbd11086a12afeb81cacf1fc4d9ef7b9a80962efb7107f0c9e10472fe20.json index d52ed59..45a59ae 100644 --- 
a/.sqlx/query-4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14.json +++ b/.sqlx/query-3934adbd11086a12afeb81cacf1fc4d9ef7b9a80962efb7107f0c9e10472fe20.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH locked_rows AS ( \n SELECT id\n FROM series_chapters\n WHERE status = 'Processing'\n ORDER BY created_at ASC \n LIMIT $1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series_chapters sc\n SET \n status = 'Processing', \n updated_at = NOW()\n FROM locked_rows lr, series s\n WHERE sc.id = lr.id AND sc.series_id = s.id\n RETURNING\n sc.id as chapter_id,\n sc.chapter_number,\n sc.source_url as chapter_url,\n s.id as series_id,\n s.title as series_title,\n s.source_website_host as source_host,\n s.current_source_url as series_url\n ", + "query": "\n WITH locked_rows AS (\n SELECT id\n FROM series_chapters\n WHERE status = 'Processing'\n ORDER BY created_at ASC\n LIMIT $1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series_chapters sc\n SET\n status = 'Processing',\n updated_at = NOW()\n FROM locked_rows lr, series s\n WHERE sc.id = lr.id AND sc.series_id = s.id\n RETURNING\n sc.id as chapter_id,\n sc.chapter_number,\n sc.source_url as chapter_url,\n s.id as series_id,\n s.title as series_title,\n s.source_website_host as source_host,\n s.current_source_url as series_url\n ", "describe": { "columns": [ { @@ -54,5 +54,5 @@ false ] }, - "hash": "4e99507d676e4da8900cace2df44e52907f74d57a294980dd4fb683267690f14" + "hash": "3934adbd11086a12afeb81cacf1fc4d9ef7b9a80962efb7107f0c9e10472fe20" } diff --git a/.sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json b/.sqlx/query-5ce9e0d4103250f4cfad173ee90ca2e91131d9862083f6a8678c18e6d3f6d477.json similarity index 65% rename from .sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json rename to .sqlx/query-5ce9e0d4103250f4cfad173ee90ca2e91131d9862083f6a8678c18e6d3f6d477.json index 8122dac..31b031f 100644 --- 
a/.sqlx/query-c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531.json +++ b/.sqlx/query-5ce9e0d4103250f4cfad173ee90ca2e91131d9862083f6a8678c18e6d3f6d477.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE\n processing_status = $1\n AND next_checked_at <= NOW()\n ORDER BY next_checked_at ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $3\n WHERE id IN (SELECT id FROM candidate)\n RETURNING\n id, \n title, \n current_source_url, source_website_host,\n last_chapter_found_in_storage,\n processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes\n ", + "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE\n processing_status = $1\n AND next_checked_at <= NOW()\n ORDER BY next_checked_at ASC\n LIMIT $2\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $3\n WHERE id IN (SELECT id FROM candidate)\n RETURNING\n id,\n title,\n current_source_url,\n source_website_host,\n check_interval_minutes\n ", "describe": { "columns": [ { @@ -25,35 +25,6 @@ }, { "ordinal": 4, - "name": "last_chapter_found_in_storage", - "type_info": "Float4" - }, - { - "ordinal": 5, - "name": "processing_status: SeriesStatus", - "type_info": { - "Custom": { - "name": "series_status", - "kind": { - "Enum": [ - "Pending", - "Processing", - "Available", - "Ongoing", - "Completed", - "Hiatus", - "Discontinued", - "Error", - "Pending Deletion", - "Deleting", - "Deletion Failed" - ] - } - } - } - }, - { - "ordinal": 6, "name": "check_interval_minutes", "type_info": "Int4" } @@ -108,10 +79,8 @@ false, false, false, - true, - false, false ] }, - "hash": "c2aea4e6e939393b141fa4035d8062e0ca5f3546d608aa1e5a4ef69814e05531" + "hash": "5ce9e0d4103250f4cfad173ee90ca2e91131d9862083f6a8678c18e6d3f6d477" } diff --git a/.sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json 
b/.sqlx/query-7a138c214307e2e4776a2519c4f99777db56aa40a538a227fc69efeb36ef98b9.json similarity index 94% rename from .sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json rename to .sqlx/query-7a138c214307e2e4776a2519c4f99777db56aa40a538a227fc69efeb36ef98b9.json index 9694ca5..ea8aece 100644 --- a/.sqlx/query-49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f.json +++ b/.sqlx/query-7a138c214307e2e4776a2519c4f99777db56aa40a538a227fc69efeb36ef98b9.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series \n WHERE title = $1\n ", + "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series\n WHERE title = $1\n ", "describe": { "columns": [ { @@ -139,5 +139,5 @@ false ] }, - "hash": "49005252d64c1f75617187b2c904218bfdde0826bc8810c260fdc1d82c72575f" + "hash": "7a138c214307e2e4776a2519c4f99777db56aa40a538a227fc69efeb36ef98b9" } diff --git a/.sqlx/query-7a6f18671e75f744c4ea3a07a9d92a1f86f4ba73c4f084261f267d3c99bb342b.json b/.sqlx/query-7a6f18671e75f744c4ea3a07a9d92a1f86f4ba73c4f084261f267d3c99bb342b.json new file mode 100644 index 0000000..68b6d51 --- /dev/null +++ b/.sqlx/query-7a6f18671e75f744c4ea3a07a9d92a1f86f4ba73c4f084261f267d3c99bb342b.json @@ -0,0 +1,104 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH search_candidates AS (\n SELECT\n 
r.id,\n u.username as reporter_username,\n r.reporter_id,\n r.created_at,\n r.reason,\n sc.id as chapter_id,\n sc.chapter_number,\n s.title as chapter_series_title,\n c.id as comment_id,\n c.content_user_markdown,\n c.content_html,\n GREATEST(\n similarity(u.username, $3),\n similarity(COALESCE(s.title, ''), $3),\n similarity(COALESCE(c.content_user_markdown, ''), $3)\n ) as sim_score\n FROM reports r\n INNER JOIN users u ON r.reporter_id = u.id\n LEFT JOIN series_chapters sc ON r.chapter_id = sc.id\n LEFT JOIN series s ON sc.series_id = s.id\n LEFT JOIN comments c ON r.comment_id = c.id\n WHERE\n -- Reporter Username\n (u.username ILIKE '%' || $3 || '%' OR (u.username % $3 AND similarity(u.username, $3) >= $4))\n OR\n -- Series Title (only if related to series)\n (s.title IS NOT NULL AND (s.title ILIKE '%' || $3 || '%' OR (s.title % $3 AND similarity(s.title, $3) >= $4)))\n OR\n -- Comment Content (only if related to comment)\n (c.content_user_markdown IS NOT NULL AND c.content_user_markdown ILIKE '%' || $3 || '%')\n ),\n ranked_results AS (\n SELECT *,\n CASE\n -- Reporter Username\n WHEN reporter_username ILIKE $3 THEN 10\n WHEN reporter_username ILIKE $3 || '%' THEN 9\n -- Series Title\n WHEN chapter_series_title ILIKE '%' || $3 || '%' THEN 8\n -- Comment Content\n ELSE 5\n END as search_rank\n FROM search_candidates\n ),\n total_count AS (\n SELECT COUNT(*) as total FROM ranked_results\n )\n SELECT\n rr.id,\n rr.reporter_username,\n rr.reporter_id,\n rr.created_at,\n rr.reason as \"reason: ReportReason\",\n rr.chapter_id,\n rr.chapter_number,\n rr.chapter_series_title,\n rr.comment_id,\n SUBSTRING(rr.content_html, 1, 50) as comment_preview,\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.created_at DESC\n LIMIT $1\n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "reporter_username", + 
"type_info": "Text" + }, + { + "ordinal": 2, + "name": "reporter_id", + "type_info": "Int4" + }, + { + "ordinal": 3, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "reason: ReportReason", + "type_info": { + "Custom": { + "name": "report_reason", + "kind": { + "Enum": [ + "broken_image", + "wrong_chapter", + "duplicate_chapter", + "missing_image", + "missing_chapter", + "slow_loading", + "broken_text", + "toxic", + "racist", + "spam", + "other" + ] + } + } + } + }, + { + "ordinal": 5, + "name": "chapter_id", + "type_info": "Int4" + }, + { + "ordinal": 6, + "name": "chapter_number", + "type_info": "Float4" + }, + { + "ordinal": 7, + "name": "chapter_series_title", + "type_info": "Text" + }, + { + "ordinal": 8, + "name": "comment_id", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "comment_preview", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Text", + "Float4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + null, + null + ] + }, + "hash": "7a6f18671e75f744c4ea3a07a9d92a1f86f4ba73c4f084261f267d3c99bb342b" +} diff --git a/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json b/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json deleted file mode 100644 index ccf7448..0000000 --- a/.sqlx/query-a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n WITH base_search AS (\n SELECT\n u.id,\n u.username,\n u.email,\n u.role_id,\n u.is_active,\n r.role_name,\n similarity(u.username || ' ' || u.email, $3) AS sim_score\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE\n (u.username ILIKE '%' || $3 || '%')\n OR\n (u.email ILIKE '%' || $3 || '%')\n OR\n (\n (u.username || ' ' || u.email) % $3\n AND\n 
similarity(u.username || ' ' || u.email, $3) >= $4\n )\n ),\n ranked_results AS (\n SELECT\n *,\n CASE\n WHEN username ILIKE $3 OR email ILIKE $3 THEN 10\n WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 8\n ELSE 6\n END as search_rank\n FROM base_search\n ),\n total_count AS (\n SELECT COUNT(*) AS total FROM ranked_results\n )\n SELECT\n rr.id,\n rr.username,\n rr.email,\n rr.role_name,\n rr.role_id,\n rr.is_active,\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n -- We can order by columns (search_rank, sim_score) that are not in the final SELECT list\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC\n LIMIT $1\n OFFSET $2\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "username", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "email", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "role_name", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "role_id", - "type_info": "Int4" - }, - { - "ordinal": 5, - "name": "is_active", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "total_items", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Text", - "Float4" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - true, - null - ] - }, - "hash": "a4d5abc3b51e7e82774d067146a2e8e371a3684339e1bca5c2b1a3c4116d26a0" -} diff --git a/.sqlx/query-6268cbf2da3d4eab296bfb75c99a4701c0c906d9ffef98ca603fc42523388544.json b/.sqlx/query-d3cea17667c18958b296f28b849c66fb84ecdcd57d867f102d263a461a6538f8.json similarity index 76% rename from .sqlx/query-6268cbf2da3d4eab296bfb75c99a4701c0c906d9ffef98ca603fc42523388544.json rename to .sqlx/query-d3cea17667c18958b296f28b849c66fb84ecdcd57d867f102d263a461a6538f8.json index 415f031..bd639c7 100644 --- a/.sqlx/query-6268cbf2da3d4eab296bfb75c99a4701c0c906d9ffef98ca603fc42523388544.json +++ 
b/.sqlx/query-d3cea17667c18958b296f28b849c66fb84ecdcd57d867f102d263a461a6538f8.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "UPDATE series SET processing_status = $1, last_checked_at = NOW(), next_checked_at = $2 WHERE id = $3", + "query": "UPDATE series\n SET processing_status = $1, last_checked_at = NOW(), next_checked_at = $2 WHERE id = $3", "describe": { "columns": [], "parameters": { @@ -31,5 +31,5 @@ }, "nullable": [] }, - "hash": "6268cbf2da3d4eab296bfb75c99a4701c0c906d9ffef98ca603fc42523388544" + "hash": "d3cea17667c18958b296f28b849c66fb84ecdcd57d867f102d263a461a6538f8" } diff --git a/.sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json b/.sqlx/query-dfae1166db2514bb7ed82c59ed153fa2c6bff065a89f2fde06ed3d2589eecb42.json similarity index 78% rename from .sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json rename to .sqlx/query-dfae1166db2514bb7ed82c59ed153fa2c6bff065a89f2fde06ed3d2589eecb42.json index 2a29e13..013b067 100644 --- a/.sqlx/query-4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b.json +++ b/.sqlx/query-dfae1166db2514bb7ed82c59ed153fa2c6bff065a89f2fde06ed3d2589eecb42.json @@ -1,22 +1,12 @@ { "db_name": "PostgreSQL", - "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE processing_status = $1\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $2\n WHERE id = (SELECT id FROM candidate)\n RETURNING\n id,\n title,\n cover_image_url\n ", + "query": "\n WITH candidate AS (\n SELECT id FROM series\n WHERE processing_status = $1\n LIMIT 1\n FOR UPDATE SKIP LOCKED\n )\n UPDATE series\n SET processing_status = $2\n WHERE id = (SELECT id FROM candidate)\n RETURNING\n id\n ", "describe": { "columns": [ { "ordinal": 0, "name": "id", "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "title", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "cover_image_url", - "type_info": "Text" } ], "parameters": { @@ -64,10 +54,8 @@ ] 
}, "nullable": [ - false, - false, false ] }, - "hash": "4bc9cada744d2c91bf58c583fda5d5c9e97d9fd394391e623845d2834f95625b" + "hash": "dfae1166db2514bb7ed82c59ed153fa2c6bff065a89f2fde06ed3d2589eecb42" } diff --git a/.sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json b/.sqlx/query-e9f94d2ecce9ab5216efeccc66547a2a9bdaedd03861d5a67f54a23991168b91.json similarity index 94% rename from .sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json rename to .sqlx/query-e9f94d2ecce9ab5216efeccc66547a2a9bdaedd03861d5a67f54a23991168b91.json index 7e307af..83a021b 100644 --- a/.sqlx/query-35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d.json +++ b/.sqlx/query-e9f94d2ecce9ab5216efeccc66547a2a9bdaedd03861d5a67f54a23991168b91.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series \n WHERE id = $1\n ", + "query": "\n SELECT id, title, original_title, description, cover_image_url, current_source_url,\n source_website_host, views_count, bookmarks_count, total_rating_score, total_ratings_count,\n last_chapter_found_in_storage, processing_status as \"processing_status: SeriesStatus\",\n check_interval_minutes, last_checked_at, next_checked_at, created_at, updated_at\n FROM series\n WHERE id = $1\n ", "describe": { "columns": [ { @@ -139,5 +139,5 @@ false ] }, - "hash": "35fbad5e10a60481b24b5b3529ed6df8469dc31fbd21875a1d9fb196e2e3aa8d" + "hash": "e9f94d2ecce9ab5216efeccc66547a2a9bdaedd03861d5a67f54a23991168b91" } diff --git a/.sqlx/query-fdfc4e114a396e29ad97c15b5519ef44e24d71a5290ed386951b7dc4c1c85c50.json 
b/.sqlx/query-fdfc4e114a396e29ad97c15b5519ef44e24d71a5290ed386951b7dc4c1c85c50.json new file mode 100644 index 0000000..c879018 --- /dev/null +++ b/.sqlx/query-fdfc4e114a396e29ad97c15b5519ef44e24d71a5290ed386951b7dc4c1c85c50.json @@ -0,0 +1,61 @@ +{ + "db_name": "PostgreSQL", + "query": "\n WITH search_results AS (\n SELECT\n u.id,\n u.username,\n u.email,\n u.role_id,\n u.is_active,\n r.role_name,\n similarity(u.username || ' ' || u.email, $3) AS sim_score\n FROM users u\n JOIN roles r ON u.role_id = r.id\n WHERE\n (u.username ILIKE '%' || $3 || '%')\n OR\n (u.email ILIKE '%' || $3 || '%')\n OR\n (\n (u.username || ' ' || u.email) % $3\n AND\n similarity(u.username || ' ' || u.email, $3) >= $4\n )\n ),\n ranked_results AS (\n SELECT\n *,\n CASE\n WHEN username ILIKE $3 OR email ILIKE $3 THEN 10\n WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 8\n ELSE 6\n END as search_rank\n -- Removed redundant sim_score definition here to fix 'ambiguous' error\n FROM search_results\n ),\n total_count AS (\n SELECT COUNT(*) AS total FROM ranked_results\n )\n SELECT\n rr.id,\n rr.username,\n rr.email,\n rr.role_name,\n rr.role_id,\n rr.is_active,\n tc.total as total_items\n FROM ranked_results rr\n CROSS JOIN total_count tc\n ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC\n LIMIT $1 \n OFFSET $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "username", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "email", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "role_name", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "role_id", + "type_info": "Int4" + }, + { + "ordinal": 5, + "name": "is_active", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "total_items", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Text", + "Float4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, 
+ true, + null + ] + }, + "hash": "fdfc4e114a396e29ad97c15b5519ef44e24d71a5290ed386951b7dc4c1c85c50" +} diff --git a/backend/src/api/admin/admin_comment_handler.rs b/backend/src/api/admin/admin_actions_handler.rs similarity index 66% rename from backend/src/api/admin/admin_comment_handler.rs rename to backend/src/api/admin/admin_actions_handler.rs index c10eade..23a6e00 100644 --- a/backend/src/api/admin/admin_comment_handler.rs +++ b/backend/src/api/admin/admin_actions_handler.rs @@ -1,11 +1,12 @@ -use axum::extract::{Path, State}; -use axum::http::StatusCode; use axum::Json; +use axum::extract::{Path, Query, State}; +use axum::http::StatusCode; use axum_core::__private::tracing::{error, info}; use axum_core::response::{IntoResponse, Response}; use serde_json::json; -use crate::api::extractor::ModeratorOrHigherUser; +use crate::api::admin::ReportPaginationParams; +use crate::api::extractor::{AdminOrHigherUser, ModeratorOrHigherUser}; use crate::builder::startup::AppState; use crate::database::{DeleteCommentResult, UpdateCommentResponse}; @@ -105,3 +106,65 @@ pub async fn admin_delete_comment_handler( } } } + +pub async fn list_reports_handler( + admin: AdminOrHigherUser, + State(state): State, + Query(params): Query, +) -> Response { + info!( + "->> {:<12} - list_reports_handler - user: {}", + "HANDLER", admin.0.username + ); + + match state + .db_service + .get_admin_paginated_pending_reports( + params.page, + params.page_size, + params.search.as_deref(), + ) + .await + { + Ok(paginated_result) => ( + StatusCode::OK, + Json(json!({"status": "success", "data": paginated_result})), + ) + .into_response(), + Err(e) => { + error!("Failed to fetch reports: {}", e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(json!({"status": "error", "message": "Failed to retrieve reports."})), + ) + .into_response() + } + } +} + +pub async fn resolve_report_handler( + admin: AdminOrHigherUser, + State(state): State, + Path(report_id): Path, +) -> Response { + info!( + "->> {:<12} 
- resolve_report_handler - user: {}, report_id: {}", + "HANDLER", admin.0.username, report_id + ); + + match state.db_service.admin_resolve_reports(report_id).await { + Ok(_) => ( + StatusCode::OK, + Json(json!({"status": "success", "message": format!("Report #{} resolved and cleared.", report_id)})), + ) + .into_response(), + Err(e) => { + error!("Failed to resolve report {}: {}", report_id, e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(json!({"status": "error", "message": "Failed to resolve report."})), + ) + .into_response() + } + } +} diff --git a/backend/src/api/admin/admin_routes.rs b/backend/src/api/admin/admin_routes.rs index 4d31829..f039933 100644 --- a/backend/src/api/admin/admin_routes.rs +++ b/backend/src/api/admin/admin_routes.rs @@ -1,7 +1,9 @@ -use axum::routing::{delete, get, patch, post}; use axum::Router; +use axum::routing::{delete, get, patch, post}; -use crate::api::admin::admin_comment_handler::admin_delete_comment_handler; +use crate::api::admin::admin_actions_handler::{ + admin_delete_comment_handler, list_reports_handler, resolve_report_handler, +}; use crate::api::admin::admin_series_handlers::{ create_category_tag_handler, create_new_series_handler, delete_category_tag_handler, delete_chapter_handler, delete_series_handler, get_all_paginated_series_handler, @@ -26,6 +28,8 @@ fn admin_user_routes() -> Router { "/comments/delete/{id}", delete(admin_delete_comment_handler), ) + .route("/reports/list", get(list_reports_handler)) + .route("/reports/resolve/{id}", delete(resolve_report_handler)) } /// Admin Series management routes diff --git a/backend/src/api/admin/mod.rs b/backend/src/api/admin/mod.rs index 8c44278..c8f8984 100644 --- a/backend/src/api/admin/mod.rs +++ b/backend/src/api/admin/mod.rs @@ -1,12 +1,14 @@ use axum::Json; use axum_core::__private::tracing::log::error; use axum_core::response::{IntoResponse, Response}; +use chrono::{DateTime, Utc}; use reqwest::StatusCode; use serde::{Deserialize, Serialize}; use 
crate::common::error::AuthError; +use crate::database::ReportReason; -pub mod admin_comment_handler; +pub mod admin_actions_handler; pub mod admin_routes; pub mod admin_series_handlers; pub mod admin_user_handler; @@ -154,3 +156,29 @@ pub struct AdminUpdateUserPayload { role_id: Option, is_active: Option, } + +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReportPaginationParams { + #[serde(default = "default_page")] + page: u32, + #[serde(default = "default_page_size")] + page_size: u32, + #[serde(default)] + search: Option, +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ReportView { + pub id: i32, + pub reporter_username: String, + pub reporter_id: i32, + pub created_at: DateTime, + pub reason: ReportReason, + pub chapter_id: Option, + pub chapter_number: Option, + pub chapter_series_title: Option, + pub comment_id: Option, + pub comment_preview: Option, +} diff --git a/backend/src/api/public/chapter_handlers.rs b/backend/src/api/public/chapter_handlers.rs new file mode 100644 index 0000000..5ffdec2 --- /dev/null +++ b/backend/src/api/public/chapter_handlers.rs @@ -0,0 +1,44 @@ +use axum::Json; +use axum::extract::{Path, State}; +use axum::http::StatusCode; +use axum_core::__private::tracing::error; +use axum_core::response::{IntoResponse, Response}; + +use crate::api::extractor::AuthenticatedUser; +use crate::builder::startup::AppState; +use crate::database::{CreateChapterReportRequest, ReportTarget}; + +pub async fn report_chapter_handler( + user: AuthenticatedUser, + State(state): State, + Path(chapter_id): Path, + Json(payload): Json, +) -> Response { + let reason = payload.reason.into(); + + match state + .db_service + .user_submit_report(user.id, ReportTarget::Chapter(chapter_id), reason) + .await + { + Ok(_) => ( + StatusCode::OK, + Json(serde_json::json!({ + "status": "success", + "message": "Report submitted successfully. Thank you for helping us." 
+ })), + ) + .into_response(), + Err(e) => { + error!("Failed to submit report for chapter {}: {}", chapter_id, e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "status": "error", + "message": "Failed to submit report." + })), + ) + .into_response() + } + } +} diff --git a/backend/src/api/public/comments_handlers.rs b/backend/src/api/public/comment_handlers.rs similarity index 93% rename from backend/src/api/public/comments_handlers.rs rename to backend/src/api/public/comment_handlers.rs index 0acbc9a..a2edb34 100644 --- a/backend/src/api/public/comments_handlers.rs +++ b/backend/src/api/public/comment_handlers.rs @@ -1,6 +1,6 @@ +use axum::Json; use axum::extract::{Path, Query, State}; use axum::http::StatusCode; -use axum::Json; use axum_core::__private::tracing::error; use axum_core::response::{IntoResponse, Response}; use axum_extra::extract::Multipart; @@ -10,7 +10,10 @@ use uuid::Uuid; use crate::api::extractor::{AuthenticatedUser, OptionalAuthenticatedUser}; use crate::api::public::user_handlers::extract_field_data; use crate::builder::startup::AppState; -use crate::database::{Comment, CommentEntityType, CommentSort, DeleteCommentResult, VotePayload}; +use crate::database::{ + Comment, CommentEntityType, CommentSort, CreateCommentReportRequest, DeleteCommentResult, + ReportTarget, VotePayload, +}; /// Helper function to recursively prepend the base CDN URL to all comment attachment URLs. /// This modifies the comments in place using an iterative stack-based approach. 
@@ -485,3 +488,39 @@ pub async fn vote_on_comment_handler( } } } + +pub async fn report_comment_handler( + user: AuthenticatedUser, + State(state): State, + Path(comment_id): Path, + Json(payload): Json, +) -> Response { + let reason = payload.reason.into(); + + match state + .db_service + .user_submit_report(user.id, ReportTarget::Comment(comment_id), reason) + .await + { + Ok(_) => ( + StatusCode::OK, + Json(serde_json::json!({ + "status": "success", + "message": "Report comment submitted successfully." + })), + ) + .into_response(), + + Err(e) => { + error!("Failed to submit report for comment {}: {}", comment_id, e); + ( + StatusCode::INTERNAL_SERVER_ERROR, + Json(serde_json::json!({ + "status": "error", + "message": "Failed to submit report." + })), + ) + .into_response() + } + } +} diff --git a/backend/src/api/public/mod.rs b/backend/src/api/public/mod.rs index 8e3a16b..ac91cc6 100644 --- a/backend/src/api/public/mod.rs +++ b/backend/src/api/public/mod.rs @@ -1,5 +1,6 @@ pub mod auth_handlers; -pub mod comments_handlers; +pub mod chapter_handlers; +pub mod comment_handlers; pub mod public_routes; pub mod series_handlers; pub mod user_handlers; diff --git a/backend/src/api/public/public_routes.rs b/backend/src/api/public/public_routes.rs index 87d67ac..2ecc047 100644 --- a/backend/src/api/public/public_routes.rs +++ b/backend/src/api/public/public_routes.rs @@ -1,15 +1,16 @@ -use axum::routing::{delete, get, patch, post}; use axum::Router; +use axum::routing::{delete, get, patch, post}; use crate::api::public::auth_handlers::{ forgot_password_handler, login_handler, logout_handler, protected_handler, realtime_check_username_handler, refresh_access_token_handler, register_new_user_handler, reset_password_handler, }; -use crate::api::public::comments_handlers::{ +use crate::api::public::chapter_handlers::report_chapter_handler; +use crate::api::public::comment_handlers::{ create_chapter_comment_handler, create_series_comment_handler, delete_comment_handler, - 
get_chapter_comment_handler, get_series_comment_handler, update_existing_comment_handler, - upload_comment_attachments_handler, vote_on_comment_handler, + get_chapter_comment_handler, get_series_comment_handler, report_comment_handler, + update_existing_comment_handler, upload_comment_attachments_handler, vote_on_comment_handler, }; use crate::api::public::series_handlers::{ browse_series_handler, fetch_chapter_details_handler, fetch_most_viewed_series_handler, @@ -59,6 +60,8 @@ fn user_logged_in_api_routes() -> Router { "/series/{id}/bookmark/status", get(get_bookmark_status_current_user_handler), ) + .route("/series/chapter/{id}/report", post(report_chapter_handler)) + .route("/comments/{id}/report", post(report_comment_handler)) } /// General public api routes (no authentication required) diff --git a/backend/src/database/admin_actions.rs b/backend/src/database/admin_actions.rs new file mode 100644 index 0000000..e1b168e --- /dev/null +++ b/backend/src/database/admin_actions.rs @@ -0,0 +1,1081 @@ +use super::*; +use crate::api::admin::ReportView; +use crate::api::extractor::Role; + +// ========================================================================= +// Admin Series Management +// ========================================================================= +impl DatabaseService { + pub async fn add_new_series(&self, data: &NewSeriesData<'_>) -> AnyhowResult { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + let host = get_host_from_url(Some(data.source_url)); + + let new_series_id = sqlx::query_scalar!( + r#" + INSERT INTO series + (title, original_title, description, cover_image_url, current_source_url, source_website_host, check_interval_minutes) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id + "#, + data.title, + data.original_title, + data.description, + data.cover_image_url, + data.source_url, + host, + data.check_interval_minutes, + ) + .fetch_one(&mut *tx) + .await + .context("Failed to add series 
with sqlx")?; + + if let Some(author_names) = data.authors { + for name in author_names { + let author_id = sqlx::query_scalar!( + r#" + WITH ins AS( + INSERT INTO authors (name) + VALUES ($1) + ON CONFLICT (name) DO NOTHING + RETURNING id + ) + SELECT id FROM ins + UNION ALL + SELECT id FROM authors WHERE name = $1 + LIMIT 1 + "#, + name + ) + .fetch_one(&mut *tx) + .await + .context("Failed to find or create author with sqlx")?; + + sqlx::query!( + "INSERT INTO series_authors (series_id, author_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", + new_series_id, + author_id + ).execute(&mut *tx).await.context(format!("Failed to link author {} to ", name))?; + } + } + + if let Some(category_ids) = data.category_ids + && !category_ids.is_empty() + { + for &category_id in category_ids { + // Insert the relationship into the series_categories junction table. + sqlx::query!( + "INSERT INTO series_categories (series_id, category_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", + new_series_id, + category_id + ) + .execute(&mut *tx) + .await + .context(format!("Failed to link category {} to series", category_id))?; + } + } + + tx.commit().await.context("Failed to commit transaction")?; + + Ok(new_series_id) + } + + pub async fn update_series_metadata( + &self, + series_id: i32, + data: &UpdateSeriesData<'_>, + ) -> AnyhowResult { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + let host = get_host_from_url(data.source_url); + + let result = sqlx::query!( + "UPDATE series + SET + title = COALESCE($1, title), + original_title = COALESCE($2, original_title), + description = COALESCE($3, description), + cover_image_url = COALESCE($4, cover_image_url), + current_source_url = COALESCE($5, current_source_url), + source_website_host = COALESCE($6, source_website_host), + check_interval_minutes = COALESCE($7, check_interval_minutes), + updated_at = NOW() + WHERE id = $8", + data.title, + data.original_title, + data.description, + 
data.cover_image_url, + data.source_url, + host, + data.check_interval_minutes, + series_id + ) + .execute(&mut *tx) + .await + .context("Failed to update series with sqlx")?; + + if let Some(author_names) = data.authors { + sqlx::query!("DELETE FROM series_authors WHERE series_id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete existing authors for series")?; + + for name in author_names { + let author_id = sqlx::query_scalar!( + r#" + WITH ins AS ( + INSERT INTO authors (name) VALUES ($1) + ON CONFLICT (name) DO NOTHING + RETURNING id + ) + SELECT id FROM ins + UNION ALL + SELECT id FROM authors WHERE name = $1 + LIMIT 1 + "#, + name + ) + .fetch_one(&mut *tx) + .await + .context(format!("Failed to find or create author: {}", name))?; + + sqlx::query!( + "INSERT INTO series_authors (series_id, author_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", + series_id, + author_id + ) + .execute(&mut *tx) + .await + .context(format!("Failed to link author {} to series", name))?; + } + } + + if let Some(category_ids) = data.category_ids { + sqlx::query!( + "DELETE FROM series_categories WHERE series_id = $1", + series_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete existing categories for series")?; + + if !category_ids.is_empty() { + for category_id in category_ids { + sqlx::query!( + "INSERT INTO series_categories (series_id, category_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", + series_id, + category_id + ) + .execute(&mut *tx) + .await + .context(format!("Failed to link category {} to series", category_id))?; + } + } + } + + tx.commit().await.context("Failed to commit transaction")?; + + Ok(result.rows_affected()) + } + + // Get paginated series search list for admin panel + pub async fn get_admin_paginated_series( + &self, + page: u32, + page_size: u32, + search_query: Option<&str>, + ) -> AnyhowResult> { + let page = page.max(1); + let limit = page_size as i64; + let offset = (page as i64 - 1) * limit; + + #[derive(Debug, 
FromRow)] + struct QueryResult { + id: i32, + title: String, + original_title: Option, + description: String, + cover_image_url: String, + current_source_url: String, + updated_at: DateTime, + processing_status: SeriesStatus, + #[sqlx(json)] + authors: serde_json::Value, + total_items: Option, + } + + let record_list = match search_query.filter(|q| !q.trim().is_empty()) { + Some(search_match) => { + let search_match = search_match.trim(); + let similarity_threshold = 0.20_f32; + + sqlx::query_as!( + QueryResult, + r#" + WITH base_search AS ( + SELECT + s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.current_source_url, s.updated_at, s.processing_status, + -- Calculate similarity score for ranking + similarity(s.title, $3) as sim_score + FROM series s + WHERE + s.title ILIKE '%' || $3 || '%' + OR + (s.title % $3 AND similarity(s.title, $3) >= $4) + ), + ranked_results AS ( + SELECT + *, + CASE + WHEN title ILIKE $3 THEN 10 + WHEN title ILIKE $3 || '%' THEN 8 + WHEN title ILIKE '%' || $3 || '%' THEN 6 + ELSE 4 + END as search_rank + FROM base_search + ), + total_count AS ( + SELECT COUNT(*) AS total FROM ranked_results + ) + SELECT + rr.id, rr.title, rr.original_title, rr.description, + rr.cover_image_url, rr.current_source_url, rr.updated_at, + rr.processing_status as "processing_status: SeriesStatus", + -- Aggregate author names into a JSON array for each series + COALESCE( + json_agg(a.name) FILTER (WHERE a.id IS NOT NULL), + '[]'::json + ) AS "authors!", + tc.total as total_items + FROM ranked_results rr + CROSS JOIN total_count tc + LEFT JOIN series_authors sa ON rr.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + GROUP BY + rr.id, rr.title, rr.original_title, rr.description, rr.cover_image_url, + rr.current_source_url, rr.updated_at, rr.processing_status, + rr.search_rank, rr.sim_score, tc.total + -- Order by the best rank, then by similarity, then by ID for stable sorting + ORDER BY rr.search_rank DESC, rr.sim_score DESC, 
rr.id ASC + LIMIT $1 + OFFSET $2 + "#, + limit, + offset, + search_match, + similarity_threshold, + ) + .fetch_all(&self.pool) + .await + .context("Failed to query all series") + } + None => { + // No search - simple pagination + sqlx::query_as!( + QueryResult, + r#" + SELECT + s.id, s.title, s.original_title, s.description, s.cover_image_url, + s.current_source_url, s.updated_at, + s.processing_status as "processing_status: SeriesStatus", + COALESCE( + json_agg(a.name) FILTER (WHERE a.id IS NOT NULL), + '[]'::json + ) as "authors!", + COUNT(*) OVER () as total_items + FROM + series s + LEFT JOIN series_authors sa ON s.id = sa.series_id + LEFT JOIN authors a ON sa.author_id = a.id + GROUP BY s.id + ORDER BY s.updated_at DESC + LIMIT $1 OFFSET $2 + "#, + limit, + offset + ) + .fetch_all(&self.pool) + .await + .context("Failed to get paginated series without search") + } + }?; + + let total_items = record_list + .first() + .map_or(0, |row| row.total_items.unwrap_or(0)); + + let series_list = record_list + .into_iter() + .map(|r| SeriesWithAuthors { + id: r.id, + title: r.title, + original_title: r.original_title, + description: r.description, + cover_image_url: r.cover_image_url, + current_source_url: r.current_source_url, + processing_status: r.processing_status, + updated_at: r.updated_at, + authors: serde_json::from_value(r.authors).unwrap_or_default(), + }) + .collect(); + + Ok(PaginatedResult { + items: series_list, + total_items, + }) + } + + pub async fn mark_series_for_deletion(&self, series_id: i32) -> AnyhowResult { + let result = sqlx::query!( + "UPDATE series + SET processing_status = $1, updated_at = NOW() + WHERE id = $2 + AND processing_status NOT IN ($3, $4)", + SeriesStatus::PendingDeletion as _, + series_id, + SeriesStatus::PendingDeletion as _, + SeriesStatus::Deleting as _, + ) + .execute(&self.pool) + .await + .context("Failed to mark series for deletion with sqlx")?; + + Ok(result.rows_affected()) + } + + pub async fn delete_series_by_id(&self, 
series_id: i32) -> AnyhowResult { + let mut tx = self + .pool + .begin() + .await + .context("Failed to start transaction for series deletion")?; + + let chapter_ids: Vec = sqlx::query_scalar!( + "SELECT id FROM series_chapters WHERE series_id = $1", + series_id + ) + .fetch_all(&mut *tx) + .await + .context("Failed to get chapter IDs for deletion")?; + + if !chapter_ids.is_empty() { + // Delete all image record for all chapters + sqlx::query!( + "DELETE FROM chapter_images WHERE chapter_id = ANY ($1)", + &chapter_ids + ) + .execute(&mut *tx) + .await + .context("Failed to delete chapter images")?; + } + + // Delete all chapter records + sqlx::query!( + "DELETE FROM series_chapters WHERE series_id = $1", + series_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete series chapters")?; + + // Delete all author link records + sqlx::query!("DELETE FROM series_authors WHERE series_id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete series-authors links")?; + + let result = sqlx::query!("DELETE FROM series WHERE id = $1", series_id) + .execute(&mut *tx) + .await + .context("Failed to delete series")?; + + tx.commit() + .await + .context("Failed to commit transaction for series deletion")?; + + Ok(result.rows_affected()) + } + + pub async fn create_category_tag(&self, name: &str) -> AnyhowResult { + let category = sqlx::query_as!( + CategoryTag, + "INSERT INTO categories (name) VALUES ($1) RETURNING id, name", + name + ) + .fetch_one(&self.pool) + .await + .context("Failed to create category tag with sqlx")?; + + Ok(category) + } + + pub async fn delete_category_tag(&self, id: i32) -> AnyhowResult { + let result = sqlx::query!("DELETE FROM categories WHERE id = $1", id) + .execute(&self.pool) + .await + .context("Failed to delete category tag with sqlx")?; + + Ok(result.rows_affected()) + } +} + +// ========================================================================= +// Admin Chapter Management +// 
========================================================================= +impl DatabaseService { + pub async fn delete_chapter_and_images_for_chapter( + &self, + series_id: i32, + chapter_number: f32, + ) -> AnyhowResult { + // exclusive connection from the pool + let mut tx = self + .pool + .begin() + .await + .context("Failed to start transaction")?; + + let chapter_id_to_delete = sqlx::query_scalar!( + "SELECT id FROM series_chapters WHERE series_id = $1 AND chapter_number = $2", + series_id, + chapter_number, + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to get chapter ID to delete")?; + + if let Some(chapter_id) = chapter_id_to_delete { + sqlx::query!( + "DELETE FROM chapter_images WHERE chapter_id = $1", + chapter_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete chapter images")?; + + let result = sqlx::query!("DELETE FROM series_chapters WHERE id = $1", chapter_id) + .execute(&mut *tx) + .await + .context("Failed to delete chapter")?; + + // If transaction was successful, commit it + tx.commit().await.context("Failed to commit transaction")?; + + Ok(result.rows_affected()) + } else { + Ok(0) // No chapter found to delete + } + } +} + +// ========================================================================= +// Admin User Management +// ========================================================================= +impl DatabaseService { + // Get paginated user search list for admin panel + pub async fn get_admin_paginated_user( + &self, + page: u32, + page_size: u32, + search_query: Option<&str>, + ) -> AnyhowResult> { + let page = page.max(1); + let limit = page_size as i64; + let offset = (page as i64 - 1) * limit; + + struct UserRow { + id: i32, + username: String, + email: String, + role_id: i32, + role_name: String, + is_active: Option, + total_items: Option, + } + + let records: Vec = match search_query.filter(|q| !q.trim().is_empty()) { + Some(search_match) => { + let search_match = search_match.trim(); + let 
similarity_threshold = 0.30_f32; + + sqlx::query_as!( + UserRow, + r#" + WITH search_results AS ( + SELECT + u.id, + u.username, + u.email, + u.role_id, + u.is_active, + r.role_name, + similarity(u.username || ' ' || u.email, $3) AS sim_score + FROM users u + JOIN roles r ON u.role_id = r.id + WHERE + (u.username ILIKE '%' || $3 || '%') + OR + (u.email ILIKE '%' || $3 || '%') + OR + ( + (u.username || ' ' || u.email) % $3 + AND + similarity(u.username || ' ' || u.email, $3) >= $4 + ) + ), + ranked_results AS ( + SELECT + *, + CASE + WHEN username ILIKE $3 OR email ILIKE $3 THEN 10 + WHEN username ILIKE '%' || $3 || '%' OR email ILIKE '%' || $3 || '%' THEN 8 + ELSE 6 + END as search_rank + -- Removed redundant sim_score definition here to fix 'ambiguous' error + FROM search_results + ), + total_count AS ( + SELECT COUNT(*) AS total FROM ranked_results + ) + SELECT + rr.id, + rr.username, + rr.email, + rr.role_name, + rr.role_id, + rr.is_active, + tc.total as total_items + FROM ranked_results rr + CROSS JOIN total_count tc + ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.id ASC + LIMIT $1 + OFFSET $2 + "#, + limit, + offset, + search_match, + similarity_threshold + ) + .fetch_all(&self.pool) + .await + .context("Failed to search paginated users")? 
+ } + None => sqlx::query_as!( + UserRow, + r#" + SELECT + u.id, + u.username, + u.email, + u.role_id, + r.role_name, + u.is_active, + COUNT(*) OVER() as "total_items" + FROM users u + JOIN roles r ON u.role_id = r.id + ORDER BY u.id ASC + LIMIT $1 OFFSET $2 + "#, + limit, + offset + ) + .fetch_all(&self.pool) + .await + .context("Failed to get paginated users")?, + }; + + let total_items = records.first().and_then(|row| row.total_items).unwrap_or(0); + + let user_list = records + .into_iter() + .map(|row| UserWithRole { + id: row.id, + username: row.username, + email: row.email, + role_name: row.role_name, + role_id: row.role_id, + is_active: row.is_active.unwrap_or(false), + }) + .collect(); + + Ok(PaginatedResult { + items: user_list, + total_items, + }) + } + + // Delete user by ID (admin) + pub async fn admin_delete_user(&self, user_id: i32) -> AnyhowResult { + let result = sqlx::query!("DELETE FROM users WHERE id = $1", user_id) + .execute(&self.pool) + .await + .context("Failed to delete user")?; + + Ok(result.rows_affected()) + } + + /// Partial Update user details (admin) + /// This function updates only the provided fields using a "Fetch-Merge-Update" pattern + /// It returns the updated user data or None if the user was not found + pub async fn admin_update_user( + &self, + user_id: i32, + username: Option<&str>, + email: Option<&str>, + role_id: Option, + is_active: Option, + actor_role: Role, + ) -> AnyhowResult> { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + // Fetch the current user data (and lock the row for update) + let current_user = sqlx::query!( + "SELECT u.username, u.email, u.role_id, u.is_active, r.role_name + FROM users u + JOIN roles r ON u.role_id = r.id + WHERE u.id = $1 + FOR UPDATE", + user_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch user")?; + + // If user doesnt exist, rollback and return None + let Some(current_user) = current_user else { + 
tx.rollback().await.context("Failed to rollback user")?; + return Ok(None); + }; + + // Get target role enum + let target_role = Role::from_name(&current_user.role_name).unwrap_or(Role::User); + + // Check if actor has permission to modify target user + // Admin (2) CANT change SuperAdmin (3) -> 2 <= 3 (Failed) + // Admin (2) CANT change Admin (2) -> 2 <= 2 (Failed) + // Admin (2) CAN change Moderator (1) -> 2 <= 1 (Pass) + if actor_role <= target_role { + tx.rollback().await.context("Failed to rollback user")?; + anyhow::bail!( + "FORBIDDEN: You do not have permission to modify a user with an equal or higher role." + ); + } + + let mut new_role_id = current_user.role_id; + + if let Some(req_role_id) = role_id + && req_role_id != current_user.role_id + { + let new_role_name = + sqlx::query_scalar!("SELECT role_name FROM roles WHERE id = $1", req_role_id) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch role_id")? + .ok_or_else(|| anyhow::anyhow!("Invalid role_id: {}", req_role_id))?; + + let new_role_enum = Role::from_name(&new_role_name).unwrap_or(Role::User); + + if new_role_enum >= actor_role { + tx.rollback().await.context("Failed to rollback user")?; + anyhow::bail!("FORBIDDEN: You cannot assign a role higher than your own."); + } + new_role_id = req_role_id; + } + + // Merge: Use new value if Some, otherwise keep the current value + let new_username = username.unwrap_or(&current_user.username); + let new_email = email.unwrap_or(&current_user.email); + let new_is_active = is_active.or(current_user.is_active); + + // Check for conflicts (username or email) with *other* users + // Only check if username or email is actually changing + if (username.is_some() && username != Some(&current_user.username)) + || (email.is_some() && email != Some(&current_user.email)) + { + let conflict = sqlx::query_scalar!( + "SELECT 1 FROM users WHERE (username = $1 OR email = $2) AND id != $3 LIMIT 1", + new_username, + new_email, + user_id + ) + .fetch_optional(&mut *tx) + .await + 
.context("Failed to check for username/email conflict")?; + + if conflict.is_some() { + tx.rollback().await.context("Failed to rollback user")?; + // Return a specific error message that the handler can catch + anyhow::bail!( + "Username or email already exists for another user with id {}", + user_id + ); + } + } + + // Update the user with merged data + sqlx::query!( + r#" + UPDATE users + SET username = $1, email = $2, role_id = $3, is_active = $4, updated_at = NOW() + WHERE id = $5 + "#, + new_username, + new_email, + new_role_id, + new_is_active, + user_id + ) + .execute(&mut *tx) + .await + .context("Failed to update user")?; + + // Fetch the updated user data to return + let updated_user = sqlx::query_as!( + UserWithRole, + r#" + SELECT + u.id, + u.username, + u.email, + u.role_id, + COALESCE(u.is_active, false) as "is_active!", + r.role_name + FROM users u + JOIN roles r ON u.role_id = r.id + WHERE u.id = $1 + "#, + user_id + ) + .fetch_one(&mut *tx) + .await + .context("Failed to fetch updated user")?; + + // Commit the transaction + tx.commit().await.context("Failed to commit transaction")?; + + Ok(Some(updated_user)) + } +} + +// ========================================================================= +// Admin Comment Management +// ========================================================================= +impl DatabaseService { + // Delete comment as admin + pub async fn admin_delete_comment( + &self, + comment_id: i64, + requestor_role_id: i32, + ) -> AnyhowResult { + let mut tx = self + .pool + .begin() + .await + .context("Failed to begin transaction")?; + + let target_info = sqlx::query!( + r#" + SELECT + u.role_id, + c.user_id + FROM comments c + JOIN users u ON c.user_id = u.id + WHERE c.id = $1 + "#, + comment_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to fetch comment info")?; + + let target_user_role_id = match target_info { + Some(record) => record.role_id, + None => { + return Ok(DeleteCommentResult::NotFound); + } + }; + 
+ println!("DEBUG: Requestor Role ID: {}", requestor_role_id); + println!("DEBUG: Target User Role ID: {}", target_user_role_id); + + // Validation Tiered logic + // Role: SuperAdmin=1, Admin=2, Moderator=3, User=4 + let is_super_admin = requestor_role_id == 1; + + // If not super admin, check hierarchy. + // We deny if requestor_role_id is Greater or Equal to target_user_role_id. + if !is_super_admin && requestor_role_id >= target_user_role_id { + tx.rollback().await?; + return Ok(DeleteCommentResult::InsufficientPermissions); + } + + let attachment_object_key: Vec = sqlx::query_scalar!( + "SELECT file_url FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .fetch_all(&mut *tx) + .await + .context("Failed to fetch attachment keys")?; + + let has_replies: bool = sqlx::query_scalar!( + r#" + SELECT EXISTS( + SELECT 1 FROM comments WHERE parent_id = $1 AND deleted_at IS NULL + ) + "#, + comment_id + ) + .fetch_one(&mut *tx) + .await + .context("Failed to check for replies")? + .context("EXISTS query returned NULL, which should not happen")?; + + let row_affected: u64; + + if has_replies { + let soft_delete_result = sqlx::query_as!( + UpdateCommentResponse, + r#" + UPDATE comments + SET + content_user_markdown = '', + content_html = '

[Removed by Mod]

', + deleted_at = NOW(), + updated_at = NOW() + WHERE id = $1 + RETURNING id, content_user_markdown, content_html, updated_at, (deleted_at IS NOT NULL) as "is_deleted!" + "#, + comment_id + ) + .fetch_optional(&mut *tx) + .await + .context("Failed to soft delete comment")?; + + if let Some(updated_comment) = soft_delete_result { + sqlx::query!( + "DELETE FROM comment_attachments WHERE comment_id = $1", + comment_id + ) + .execute(&mut *tx) + .await + .context("Failed to delete comment attachments")?; + + tx.commit().await.context("Failed to comment deletion")?; + + Ok(DeleteCommentResult::SoftDeleted( + updated_comment, + attachment_object_key, + )) + } else { + tx.rollback().await.context("Failed to comment deletion")?; + Ok(DeleteCommentResult::NotFound) + } + } else { + let hard_delete_result = sqlx::query!("DELETE FROM comments WHERE id = $1", comment_id) + .execute(&mut *tx) + .await + .context("Failed to delete comment")?; + + row_affected = hard_delete_result.rows_affected(); + + if row_affected == 0 { + tx.rollback().await.context("Failed to delete comment")?; + return Ok(DeleteCommentResult::NotFound); + } + + tx.commit().await.context("Failed to commit transaction")?; + + Ok(DeleteCommentResult::HardDeleted(attachment_object_key)) + } + } +} + +// ========================================================================= +// Admin Reports Management +// ========================================================================= +impl DatabaseService { + pub async fn get_admin_paginated_pending_reports( + &self, + page: u32, + page_size: u32, + search_query: Option<&str>, + ) -> AnyhowResult> { + let limit = page_size as i64; + let offset = (page.max(1) as i64 - 1) * limit; + + #[derive(Debug, FromRow)] + struct RawReportRow { + id: i32, + reporter_username: String, + reporter_id: i32, + created_at: DateTime, + reason: ReportReason, + chapter_id: Option, + chapter_number: Option, + chapter_series_title: Option, + comment_id: Option, + comment_preview: Option, 
+ total_items: Option, + } + + let records = match search_query.filter(|q| !q.trim().is_empty()) { + Some(search_match) => { + let search_match = search_match.trim(); + let similarity_threshold = 0.20_f32; + + sqlx::query_as!( + RawReportRow, + r#" + WITH search_candidates AS ( + SELECT + r.id, + u.username as reporter_username, + r.reporter_id, + r.created_at, + r.reason, + sc.id as chapter_id, + sc.chapter_number, + s.title as chapter_series_title, + c.id as comment_id, + c.content_user_markdown, + c.content_html, + GREATEST( + similarity(u.username, $3), + similarity(COALESCE(s.title, ''), $3), + similarity(COALESCE(c.content_user_markdown, ''), $3) + ) as sim_score + FROM reports r + INNER JOIN users u ON r.reporter_id = u.id + LEFT JOIN series_chapters sc ON r.chapter_id = sc.id + LEFT JOIN series s ON sc.series_id = s.id + LEFT JOIN comments c ON r.comment_id = c.id + WHERE + -- Reporter Username + (u.username ILIKE '%' || $3 || '%' OR (u.username % $3 AND similarity(u.username, $3) >= $4)) + OR + -- Series Title (only if related to series) + (s.title IS NOT NULL AND (s.title ILIKE '%' || $3 || '%' OR (s.title % $3 AND similarity(s.title, $3) >= $4))) + OR + -- Comment Content (only if related to comment) + (c.content_user_markdown IS NOT NULL AND c.content_user_markdown ILIKE '%' || $3 || '%') + ), + ranked_results AS ( + SELECT *, + CASE + -- Reporter Username + WHEN reporter_username ILIKE $3 THEN 10 + WHEN reporter_username ILIKE $3 || '%' THEN 9 + -- Series Title + WHEN chapter_series_title ILIKE '%' || $3 || '%' THEN 8 + -- Comment Content + ELSE 5 + END as search_rank + FROM search_candidates + ), + total_count AS ( + SELECT COUNT(*) as total FROM ranked_results + ) + SELECT + rr.id, + rr.reporter_username, + rr.reporter_id, + rr.created_at, + rr.reason as "reason: ReportReason", + rr.chapter_id, + rr.chapter_number, + rr.chapter_series_title, + rr.comment_id, + SUBSTRING(rr.content_html, 1, 50) as comment_preview, + tc.total as total_items + FROM 
ranked_results rr + CROSS JOIN total_count tc + ORDER BY rr.search_rank DESC, rr.sim_score DESC, rr.created_at DESC + LIMIT $1 + OFFSET $2 + "#, + limit, + offset, + search_match, + similarity_threshold, + ) + .fetch_all(&self.pool) + .await + .context("Failed to fetch reports with sqlx")? + } + None => sqlx::query_as!( + RawReportRow, + r#" + SELECT + r.id, + u.username as reporter_username, + r.reporter_id, + r.created_at, + r.reason as "reason: ReportReason", + sc.id as chapter_id, + sc.chapter_number as chapter_number, + s.title as chapter_series_title, + c.id as comment_id, + SUBSTRING(c.content_html, 1, 50) as comment_preview, + COUNT(*) OVER() as total_items + FROM reports r + INNER JOIN users u ON r.reporter_id = u.id + LEFT JOIN series_chapters sc ON r.chapter_id = sc.id + LEFT JOIN series s ON sc.series_id = s.id + LEFT JOIN comments c ON r.comment_id = c.id + ORDER BY r.created_at DESC + LIMIT $1 + OFFSET $2 + "#, + limit, + offset, + ) + .fetch_all(&self.pool) + .await + .context("Failed to fetch all pending reports")?, + }; + + let total_items = records + .first() + .map_or(0, |row| row.total_items.unwrap_or(0)); + + let items: Vec = records + .into_iter() + .map(|row| ReportView { + id: row.id, + reporter_username: row.reporter_username, + reporter_id: row.reporter_id, + created_at: row.created_at, + reason: row.reason, + chapter_id: row.chapter_id, + chapter_number: row.chapter_number, + chapter_series_title: row.chapter_series_title, + comment_id: row.comment_id, + comment_preview: row.comment_preview, + }) + .collect(); + + Ok(PaginatedResult { items, total_items }) + } + + pub async fn admin_resolve_reports(&self, report_id: i32) -> AnyhowResult<()> { + sqlx::query!("DELETE FROM reports WHERE id = $1", report_id) + .execute(&self.pool) + .await + .context("Failed to delete reports")?; + + Ok(()) + } +} diff --git a/backend/src/database/mod.rs b/backend/src/database/mod.rs index bb7f62b..96f2447 100644 --- a/backend/src/database/mod.rs +++ 
b/backend/src/database/mod.rs @@ -378,6 +378,88 @@ pub struct CommentVoteResponse { pub current_user_vote: Option, } +#[derive(Debug, Serialize, Deserialize, Type, Clone)] +#[sqlx(type_name = "report_reason", rename_all = "snake_case")] +pub enum ReportReason { + // Chapter Reports + BrokenImage, + WrongChapter, + DuplicateChapter, + MissingImage, + MissingChapter, + SlowLoading, + BrokenText, + + // Comment Reports + Toxic, + Racist, + Spam, + + Other, +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ChapterReportReason { + BrokenImage, + WrongChapter, + DuplicateChapter, + MissingImage, + MissingChapter, + SlowLoading, + BrokenText, +} + +impl From for ReportReason { + fn from(value: ChapterReportReason) -> Self { + match value { + ChapterReportReason::BrokenImage => ReportReason::BrokenImage, + ChapterReportReason::WrongChapter => ReportReason::WrongChapter, + ChapterReportReason::DuplicateChapter => ReportReason::DuplicateChapter, + ChapterReportReason::MissingImage => ReportReason::MissingImage, + ChapterReportReason::MissingChapter => ReportReason::MissingChapter, + ChapterReportReason::SlowLoading => ReportReason::SlowLoading, + ChapterReportReason::BrokenText => ReportReason::BrokenText, + } + } +} + +#[derive(Debug, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum CommentReportReason { + Toxic, + Racist, + Spam, + Other, +} + +impl From for ReportReason { + fn from(value: CommentReportReason) -> Self { + match value { + CommentReportReason::Toxic => ReportReason::Toxic, + CommentReportReason::Racist => ReportReason::Racist, + CommentReportReason::Spam => ReportReason::Spam, + CommentReportReason::Other => ReportReason::Other, + } + } +} + +#[derive(Debug, Deserialize)] +pub enum ReportTarget { + Chapter(i32), + Comment(i64), +} + +#[derive(Debug, Deserialize)] +pub struct CreateChapterReportRequest { + pub reason: ChapterReportReason, +} + +#[derive(Debug, Deserialize)] +pub struct CreateCommentReportRequest { 
+ pub reason: CommentReportReason, +} + // A helper function to extract a hostname from an optional URL string. // This is created to avoid code duplication, following the DRY principle. fn get_host_from_url(url_option: Option<&str>) -> Option { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index beeeb43..7e66442 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly" +channel = "nightly-2026-01-08" #channel = "stable" -components = ["rustfmt"] \ No newline at end of file +components = [] \ No newline at end of file