Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .cargo/config.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[build]
rustflags = ["-C", "target-cpu=native"]
#"-C", "target-feature=+crt-static"
19 changes: 13 additions & 6 deletions backend/src/api/admin/admin_series_handlers.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use axum::Json;
use axum::extract::{Path, Query, State};
use axum::http::StatusCode;
use axum::Json;
use axum_core::__private::tracing::warn;
use axum_core::response::{IntoResponse, Response};
use axum_extra::extract::Multipart;
Expand All @@ -14,9 +14,9 @@ use crate::api::admin::{
};
use crate::api::extractor::{AdminOrHigherUser, SuperAdminUser};
use crate::builder::startup::AppState;
use crate::database::{NewSeriesData, Series, UpdateSeriesData};
use crate::database::{NewSeriesData, Series, SeriesCheckTaskInfo, UpdateSeriesData};
use crate::task_workers::check_series_worker::SeriesCheckJob;
use crate::task_workers::repair_chapter_worker;
use crate::task_workers::series_check_worker::SeriesCheckJob;

// Create new series
pub async fn create_new_series_handler(
Expand Down Expand Up @@ -69,10 +69,17 @@ pub async fn create_new_series_handler(
}
};

// Create and send job to worker via priority queue
let job = SeriesCheckJob {
series: fetch_new_series,
let series_task = SeriesCheckTaskInfo {
id: fetch_new_series.id,
title: fetch_new_series.title,
current_source_url: fetch_new_series.current_source_url,
source_website_host: fetch_new_series.source_website_host,
check_interval_minutes: fetch_new_series.check_interval_minutes,
};

// Create and send job to worker via priority queue
let job = SeriesCheckJob { series_task };

if let Err(e) = state.worker_channels.series_check_tx.send(job).await {
eprintln!(
"Failed to send job to worker for series: {} {}",
Expand Down
214 changes: 117 additions & 97 deletions backend/src/database/chapters.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,59 @@
use super::*;

/// Macros `sqlx::query!`
/// For DML operations (INSERT, UPDATE, DELETE) or SELECTs,
/// where you're manually processing generic `sqlx::Row`s (anonymous struct).
///
/// Macros `sqlx::query_as!`
/// For mapping SELECT results directly to a defined rust struct (`#[derive(FromRow)]`),
/// recommended for structured data retrieval.
///
/// Macros `sqlx::query_scalar!`
/// For queries returning a single value (one row, one column).
/// Highly efficient for this purpose.
// =========================================================================
// Public Read Used by the API to display chapters to users
// =========================================================================
impl DatabaseService {
/// Fetches the ordered image URLs for a single chapter of a series.
///
/// Joins `chapter_images` to `series_chapters` and returns the URLs
/// sorted by `image_order` ascending (page order).
///
/// # Errors
/// Returns an error if the database query fails.
pub async fn get_images_urls_for_chapter_series(
&self,
series_id: i32,
chapter_number: f32,
) -> AnyhowResult<Vec<String>> {
// query_scalar!: single-column result set, collected straight into Vec<String>.
let urls = sqlx::query_scalar!(
r#"
SELECT ci.image_url
FROM chapter_images ci
JOIN series_chapters mc ON ci.chapter_id = mc.id
WHERE mc.series_id = $1 AND mc.chapter_number = $2
ORDER BY ci.image_order ASC
"#,
series_id,
chapter_number,
)
.fetch_all(&self.pool)
.await
.context("Failed to get images URLs for chapter series")?;

Ok(urls)
}

/// Fetches all chapters for a specific series, highest chapter number first.
///
/// `status AS "status: _"` asks sqlx to decode the column using the Rust type
/// of the matching `SeriesChapter` field rather than a plain string.
///
/// # Errors
/// Returns an error if the database query fails.
pub async fn get_chapters_by_series_id(
&self,
series_id: i32,
) -> AnyhowResult<Vec<SeriesChapter>> {
let chapters = sqlx::query_as!(
SeriesChapter,
r#"
SELECT id, series_id, chapter_number, status AS "status: _",title, source_url, created_at
FROM series_chapters
WHERE series_id = $1
ORDER BY chapter_number
DESC
"#,
series_id
)
.fetch_all(&self.pool)
.await
.context("Failed to query chapters by series ID with sqlx")?;

Ok(chapters)
}
}

// =========================================================================
// Scraper Ingestion & Data Entry
// =========================================================================
impl DatabaseService {
/// Adds a new chapter to the database and returns its new ID.
/// This function assumes the chapter does not already exist (checked by source_url uniqueness).
Expand All @@ -20,16 +63,22 @@ impl DatabaseService {
chapter_number: f32,
title: Option<&str>,
source_url: &str,
chapter_status: ChapterStatus,
) -> AnyhowResult<i32> {
let new_id = sqlx::query_scalar!(
"INSERT INTO series_chapters (series_id, chapter_number, title, source_url)
VALUES ($1, $2, $3, $4)
ON CONFLICT (source_url) DO UPDATE SET updated_at = NOW()
"INSERT INTO series_chapters (series_id, chapter_number, title, source_url, status)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (series_id, chapter_number)
DO UPDATE SET
updated_at = NOW(),
source_url = EXCLUDED.source_url,
status = EXCLUDED.status
RETURNING id",
series_id,
chapter_number,
title,
source_url,
chapter_status as ChapterStatus
)
.fetch_one(&self.pool)
.await
Expand Down Expand Up @@ -57,48 +106,65 @@ impl DatabaseService {
Ok(new_id)
}

pub async fn delete_chapter_and_images_for_chapter(
&self,
series_id: i32,
chapter_number: f32,
) -> AnyhowResult<u64> {
// exclusive connection from the pool
let mut tx = self
.pool
.begin()
.await
.context("Failed to start transaction")?;

let chapter_id_to_delete = sqlx::query_scalar!(
"SELECT id FROM series_chapters WHERE series_id = $1 AND chapter_number = $2",
series_id,
chapter_number,
pub async fn get_max_known_chapter(&self, series_id: i32) -> AnyhowResult<f32> {
let result = sqlx::query_scalar!(
r#"
SELECT MAX(chapter_number)
FROM series_chapters
WHERE series_id = $1
"#,
series_id
)
.fetch_optional(&mut *tx) // Run query inside transaction
.fetch_one(&self.pool)
.await
.context("Failed to get chapter ID to delete")?;

if let Some(chapter_id) = chapter_id_to_delete {
sqlx::query!(
"DELETE FROM chapter_images WHERE chapter_id = $1",
chapter_id
)
.execute(&mut *tx)
.await
.context("Failed to delete chapter images")?;
.context("Failed to get max known chapter number")?;

let result = sqlx::query!("DELETE FROM series_chapters WHERE id = $1", chapter_id)
.execute(&mut *tx)
.await
.context("Failed to delete chapter")?;
Ok(result.unwrap_or(0.0))
}
}

// If transaction was successful, commit it
tx.commit().await.context("Failed to commit transaction")?;
// =========================================================================
// Background Worker & Job Queue
// Handling "Processing" status, locking, and job distribution
// =========================================================================
impl DatabaseService {
pub async fn find_and_lock_pending_chapters(
&self,
limit: i64,
) -> AnyhowResult<Vec<DownloadJobData>> {
let record = sqlx::query_as!(
DownloadJobData,
r#"
WITH locked_rows AS (
SELECT id
FROM series_chapters
WHERE status = 'Processing'
ORDER BY created_at ASC
LIMIT $1
FOR UPDATE SKIP LOCKED
)
UPDATE series_chapters sc
SET
status = 'Processing',
updated_at = NOW()
FROM locked_rows lr, series s
WHERE sc.id = lr.id AND sc.series_id = s.id
RETURNING
sc.id as chapter_id,
sc.chapter_number,
sc.source_url as chapter_url,
s.id as series_id,
s.title as series_title,
s.source_website_host as source_host,
s.current_source_url as series_url
"#,
limit
)
.fetch_all(&self.pool)
.await
.context("Failed to deque pending chapters")?;

Ok(result.rows_affected())
} else {
Ok(0) // No chapter found to delete
}
Ok(record)
}

pub async fn update_chapter_status(
Expand Down Expand Up @@ -127,50 +193,4 @@ impl DatabaseService {
}
}
}

/// Fetches the ordered image URLs for a single chapter of a series.
///
/// Joins `chapter_images` to `series_chapters` and returns the URLs
/// sorted by `image_order` ascending (page order).
///
/// # Errors
/// Returns an error if the database query fails.
pub async fn get_images_urls_for_chapter_series(
&self,
series_id: i32,
chapter_number: f32,
) -> AnyhowResult<Vec<String>> {
// query_scalar!: single-column result set, collected straight into Vec<String>.
let urls = sqlx::query_scalar!(
r#"
SELECT ci.image_url
FROM chapter_images ci
JOIN series_chapters mc ON ci.chapter_id = mc.id
WHERE mc.series_id = $1 AND mc.chapter_number = $2
ORDER BY ci.image_order ASC
"#,
series_id,
chapter_number,
)
.fetch_all(&self.pool)
.await
.context("Failed to get images URLs for chapter series")?;

Ok(urls)
}

/// Fetches all chapters for a specific series, highest chapter number first.
///
/// `status AS "status: _"` asks sqlx to decode the column using the Rust type
/// of the matching `SeriesChapter` field rather than a plain string.
///
/// # Errors
/// Returns an error if the database query fails.
pub async fn get_chapters_by_series_id(
&self,
series_id: i32,
) -> AnyhowResult<Vec<SeriesChapter>> {
let chapters = sqlx::query_as!(
SeriesChapter,
r#"
SELECT id, series_id, chapter_number, status AS "status: _",title, source_url, created_at
FROM series_chapters
WHERE series_id = $1
ORDER BY chapter_number
DESC
"#,
series_id
)
.fetch_all(&self.pool)
.await
.context("Failed to query chapters by series ID with sqlx")?;

Ok(chapters)
}
}
32 changes: 31 additions & 1 deletion backend/src/database/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,13 @@
use sqlx::{FromRow, PgPool, Type};
use url::Url;

pub mod admin_actions;
pub mod auth;
pub mod chapters;
pub mod comments;
pub mod series;
pub mod series_user_actions;
pub mod storage;
pub mod user_actions;

Check failure on line 16 in backend/src/database/mod.rs

View workflow job for this annotation

GitHub Actions / linter

file not found for module `user_actions`

error[E0583]: file not found for module `user_actions` --> backend/src/database/mod.rs:16:1 | 16 | pub mod user_actions; | ^^^^^^^^^^^^^^^^^^^^^ | = help: to create the module `user_actions`, create file "backend/src/database/user_actions.rs" or "backend/src/database/user_actions/mod.rs" = note: if there is a `mod user_actions` elsewhere in the crate already, import it with `use crate::...` instead
pub mod users;

// Type alias for database connection pool
Expand Down Expand Up @@ -93,6 +94,7 @@
#[derive(Debug, Clone, PartialEq, Eq, sqlx::Type, Serialize, Deserialize)]
#[sqlx(type_name = "chapter_status", rename_all = "PascalCase")]
pub enum ChapterStatus {
Pending,
Processing,
Available,
NoImagesFound,
Expand Down Expand Up @@ -175,6 +177,32 @@
Rating,
}

/// Minimal projection of a series row carried by a series-check job,
/// instead of the full `Series` record.
#[derive(Debug, Clone, FromRow)]
pub struct SeriesCheckTaskInfo {
/// Primary key of the series.
pub id: i32,
/// Series title.
pub title: String,
/// URL the series is currently scraped from.
pub current_source_url: String,
/// Host of the source website.
pub source_website_host: String,
/// How often (in minutes) the series should be checked.
pub check_interval_minutes: i32,
}

/// Joined series + chapter data for one claimed download job, as returned
/// by `find_and_lock_pending_chapters`.
#[derive(Debug, FromRow)]
pub struct DownloadJobData {
/// Primary key of the parent series.
pub series_id: i32,
/// Title of the parent series.
pub series_title: String,
/// Current source URL of the parent series.
pub series_url: String,
/// Host of the source website.
pub source_host: String,

/// Primary key of the chapter to download.
pub chapter_id: i32,
/// Chapter number (floating point; stored as f32 in the schema).
pub chapter_number: f32,
/// Source URL of the chapter.
pub chapter_url: String,
}

/// Job payload for deleting a series; carries only the series id.
#[derive(Debug, Clone, FromRow)]
pub struct SeriesDeletionJob {
/// Primary key of the series to delete.
pub id: i32,
}

#[derive(Debug, FromRow, Serialize, Deserialize)]
pub struct CategoryTag {
pub id: i32,
Expand Down Expand Up @@ -312,6 +340,7 @@
user_id: i32,
user_username: String,
user_avatar_url: Option<String>,
user_role_id: i32,
upvotes: i64,
downvotes: i64,
is_deleted: bool,
Expand All @@ -324,6 +353,7 @@
pub id: i32,
pub username: String,
pub avatar_url: Option<String>,
pub role_id: i32,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Type)]
Expand Down
Loading
Loading