diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ed56dab
--- /dev/null
+++ b/README.md
@@ -0,0 +1,168 @@
+# Yumemi
+
+
+
+
+
+
+ A feature-rich manga reader for Android
+
+
+
+
+
+
+
+
+
+---
+
+Yumemi is a free and open-source manga reader for Android based on [Kotatsu](https://github.com/KotatsuApp/Kotatsu). It provides a clean, modern interface for reading manga from various online sources with powerful features for organizing and tracking your reading progress.
+
+## Features
+
+- **Multiple Sources** — Access manga from 1000+ sources via [kotatsu-parsers](https://github.com/YakaTeam/kotatsu-parsers)
+- **Offline Reading** — Download chapters for reading without internet
+- **Reading Progress Tracking** — Automatic history and bookmarks
+- **Scrobbling** — Sync progress with MyAnimeList, AniList, Shikimori, and Kitsu
+- **Local Manga** — Read downloaded CBZ/CBR/ZIP archives
+- **Customizable Reader** — Multiple reading modes, color filters, and gestures
+- **Material Design** — Modern UI following Material Design 3 guidelines
+- **App Lock** — Protect the app with biometric or PIN authentication
+- **Discord Rich Presence** — Share what you're reading on Discord
+- **Tracker** — Get notified when new chapters are available
+- **Categories & Favorites** — Organize your library your way
+- **Search** — Find manga across all sources simultaneously
+
+## Screenshots
+
+
+
+## Download
+
+Download the latest APK from the [Releases](https://github.com/AgentKush/Yumemi/releases) page.
+
+### Build Variants
+
+| Variant | Description |
+|---------|-------------|
+| **Release** | Optimized build with ProGuard minification |
+| **Debug** | Development build with debugging enabled |
+| **Nightly** | Daily builds with latest changes |
+
+## Building from Source
+
+### Prerequisites
+
+- Android Studio Ladybug or later
+- JDK 17 or later
+- Android SDK 36
+
+### Build Commands
+
+```bash
+# Clone the repository
+git clone https://github.com/AgentKush/Yumemi.git
+cd Yumemi
+
+# Build debug APK
+./gradlew assembleDebug
+
+# Build release APK
+./gradlew assembleRelease
+
+# Build nightly APK
+./gradlew assembleNightly
+
+# Run tests
+./gradlew test
+```
+
+### Output Locations
+
+- Debug: `app/build/outputs/apk/debug/app-debug.apk`
+- Release: `app/build/outputs/apk/release/app-release.apk`
+- Nightly: `app/build/outputs/apk/nightly/app-nightly.apk`
+
+## Tech Stack
+
+- **Language:** Kotlin
+- **Architecture:** MVVM with Clean Architecture
+- **Dependency Injection:** Hilt
+- **Database:** Room
+- **Networking:** OkHttp + Coroutines
+- **Image Loading:** Coil 3
+- **Background Work:** WorkManager
+- **UI:** Material Design 3, ViewBinding
+
+## Project Structure
+
+```
+app/
+├── src/main/kotlin/org/koitharu/kotatsu/
+│ ├── bookmarks/ # Bookmark management
+│ ├── browser/ # WebView browser for sources
+│ ├── core/ # Core utilities, database, network
+│ ├── details/ # Manga details screen
+│ ├── download/ # Download management
+│ ├── explore/ # Source exploration
+│ ├── favourites/ # Favorites management
+│ ├── filter/ # Search filters
+│ ├── history/ # Reading history
+│ ├── list/ # Manga list components
+│ ├── local/ # Local manga handling
+│ ├── main/ # Main activity and navigation
+│ ├── reader/ # Manga reader
+│ ├── scrobbling/ # External tracker integration
+│ ├── search/ # Search functionality
+│ ├── settings/ # App settings
+│ ├── sync/ # Cloud sync
+│ ├── tracker/ # Update tracker
+│ └── widget/ # Home screen widgets
+```
+
+## Contributing
+
+Contributions are welcome! Please feel free to submit a Pull Request.
+
+1. Fork the repository
+2. Create your feature branch (`git checkout -b feature/amazing-feature`)
+3. Commit your changes (`git commit -m 'Add some amazing feature'`)
+4. Push to the branch (`git push origin feature/amazing-feature`)
+5. Open a Pull Request
+
+### Reporting Issues
+
+- Check existing issues before creating a new one
+- Include device info, Android version, and app version
+- Provide steps to reproduce the issue
+- Include screenshots or logs if applicable
+
+## Acknowledgements
+
+- [Kotatsu](https://github.com/KotatsuApp/Kotatsu) — Original project
+- [YakaTeam/kotatsu-parsers](https://github.com/YakaTeam/kotatsu-parsers) — Manga source parsers
+- All the contributors who help improve this project
+
+## License
+
+```
+Copyright (C) 2020-2026 Yumemi Contributors
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.
+```
+
+---
+
+Made with ❤️ for manga readers everywhere
diff --git a/app/build.gradle b/app/build.gradle
index eaa3957..7afb747 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -56,12 +56,13 @@ android {
minifyEnabled true
shrinkResources true
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
-// signingConfig signingConfigs.Yukimi
+ // Use debug signing for testing - replace with proper keystore for production
+ signingConfig signingConfigs.debug
}
nightly {
initWith release
applicationIdSuffix = '.nightly'
-// signingConfig signingConfigs.Yukimi
+ signingConfig signingConfigs.debug
}
}
buildFeatures {
@@ -82,11 +83,11 @@ android {
}
compileOptions {
coreLibraryDesugaringEnabled true
- sourceCompatibility JavaVersion.VERSION_11
- targetCompatibility JavaVersion.VERSION_11
+ sourceCompatibility JavaVersion.VERSION_17
+ targetCompatibility JavaVersion.VERSION_17
}
kotlinOptions {
- jvmTarget = JavaVersion.VERSION_11.toString()
+ jvmTarget = JavaVersion.VERSION_17.toString()
freeCompilerArgs += [
'-opt-in=kotlin.ExperimentalStdlibApi',
'-opt-in=kotlinx.coroutines.ExperimentalCoroutinesApi',
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
index d23fea7..b1a7745 100644
--- a/app/src/main/AndroidManifest.xml
+++ b/app/src/main/AndroidManifest.xml
@@ -445,6 +445,9 @@
+
@@ -75,7 +79,7 @@ open class BaseApp : Application(), Configuration.Provider {
override fun onCreate() {
super.onCreate()
- PlatformRegistry.applicationContext = this // TODO replace with OkHttp.initialize
+ OkHttp.initialize(this)
if (ACRA.isACRASenderServiceProcess()) {
return
}
@@ -94,6 +98,7 @@ open class BaseApp : Application(), Configuration.Provider {
localStorageChanges.collect(localMangaIndexProvider.get())
}
workScheduleManager.init()
+ dnsPrefetchManager.initialize()
}
override fun attachBaseContext(base: Context) {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/cache/MemoryContentCache.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/cache/MemoryContentCache.kt
index cf78bcb..96ec282 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/cache/MemoryContentCache.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/cache/MemoryContentCache.kt
@@ -3,6 +3,7 @@ package org.koitharu.kotatsu.core.cache
import android.app.Application
import android.content.ComponentCallbacks2
import android.content.res.Configuration
+import org.koitharu.kotatsu.core.prefs.AppSettings
import org.koitharu.kotatsu.core.util.ext.isLowRamDevice
import org.koitharu.kotatsu.parsers.model.Manga
import org.koitharu.kotatsu.parsers.model.MangaPage
@@ -11,16 +12,41 @@ import java.util.concurrent.TimeUnit
import javax.inject.Inject
import javax.inject.Singleton
+/**
+ * In-memory cache for manga content with configurable TTL.
+ *
+ * Cache TTL settings can be configured in AppSettings:
+ * - Details cache: default 5 minutes (configurable 1-60 min)
+ * - Pages cache: default 10 minutes (configurable 1-120 min)
+ * - Related manga cache: default 10 minutes (configurable 1-120 min)
+ *
+ * Note: Changes to TTL settings require app restart to take effect.
+ */
@Singleton
-class MemoryContentCache @Inject constructor(application: Application) : ComponentCallbacks2 {
+class MemoryContentCache @Inject constructor(
+ application: Application,
+ settings: AppSettings,
+) : ComponentCallbacks2 {
private val isLowRam = application.isLowRamDevice()
- private val detailsCache = ExpiringLruCache<SafeDeferred<Manga>>(if (isLowRam) 1 else 4, 5, TimeUnit.MINUTES)
- private val pagesCache =
- ExpiringLruCache<SafeDeferred<List<MangaPage>>>(if (isLowRam) 1 else 4, 10, TimeUnit.MINUTES)
- private val relatedMangaCache =
- ExpiringLruCache<SafeDeferred<List<Manga>>>(if (isLowRam) 1 else 3, 10, TimeUnit.MINUTES)
+ private val detailsCache = ExpiringLruCache<SafeDeferred<Manga>>(
+ maxSize = if (isLowRam) 1 else 4,
+ lifetime = settings.cacheDetailsTtlMinutes.toLong(),
+ timeUnit = TimeUnit.MINUTES,
+ )
+
+ private val pagesCache = ExpiringLruCache<SafeDeferred<List<MangaPage>>>(
+ maxSize = if (isLowRam) 1 else 4,
+ lifetime = settings.cachePagesTtlMinutes.toLong(),
+ timeUnit = TimeUnit.MINUTES,
+ )
+
+ private val relatedMangaCache = ExpiringLruCache<SafeDeferred<List<Manga>>>(
+ maxSize = if (isLowRam) 1 else 3,
+ lifetime = settings.cacheRelatedTtlMinutes.toLong(),
+ timeUnit = TimeUnit.MINUTES,
+ )
init {
application.registerComponentCallbacks(this)
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/DatabaseOptimizer.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/DatabaseOptimizer.kt
new file mode 100644
index 0000000..a5f88df
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/DatabaseOptimizer.kt
@@ -0,0 +1,236 @@
+package org.koitharu.kotatsu.core.db
+
+import android.content.Context
+import android.util.Log
+import androidx.room.RoomDatabase
+import androidx.sqlite.db.SupportSQLiteDatabase
+import dagger.hilt.android.qualifiers.ApplicationContext
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.koitharu.kotatsu.BuildConfig
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Database query optimizer that provides runtime optimizations and diagnostics.
+ *
+ * Features:
+ * - Periodic ANALYZE to update query planner statistics
+ * - VACUUM for database compaction (when database is idle)
+ * - Query plan analysis for debugging slow queries
+ * - Index usage statistics
+ */
+@Singleton
+class DatabaseOptimizer @Inject constructor(
+ @ApplicationContext private val context: Context,
+ private val database: MangaDatabase,
+) {
+
+ /**
+ * Run ANALYZE to update SQLite's query planner statistics.
+ * Should be called periodically (e.g., once per app session or daily).
+ */
+ suspend fun analyzeDatabase() = withContext(Dispatchers.IO) {
+ try {
+ database.openHelper.writableDatabase.execSQL("ANALYZE")
+ logDebug { "Database ANALYZE completed successfully" }
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to analyze database", e)
+ }
+ }
+
+ /**
+ * Run VACUUM to compact the database and reclaim unused space.
+ * Should be called infrequently (e.g., weekly or when database size is large).
+ * Note: This operation can be slow and locks the database.
+ */
+ suspend fun vacuumDatabase() = withContext(Dispatchers.IO) {
+ try {
+ database.openHelper.writableDatabase.execSQL("VACUUM")
+ logDebug { "Database VACUUM completed successfully" }
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to vacuum database", e)
+ }
+ }
+
+ /**
+ * Get the current database file size in bytes.
+ */
+ fun getDatabaseSize(): Long {
+ return try {
+ context.getDatabasePath("kotatsu-db").length()
+ } catch (e: Exception) {
+ -1L
+ }
+ }
+
+ /**
+ * Get database statistics including page count, free pages, etc.
+ */
+ suspend fun getDatabaseStats(): DatabaseStats = withContext(Dispatchers.IO) {
+ try {
+ val db = database.openHelper.readableDatabase
+
+ val pageCount = queryPragmaInt(db, "page_count")
+ val pageSize = queryPragmaInt(db, "page_size")
+ val freeListCount = queryPragmaInt(db, "freelist_count")
+
+ DatabaseStats(
+ sizeBytes = getDatabaseSize(),
+ pageCount = pageCount,
+ pageSize = pageSize,
+ freePages = freeListCount,
+ usedPages = pageCount - freeListCount,
+ fragmentationPercent = if (pageCount > 0) {
+ (freeListCount.toFloat() / pageCount * 100).coerceIn(0f, 100f)
+ } else 0f,
+ )
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to get database stats", e)
+ DatabaseStats()
+ }
+ }
+
+ /**
+ * Explain a query plan for debugging purposes.
+ * Only available in debug builds.
+ */
+ suspend fun explainQueryPlan(query: String): List<String> = withContext(Dispatchers.IO) {
+ if (!BuildConfig.DEBUG) {
+ return@withContext emptyList()
+ }
+
+ try {
+ val db = database.openHelper.readableDatabase
+ val results = mutableListOf<String>()
+
+ db.query("EXPLAIN QUERY PLAN $query").use { cursor ->
+ while (cursor.moveToNext()) {
+ val detail = cursor.getString(cursor.getColumnIndexOrThrow("detail"))
+ results.add(detail)
+ }
+ }
+
+ results
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to explain query plan", e)
+ emptyList()
+ }
+ }
+
+ /**
+ * Get a list of all indexes in the database.
+ */
+ suspend fun getIndexList(): List<IndexInfo> = withContext(Dispatchers.IO) {
+ try {
+ val db = database.openHelper.readableDatabase
+ val indexes = mutableListOf<IndexInfo>()
+
+ // Get all tables
+ db.query("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE 'room_%'").use { tablesCursor ->
+ while (tablesCursor.moveToNext()) {
+ val tableName = tablesCursor.getString(0)
+
+ // Get indexes for each table
+ db.query("PRAGMA index_list('$tableName')").use { indexCursor ->
+ while (indexCursor.moveToNext()) {
+ val indexName = indexCursor.getString(indexCursor.getColumnIndexOrThrow("name"))
+ val isUnique = indexCursor.getInt(indexCursor.getColumnIndexOrThrow("unique")) == 1
+
+ // Get columns in the index
+ val columns = mutableListOf<String>()
+ db.query("PRAGMA index_info('$indexName')").use { colCursor ->
+ while (colCursor.moveToNext()) {
+ columns.add(colCursor.getString(colCursor.getColumnIndexOrThrow("name")))
+ }
+ }
+
+ indexes.add(IndexInfo(
+ name = indexName,
+ tableName = tableName,
+ columns = columns,
+ isUnique = isUnique,
+ ))
+ }
+ }
+ }
+ }
+
+ indexes
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to get index list", e)
+ emptyList()
+ }
+ }
+
+ /**
+ * Check database integrity.
+ */
+ suspend fun checkIntegrity(): Boolean = withContext(Dispatchers.IO) {
+ try {
+ val db = database.openHelper.readableDatabase
+ db.query("PRAGMA integrity_check").use { cursor ->
+ if (cursor.moveToFirst()) {
+ val result = cursor.getString(0)
+ return@withContext result == "ok"
+ }
+ }
+ false
+ } catch (e: Exception) {
+ Log.e(TAG, "Failed to check database integrity", e)
+ false
+ }
+ }
+
+ /**
+ * Optimize database for better performance.
+ * Combines ANALYZE and optional VACUUM based on fragmentation.
+ */
+ suspend fun optimize(forceVacuum: Boolean = false) = withContext(Dispatchers.IO) {
+ // Always run ANALYZE
+ analyzeDatabase()
+
+ // Only VACUUM if fragmentation is high or forced
+ val stats = getDatabaseStats()
+ if (forceVacuum || stats.fragmentationPercent > VACUUM_THRESHOLD_PERCENT) {
+ logDebug { "Running VACUUM due to ${stats.fragmentationPercent}% fragmentation" }
+ vacuumDatabase()
+ }
+ }
+
+ private fun queryPragmaInt(db: SupportSQLiteDatabase, pragma: String): Int {
+ return db.query("PRAGMA $pragma").use { cursor ->
+ if (cursor.moveToFirst()) cursor.getInt(0) else 0
+ }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ data class DatabaseStats(
+ val sizeBytes: Long = 0,
+ val pageCount: Int = 0,
+ val pageSize: Int = 0,
+ val freePages: Int = 0,
+ val usedPages: Int = 0,
+ val fragmentationPercent: Float = 0f,
+ ) {
+ val sizeMb: Float
+ get() = sizeBytes / (1024f * 1024f)
+ }
+
+ data class IndexInfo(
+ val name: String,
+ val tableName: String,
+ val columns: List,
+ val isUnique: Boolean,
+ )
+
+ companion object {
+ private const val TAG = "DatabaseOptimizer"
+ private const val VACUUM_THRESHOLD_PERCENT = 20f
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/MangaDatabase.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/MangaDatabase.kt
index 72a4ba3..1f87ec1 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/MangaDatabase.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/MangaDatabase.kt
@@ -16,12 +16,14 @@ import org.koitharu.kotatsu.core.db.dao.ChaptersDao
import org.koitharu.kotatsu.core.db.dao.MangaDao
import org.koitharu.kotatsu.core.db.dao.MangaSourcesDao
import org.koitharu.kotatsu.core.db.dao.PreferencesDao
+import org.koitharu.kotatsu.core.db.dao.SourceHealthDao
import org.koitharu.kotatsu.core.db.dao.TagsDao
import org.koitharu.kotatsu.core.db.dao.TrackLogsDao
import org.koitharu.kotatsu.core.db.entity.ChapterEntity
import org.koitharu.kotatsu.core.db.entity.MangaEntity
import org.koitharu.kotatsu.core.db.entity.MangaPrefsEntity
import org.koitharu.kotatsu.core.db.entity.MangaSourceEntity
+import org.koitharu.kotatsu.core.db.entity.SourceHealthEntity
import org.koitharu.kotatsu.core.db.entity.MangaTagsEntity
import org.koitharu.kotatsu.core.db.entity.TagEntity
import org.koitharu.kotatsu.core.db.migrations.Migration10To11
@@ -43,6 +45,8 @@ import org.koitharu.kotatsu.core.db.migrations.Migration24To23
import org.koitharu.kotatsu.core.db.migrations.Migration24To25
import org.koitharu.kotatsu.core.db.migrations.Migration25To26
import org.koitharu.kotatsu.core.db.migrations.Migration26To27
+import org.koitharu.kotatsu.core.db.migrations.Migration27To28
+import org.koitharu.kotatsu.core.db.migrations.Migration28To29
import org.koitharu.kotatsu.core.db.migrations.Migration2To3
import org.koitharu.kotatsu.core.db.migrations.Migration3To4
import org.koitharu.kotatsu.core.db.migrations.Migration4To5
@@ -70,14 +74,14 @@ import org.koitharu.kotatsu.tracker.data.TrackEntity
import org.koitharu.kotatsu.tracker.data.TrackLogEntity
import org.koitharu.kotatsu.tracker.data.TracksDao
-const val DATABASE_VERSION = 27
+const val DATABASE_VERSION = 29
@Database(
entities = [
MangaEntity::class, TagEntity::class, HistoryEntity::class, MangaTagsEntity::class, ChapterEntity::class,
FavouriteCategoryEntity::class, FavouriteEntity::class, MangaPrefsEntity::class, TrackEntity::class,
TrackLogEntity::class, SuggestionEntity::class, BookmarkEntity::class, ScrobblingEntity::class,
- MangaSourceEntity::class, StatsEntity::class, LocalMangaIndexEntity::class,
+ MangaSourceEntity::class, StatsEntity::class, LocalMangaIndexEntity::class, SourceHealthEntity::class,
],
version = DATABASE_VERSION,
)
@@ -112,6 +116,8 @@ abstract class MangaDatabase : RoomDatabase() {
abstract fun getLocalMangaIndexDao(): LocalMangaIndexDao
abstract fun getChaptersDao(): ChaptersDao
+
+ abstract fun getSourceHealthDao(): SourceHealthDao
}
fun getDatabaseMigrations(context: Context): Array<Migration> = arrayOf(
@@ -142,12 +148,15 @@ fun getDatabaseMigrations(context: Context): Array = arrayOf(
Migration24To25(),
Migration25To26(),
Migration26To27(),
+ Migration27To28(),
+ Migration28To29(),
)
fun MangaDatabase(context: Context): MangaDatabase = Room
.databaseBuilder(context, MangaDatabase::class.java, "kotatsu-db")
.addMigrations(*getDatabaseMigrations(context))
.addCallback(DatabasePrePopulateCallback(context.resources))
+ .fallbackToDestructiveMigration(dropAllTables = true)
.build()
fun InvalidationTracker.removeObserverAsync(observer: InvalidationTracker.Observer) {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/Tables.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/Tables.kt
index cbd87b5..6fd0ea1 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/Tables.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/Tables.kt
@@ -9,3 +9,4 @@ const val TABLE_MANGA_TAGS = "manga_tags"
const val TABLE_SOURCES = "sources"
const val TABLE_CHAPTERS = "chapters"
const val TABLE_PREFERENCES = "preferences"
+const val TABLE_SOURCE_HEALTH = "source_health"
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/dao/SourceHealthDao.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/dao/SourceHealthDao.kt
new file mode 100644
index 0000000..467dae3
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/dao/SourceHealthDao.kt
@@ -0,0 +1,129 @@
+package org.koitharu.kotatsu.core.db.dao
+
+import androidx.room.Dao
+import androidx.room.Insert
+import androidx.room.OnConflictStrategy
+import androidx.room.Query
+import androidx.room.Transaction
+import kotlinx.coroutines.flow.Flow
+import org.koitharu.kotatsu.core.db.entity.SourceHealthEntity
+
+@Dao
+abstract class SourceHealthDao {
+
+ @Query("SELECT * FROM source_health WHERE source = :source")
+ abstract suspend fun get(source: String): SourceHealthEntity?
+
+ @Query("SELECT * FROM source_health WHERE source = :source")
+ abstract fun observe(source: String): Flow<SourceHealthEntity?>
+
+ @Query("SELECT * FROM source_health ORDER BY (success_count + failure_count) DESC")
+ abstract suspend fun getAll(): List<SourceHealthEntity>
+
+ @Query("SELECT * FROM source_health ORDER BY (success_count + failure_count) DESC")
+ abstract fun observeAll(): Flow<List<SourceHealthEntity>>
+
+ @Query("SELECT * FROM source_health WHERE (CAST(success_count AS REAL) / (success_count + failure_count)) * 100 >= :minSuccessRate ORDER BY avg_response_time ASC")
+ abstract suspend fun getHealthySources(minSuccessRate: Float = 80f): List
+
+ @Query("SELECT * FROM source_health WHERE consecutive_failures >= :threshold")
+ abstract suspend fun getFailingSources(threshold: Int = 3): List<SourceHealthEntity>
+
+ @Insert(onConflict = OnConflictStrategy.REPLACE)
+ abstract suspend fun upsert(entity: SourceHealthEntity)
+
+ @Query("DELETE FROM source_health WHERE source = :source")
+ abstract suspend fun delete(source: String)
+
+ @Query("DELETE FROM source_health")
+ abstract suspend fun deleteAll()
+
+ /**
+ * Record a successful request for a source
+ */
+ @Transaction
+ open suspend fun recordSuccess(source: String, responseTimeMs: Long) {
+ val existing = get(source)
+ val now = System.currentTimeMillis()
+
+ if (existing == null) {
+ upsert(
+ SourceHealthEntity(
+ source = source,
+ successCount = 1,
+ failureCount = 0,
+ avgResponseTime = responseTimeMs,
+ minResponseTime = responseTimeMs,
+ maxResponseTime = responseTimeMs,
+ lastSuccessAt = now,
+ consecutiveFailures = 0,
+ )
+ )
+ } else {
+ val totalRequests = existing.successCount + existing.failureCount + 1
+ // Exponential moving average for response time (weight recent more heavily)
+ val newAvg = if (existing.avgResponseTime > 0) {
+ ((existing.avgResponseTime * 0.7) + (responseTimeMs * 0.3)).toLong()
+ } else {
+ responseTimeMs
+ }
+
+ upsert(
+ existing.copy(
+ successCount = existing.successCount + 1,
+ avgResponseTime = newAvg,
+ minResponseTime = minOf(existing.minResponseTime, responseTimeMs),
+ maxResponseTime = maxOf(existing.maxResponseTime, responseTimeMs),
+ lastSuccessAt = now,
+ consecutiveFailures = 0, // Reset on success
+ )
+ )
+ }
+ }
+
+ /**
+ * Record a failed request for a source
+ */
+ @Transaction
+ open suspend fun recordFailure(source: String, errorMessage: String?) {
+ val existing = get(source)
+ val now = System.currentTimeMillis()
+ val truncatedError = errorMessage?.take(200)
+
+ if (existing == null) {
+ upsert(
+ SourceHealthEntity(
+ source = source,
+ successCount = 0,
+ failureCount = 1,
+ lastFailureAt = now,
+ lastError = truncatedError,
+ consecutiveFailures = 1,
+ )
+ )
+ } else {
+ upsert(
+ existing.copy(
+ failureCount = existing.failureCount + 1,
+ lastFailureAt = now,
+ lastError = truncatedError,
+ consecutiveFailures = existing.consecutiveFailures + 1,
+ )
+ )
+ }
+ }
+
+ /**
+ * Reset statistics for a specific source
+ */
+ @Transaction
+ open suspend fun resetStats(source: String) {
+ val existing = get(source) ?: return
+ upsert(
+ SourceHealthEntity(
+ source = source,
+ statsResetAt = System.currentTimeMillis(),
+ )
+ )
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/MangaEntity.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/MangaEntity.kt
index fd4a230..3635f03 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/MangaEntity.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/MangaEntity.kt
@@ -2,10 +2,18 @@ package org.koitharu.kotatsu.core.db.entity
import androidx.room.ColumnInfo
import androidx.room.Entity
+import androidx.room.Index
import androidx.room.PrimaryKey
import org.koitharu.kotatsu.core.db.TABLE_MANGA
-@Entity(tableName = TABLE_MANGA)
+@Entity(
+ tableName = TABLE_MANGA,
+ indices = [
+ Index(value = ["source"]),
+ Index(value = ["title"]),
+ Index(value = ["public_url"]),
+ ],
+)
data class MangaEntity(
@PrimaryKey(autoGenerate = false)
@ColumnInfo(name = "manga_id") val id: Long,
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/SourceHealthEntity.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/SourceHealthEntity.kt
new file mode 100644
index 0000000..a09ba3e
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/entity/SourceHealthEntity.kt
@@ -0,0 +1,113 @@
+package org.koitharu.kotatsu.core.db.entity
+
+import androidx.room.ColumnInfo
+import androidx.room.Entity
+import androidx.room.PrimaryKey
+import org.koitharu.kotatsu.core.db.TABLE_SOURCE_HEALTH
+
+/**
+ * Entity for tracking source health metrics.
+ * Records success/failure counts, response times, and reliability scores.
+ */
+@Entity(tableName = TABLE_SOURCE_HEALTH)
+data class SourceHealthEntity(
+ @PrimaryKey(autoGenerate = false)
+ @ColumnInfo(name = "source")
+ val source: String,
+
+ /** Total number of successful requests */
+ @ColumnInfo(name = "success_count")
+ val successCount: Long = 0L,
+
+ /** Total number of failed requests */
+ @ColumnInfo(name = "failure_count")
+ val failureCount: Long = 0L,
+
+ /** Average response time in milliseconds (rolling average) */
+ @ColumnInfo(name = "avg_response_time")
+ val avgResponseTime: Long = 0L,
+
+ /** Minimum response time observed */
+ @ColumnInfo(name = "min_response_time")
+ val minResponseTime: Long = Long.MAX_VALUE,
+
+ /** Maximum response time observed */
+ @ColumnInfo(name = "max_response_time")
+ val maxResponseTime: Long = 0L,
+
+ /** Timestamp of last successful request */
+ @ColumnInfo(name = "last_success_at")
+ val lastSuccessAt: Long = 0L,
+
+ /** Timestamp of last failed request */
+ @ColumnInfo(name = "last_failure_at")
+ val lastFailureAt: Long = 0L,
+
+ /** Most recent error message (truncated) */
+ @ColumnInfo(name = "last_error")
+ val lastError: String? = null,
+
+ /** Number of consecutive failures (reset on success) */
+ @ColumnInfo(name = "consecutive_failures")
+ val consecutiveFailures: Int = 0,
+
+ /** Timestamp when stats were last reset */
+ @ColumnInfo(name = "stats_reset_at")
+ val statsResetAt: Long = System.currentTimeMillis(),
+) {
+ /**
+ * Calculate success rate as a percentage (0-100)
+ */
+ val successRate: Float
+ get() {
+ val total = successCount + failureCount
+ return if (total > 0) (successCount.toFloat() / total) * 100f else 0f
+ }
+
+ /**
+ * Calculate reliability score (0-100) based on success rate and response time
+ */
+ val reliabilityScore: Float
+ get() {
+ val total = successCount + failureCount
+ if (total == 0L) return 0f
+
+ // Base score from success rate (0-70 points)
+ val successScore = successRate * 0.7f
+
+ // Response time score (0-30 points)
+ // Target: < 1000ms = 30 points, > 5000ms = 0 points
+ val responseScore = when {
+ avgResponseTime <= 0 -> 15f // No data, neutral
+ avgResponseTime < 1000 -> 30f
+ avgResponseTime < 2000 -> 25f
+ avgResponseTime < 3000 -> 20f
+ avgResponseTime < 4000 -> 15f
+ avgResponseTime < 5000 -> 10f
+ else -> 5f
+ }
+
+ return successScore + responseScore
+ }
+
+ /**
+ * Determine health status based on reliability score
+ */
+ val healthStatus: HealthStatus
+ get() = when {
+ successCount + failureCount == 0L -> HealthStatus.UNKNOWN
+ consecutiveFailures >= 5 -> HealthStatus.CRITICAL
+ reliabilityScore >= 80f -> HealthStatus.HEALTHY
+ reliabilityScore >= 60f -> HealthStatus.DEGRADED
+ reliabilityScore >= 40f -> HealthStatus.POOR
+ else -> HealthStatus.CRITICAL
+ }
+
+ enum class HealthStatus {
+ UNKNOWN,
+ HEALTHY,
+ DEGRADED,
+ POOR,
+ CRITICAL
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration27To28.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration27To28.kt
new file mode 100644
index 0000000..2ca72a9
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration27To28.kt
@@ -0,0 +1,66 @@
+package org.koitharu.kotatsu.core.db.migrations
+
+import androidx.room.migration.Migration
+import androidx.sqlite.db.SupportSQLiteDatabase
+
+/**
+ * Migration 27 to 28: Database Query Optimization
+ *
+ * Adds indexes to improve query performance for:
+ * - Manga searches by source, title, author
+ * - History queries with deleted_at filtering and updated_at ordering
+ * - Track queries for new chapters and last chapter date
+ * - Bookmark lookups by manga_id
+ * - Scrobbling lookups by manga_id
+ */
+class Migration27To28 : Migration(27, 28) {
+
+ override fun migrate(db: SupportSQLiteDatabase) {
+ // Manga table indexes
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_manga_source ON manga(source)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_manga_title ON manga(title)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_manga_public_url ON manga(public_url)")
+
+ // History table indexes for common query patterns
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_history_deleted_at ON history(deleted_at)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_history_updated_at ON history(updated_at)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_history_created_at ON history(created_at)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_history_percent ON history(percent)")
+ // Composite index for the most common history query pattern
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_history_deleted_updated ON history(deleted_at, updated_at DESC)")
+
+ // Tracks table indexes for sorting and filtering
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_tracks_chapters_new ON tracks(chapters_new)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_tracks_last_chapter_date ON tracks(last_chapter_date)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_tracks_last_check_time ON tracks(last_check_time)")
+
+ // Favourites additional indexes (composite for common queries)
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_favourites_deleted_created ON favourites(deleted_at, created_at DESC)")
+
+ // Bookmarks index for manga lookups
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_bookmarks_manga_id ON bookmarks(manga_id)")
+
+ // Scrobblings index for manga lookups
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_scrobblings_manga_id ON scrobblings(manga_id)")
+
+ // Local index table optimization
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_local_index_manga_id ON local_index(manga_id)")
+
+ // Suggestions index
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_suggestions_manga_id ON suggestions(manga_id)")
+
+ // Tags index for title searches
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_tags_title ON tags(title)")
+
+ // Track logs index for efficient lookups
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_track_logs_manga_id ON track_logs(manga_id)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_track_logs_created_at ON track_logs(created_at)")
+
+ // Stats table indexes
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_stats_manga_id ON stats(manga_id)")
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_stats_started_at ON stats(started_at)")
+
+ // Run ANALYZE to update query planner statistics
+ db.execSQL("ANALYZE")
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration28To29.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration28To29.kt
new file mode 100644
index 0000000..f129fc5
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/db/migrations/Migration28To29.kt
@@ -0,0 +1,36 @@
+package org.koitharu.kotatsu.core.db.migrations
+
+import androidx.room.migration.Migration
+import androidx.sqlite.db.SupportSQLiteDatabase
+
+/**
+ * Migration 28 to 29: Source Health Monitor
+ *
+ * Creates the source_health table to track reliability and performance metrics
+ * for manga sources including success/failure rates, response times, and error history.
+ */
+class Migration28To29 : Migration(28, 29) {
+
+ override fun migrate(db: SupportSQLiteDatabase) {
+ db.execSQL(
+ """
+ CREATE TABLE IF NOT EXISTS source_health (
+ source TEXT NOT NULL PRIMARY KEY,
+ success_count INTEGER NOT NULL DEFAULT 0,
+ failure_count INTEGER NOT NULL DEFAULT 0,
+ avg_response_time INTEGER NOT NULL DEFAULT 0,
+ min_response_time INTEGER NOT NULL DEFAULT 9223372036854775807,
+ max_response_time INTEGER NOT NULL DEFAULT 0,
+ last_success_at INTEGER NOT NULL DEFAULT 0,
+ last_failure_at INTEGER NOT NULL DEFAULT 0,
+ last_error TEXT,
+ consecutive_failures INTEGER NOT NULL DEFAULT 0,
+ stats_reset_at INTEGER NOT NULL DEFAULT 0
+ )
+ """.trimIndent()
+ )
+
+ // Add index for efficient queries on health status
+ db.execSQL("CREATE INDEX IF NOT EXISTS index_source_health_consecutive_failures ON source_health(consecutive_failures)")
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/ProxyWebViewUnsupportedException.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/ProxyWebViewUnsupportedException.kt
new file mode 100644
index 0000000..7e4f644
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/ProxyWebViewUnsupportedException.kt
@@ -0,0 +1,7 @@
+package org.koitharu.kotatsu.core.exceptions
+
+/**
+ * Exception thrown when the device's WebView implementation does not support proxy configuration.
+ * This typically occurs on older Android versions or devices with outdated WebView providers.
+ */
+class ProxyWebViewUnsupportedException : IllegalStateException("Proxy for WebView is not supported on this device")
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/CaptchaHandler.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/CaptchaHandler.kt
index ceae3d5..fa041bf 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/CaptchaHandler.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/CaptchaHandler.kt
@@ -84,16 +84,24 @@ class CaptchaHandler @Inject constructor(
super.onError(request, result)
val e = result.throwable
if (e is CloudFlareException) {
- val scope = request.lifecycle?.coroutineScope ?: processLifecycleScope
+ val lifecycle = request.lifecycle
+ val scope = lifecycle?.coroutineScope ?: processLifecycleScope
+ // Extract request data to avoid holding reference to potentially destroyed context
+ val requestData = request.data
+ val suppressCaptcha = request.extras[suppressCaptchaKey]
scope.launch {
if (
handleException(
source = e.source,
exception = e,
- notify = request.extras[suppressCaptchaKey] != true,
+ notify = suppressCaptcha != true,
)
) {
- coilProvider.get().enqueue(request) // TODO check if ok
+ // Rebuild the request with application context to avoid leaking Activity
+ val newRequest = ImageRequest.Builder(context)
+ .data(requestData)
+ .build()
+ coilProvider.get().enqueue(newRequest)
}
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/ExceptionResolver.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/ExceptionResolver.kt
index 752eb07..72315d2 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/ExceptionResolver.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/exceptions/resolve/ExceptionResolver.kt
@@ -19,6 +19,7 @@ import org.koitharu.kotatsu.core.exceptions.CloudFlareProtectedException
import org.koitharu.kotatsu.core.exceptions.EmptyMangaException
import org.koitharu.kotatsu.core.exceptions.InteractiveActionRequiredException
import org.koitharu.kotatsu.core.exceptions.ProxyConfigException
+import org.koitharu.kotatsu.core.exceptions.ProxyWebViewUnsupportedException
import org.koitharu.kotatsu.core.exceptions.UnsupportedSourceException
import org.koitharu.kotatsu.core.nav.AppRouter
import org.koitharu.kotatsu.core.nav.router
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateCheckWorker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateCheckWorker.kt
new file mode 100644
index 0000000..cd1506e
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateCheckWorker.kt
@@ -0,0 +1,129 @@
+package org.koitharu.kotatsu.core.github
+
+import android.content.Context
+import androidx.hilt.work.HiltWorker
+import androidx.work.BackoffPolicy
+import androidx.work.Constraints
+import androidx.work.CoroutineWorker
+import androidx.work.ExistingPeriodicWorkPolicy
+import androidx.work.NetworkType
+import androidx.work.PeriodicWorkRequestBuilder
+import androidx.work.WorkManager
+import androidx.work.WorkerParameters
+import dagger.assisted.Assisted
+import dagger.assisted.AssistedInject
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.koitharu.kotatsu.core.prefs.AppSettings
+import java.util.concurrent.TimeUnit
+
+/**
+ * Background worker that periodically checks for app updates.
+ *
+ * Features:
+ * - Configurable check interval (default: daily)
+ * - WiFi-only option for data-conscious users
+ * - Respects user preferences for update channels
+ * - Shows notification when update is available
+ */
+@HiltWorker
+class AppUpdateCheckWorker @AssistedInject constructor(
+ @Assisted appContext: Context,
+ @Assisted params: WorkerParameters,
+ private val updateRepository: AppUpdateRepository,
+ private val updateNotifier: AppUpdateNotifier,
+ private val settings: AppSettings,
+) : CoroutineWorker(appContext, params) {
+
+ override suspend fun doWork(): Result = withContext(Dispatchers.IO) {
+ try {
+ // Check if auto-update check is enabled
+ if (!settings.isAutoUpdateCheckEnabled) {
+ return@withContext Result.success()
+ }
+
+ // Fetch available update
+ val update = updateRepository.fetchUpdate()
+
+ if (update != null) {
+ // Check if this version was previously skipped
+ if (!settings.isVersionSkipped(update.name)) {
+ updateNotifier.showUpdateNotification(update)
+ }
+ }
+
+ // Update last check time
+ settings.lastUpdateCheckTime = System.currentTimeMillis()
+
+ Result.success()
+ } catch (e: Exception) {
+ if (runAttemptCount < MAX_RETRY_COUNT) {
+ Result.retry()
+ } else {
+ Result.failure()
+ }
+ }
+ }
+
+ companion object {
+ private const val WORK_NAME = "app_update_check"
+ private const val MAX_RETRY_COUNT = 3
+
+ /**
+ * Schedule periodic update checks.
+ */
+ fun schedule(context: Context, settings: AppSettings) {
+ val workManager = WorkManager.getInstance(context)
+
+ if (!settings.isAutoUpdateCheckEnabled) {
+ workManager.cancelUniqueWork(WORK_NAME)
+ return
+ }
+
+ val intervalHours = settings.updateCheckIntervalHours.toLong()
+
+ val constraints = Constraints.Builder()
+ .setRequiredNetworkType(
+ if (settings.isUpdateCheckWifiOnly) NetworkType.UNMETERED
+ else NetworkType.CONNECTED
+ )
+ .setRequiresBatteryNotLow(true)
+ .build()
+
+ val request = PeriodicWorkRequestBuilder<AppUpdateCheckWorker>(
+ intervalHours, TimeUnit.HOURS,
+ intervalHours / 4, TimeUnit.HOURS, // Flex interval
+ )
+ .setConstraints(constraints)
+ .setBackoffCriteria(BackoffPolicy.EXPONENTIAL, 30, TimeUnit.MINUTES)
+ .build()
+
+ workManager.enqueueUniquePeriodicWork(
+ WORK_NAME,
+ ExistingPeriodicWorkPolicy.UPDATE,
+ request,
+ )
+ }
+
+ /**
+ * Cancel scheduled update checks.
+ */
+ fun cancel(context: Context) {
+ WorkManager.getInstance(context).cancelUniqueWork(WORK_NAME)
+ }
+
+ /**
+ * Run an immediate update check.
+ */
+ fun checkNow(context: Context) {
+ val request = androidx.work.OneTimeWorkRequestBuilder<AppUpdateCheckWorker>()
+ .setConstraints(
+ Constraints.Builder()
+ .setRequiredNetworkType(NetworkType.CONNECTED)
+ .build()
+ )
+ .build()
+ WorkManager.getInstance(context).enqueue(request)
+ }
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateDismissReceiver.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateDismissReceiver.kt
new file mode 100644
index 0000000..fa96161
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateDismissReceiver.kt
@@ -0,0 +1,21 @@
+package org.koitharu.kotatsu.core.github
+
+import android.content.BroadcastReceiver
+import android.content.Context
+import android.content.Intent
+import dagger.hilt.android.AndroidEntryPoint
+import javax.inject.Inject
+
+/**
+ * Broadcast receiver for handling update notification dismissal.
+ */
+@AndroidEntryPoint
+class AppUpdateDismissReceiver : BroadcastReceiver() {
+
+ @Inject
+ lateinit var updateNotifier: AppUpdateNotifier
+
+ override fun onReceive(context: Context, intent: Intent) {
+ updateNotifier.cancelNotification()
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateNotifier.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateNotifier.kt
new file mode 100644
index 0000000..cb3758e
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/AppUpdateNotifier.kt
@@ -0,0 +1,153 @@
+package org.koitharu.kotatsu.core.github
+
+import android.Manifest
+import android.app.NotificationChannel
+import android.app.NotificationManager
+import android.app.PendingIntent
+import android.content.Context
+import android.content.Intent
+import android.content.pm.PackageManager
+import android.os.Build
+import androidx.core.app.NotificationCompat
+import androidx.core.app.NotificationManagerCompat
+import androidx.core.content.ContextCompat
+import dagger.hilt.android.qualifiers.ApplicationContext
+import org.koitharu.kotatsu.R
+import org.koitharu.kotatsu.core.util.FileSize
+import org.koitharu.kotatsu.settings.about.AppUpdateActivity
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Handles app update notifications.
+ */
+@Singleton
+class AppUpdateNotifier @Inject constructor(
+ @ApplicationContext private val context: Context,
+) {
+ private val notificationManager = NotificationManagerCompat.from(context)
+
+ init {
+ createNotificationChannel()
+ }
+
+ /**
+ * Show a notification about an available update.
+ */
+ fun showUpdateNotification(version: AppVersion) {
+ if (!hasNotificationPermission()) {
+ return
+ }
+
+ val intent = Intent(context, AppUpdateActivity::class.java).apply {
+ flags = Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TOP
+ }
+
+ val pendingIntent = PendingIntent.getActivity(
+ context,
+ NOTIFICATION_ID,
+ intent,
+ PendingIntent.FLAG_UPDATE_CURRENT or PendingIntent.FLAG_IMMUTABLE,
+ )
+
+ val notification = NotificationCompat.Builder(context, CHANNEL_ID)
+ .setSmallIcon(R.drawable.ic_updated)
+ .setContentTitle(context.getString(R.string.new_version_s, version.name))
+ .setContentText(
+ context.getString(R.string.size_s, FileSize.BYTES.format(context, version.apkSize))
+ )
+ .setStyle(
+ NotificationCompat.BigTextStyle()
+ .bigText(buildNotificationText(version))
+ )
+ .setContentIntent(pendingIntent)
+ .setAutoCancel(true)
+ .setPriority(NotificationCompat.PRIORITY_LOW)
+ .setCategory(NotificationCompat.CATEGORY_RECOMMENDATION)
+ .addAction(
+ R.drawable.ic_download,
+ context.getString(R.string.download),
+ pendingIntent,
+ )
+ .addAction(
+ 0,
+ context.getString(R.string.close),
+ createDismissIntent(),
+ )
+ .build()
+
+ notificationManager.notify(NOTIFICATION_ID, notification)
+ }
+
+ /**
+ * Cancel the update notification.
+ */
+ fun cancelNotification() {
+ notificationManager.cancel(NOTIFICATION_ID)
+ }
+
+ private fun buildNotificationText(version: AppVersion): String {
+ return buildString {
+ append(context.getString(R.string.new_version_s, version.name))
+ appendLine()
+ append(context.getString(R.string.size_s, FileSize.BYTES.format(context, version.apkSize)))
+
+ // Add first few lines of changelog if available
+ val changelog = version.description.lines()
+ .filter { it.isNotBlank() }
+ .take(3)
+ if (changelog.isNotEmpty()) {
+ appendLine()
+ appendLine()
+ append(context.getString(R.string.changelog))
+ append(":")
+ changelog.forEach { line ->
+ appendLine()
+ append("• ")
+ append(line.removePrefix("- ").removePrefix("* ").trim())
+ }
+ }
+ }
+ }
+
+ private fun createDismissIntent(): PendingIntent {
+ val intent = Intent(context, AppUpdateDismissReceiver::class.java)
+ return PendingIntent.getBroadcast(
+ context,
+ DISMISS_REQUEST_CODE,
+ intent,
+ PendingIntent.FLAG_UPDATE_CURRENT or PendingIntent.FLAG_IMMUTABLE,
+ )
+ }
+
+ private fun createNotificationChannel() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ val channel = NotificationChannel(
+ CHANNEL_ID,
+ context.getString(R.string.updates),
+ NotificationManager.IMPORTANCE_LOW,
+ ).apply {
+ description = context.getString(R.string.updates)
+ setShowBadge(true)
+ }
+ notificationManager.createNotificationChannel(channel)
+ }
+ }
+
+ private fun hasNotificationPermission(): Boolean {
+ return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
+ ContextCompat.checkSelfPermission(
+ context,
+ Manifest.permission.POST_NOTIFICATIONS,
+ ) == PackageManager.PERMISSION_GRANTED
+ } else {
+ true
+ }
+ }
+
+ companion object {
+ private const val CHANNEL_ID = "app_updates"
+ private const val NOTIFICATION_ID = 10001
+ private const val DISMISS_REQUEST_CODE = 10002
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/github/VersionId.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/VersionId.kt
index 8fde282..dd6bada 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/github/VersionId.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/github/VersionId.kt
@@ -45,8 +45,9 @@ val VersionId.isStable: Boolean
fun VersionId(versionName: String): VersionId {
if (versionName.contains("nightly", ignoreCase = true) ||
- versionName.contains("daily", ignoreCase = true)) {
- // Nightly build
+ versionName.contains("daily", ignoreCase = true) ||
+ versionName.startsWith("N", ignoreCase = false)) {
+ // Nightly build (handles "nightly", "daily", and "N20260122" formats)
return VersionId(
major = 0,
minor = 0,
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/nav/AppRouter.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/nav/AppRouter.kt
index d60c787..4b806af 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/nav/AppRouter.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/nav/AppRouter.kt
@@ -640,16 +640,45 @@ class AppRouter private constructor(
.startChooser()
}
- private fun shareFile(file: File) { // TODO directory sharing support
+ private fun shareFile(file: File) {
val context = contextOrNull() ?: return
+ if (file.isDirectory) {
+ shareDirectory(file, context)
+ } else {
+ shareSingleFile(file, context)
+ }
+ }
+
+ private fun shareSingleFile(file: File, context: Context) {
+ val mimeType = when {
+ file.extension.equals("cbz", ignoreCase = true) -> TYPE_CBZ
+ file.extension.equals("cbr", ignoreCase = true) -> TYPE_CBR
+ file.extension.equals("zip", ignoreCase = true) -> TYPE_ZIP
+ else -> TYPE_OCTET_STREAM
+ }
val intentBuilder = ShareCompat.IntentBuilder(context)
- .setType(TYPE_CBZ)
+ .setType(mimeType)
val uri = FileProvider.getUriForFile(context, "${BuildConfig.APPLICATION_ID}.files", file)
intentBuilder.addStream(uri)
intentBuilder.setChooserTitle(context.getString(R.string.share_s, file.name))
intentBuilder.startChooser()
}
+ private fun shareDirectory(directory: File, context: Context) {
+ val files = directory.listFiles { f -> f.isFile && !f.isHidden }
+ if (files.isNullOrEmpty()) {
+ return
+ }
+ val intentBuilder = ShareCompat.IntentBuilder(context)
+ .setType(TYPE_IMAGE)
+ for (file in files) {
+ val uri = FileProvider.getUriForFile(context, "${BuildConfig.APPLICATION_ID}.files", file)
+ intentBuilder.addStream(uri)
+ }
+ intentBuilder.setChooserTitle(context.getString(R.string.share_s, directory.name))
+ intentBuilder.startChooser()
+ }
+
@UiContext
private fun contextOrNull(): Context? = activity ?: fragment?.context
@@ -854,8 +883,11 @@ class AppRouter private constructor(
private const val TYPE_TEXT = "text/plain"
private const val TYPE_IMAGE = "image/*"
private const val TYPE_CBZ = "application/x-cbz"
+ private const val TYPE_CBR = "application/x-cbr"
+ private const val TYPE_ZIP = "application/zip"
+ private const val TYPE_OCTET_STREAM = "application/octet-stream"
- private fun Class<*>.fragmentTag() = name // TODO
+ private fun Class<*>.fragmentTag() = name
 private inline fun <reified F : Fragment> fragmentTag() = F::class.java.fragmentTag()
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/AdaptiveNetworkSettings.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/AdaptiveNetworkSettings.kt
new file mode 100644
index 0000000..0ed36e2
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/AdaptiveNetworkSettings.kt
@@ -0,0 +1,199 @@
+package org.koitharu.kotatsu.core.network
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.map
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Provides adaptive network settings based on current network quality.
+ * Use this class to get dynamic settings that adjust to network conditions.
+ */
+@Singleton
+class AdaptiveNetworkSettings @Inject constructor(
+ private val networkQualityMonitor: NetworkQualityMonitor,
+) {
+ /**
+ * Current network quality level.
+ */
+ val quality: Flow<NetworkQuality>
+ get() = networkQualityMonitor.quality
+
+ /**
+ * Current quality value (for synchronous access).
+ */
+ val currentQuality: NetworkQuality
+ get() = networkQualityMonitor.quality.value
+
+ /**
+ * Observable settings that update when network quality changes.
+ */
+ val adaptiveSettings: Flow<Settings> = networkQualityMonitor.quality.map { quality ->
+ Settings(
+ quality = quality,
+ connectTimeoutMs = getConnectTimeout(quality),
+ readTimeoutMs = getReadTimeout(quality),
+ writeTimeoutMs = getWriteTimeout(quality),
+ maxConcurrentDownloads = getMaxConcurrentDownloads(quality),
+ preloadPageCount = getPreloadPageCount(quality),
+ imageQualityReduction = getImageQualityReduction(quality),
+ retryCount = getRetryCount(quality),
+ retryDelayMs = getRetryDelay(quality),
+ enableAggressiveCache = shouldEnableAggressiveCache(quality),
+ )
+ }
+
+ /**
+ * Get current settings snapshot.
+ */
+ fun getCurrentSettings(): Settings {
+ val quality = currentQuality
+ return Settings(
+ quality = quality,
+ connectTimeoutMs = getConnectTimeout(quality),
+ readTimeoutMs = getReadTimeout(quality),
+ writeTimeoutMs = getWriteTimeout(quality),
+ maxConcurrentDownloads = getMaxConcurrentDownloads(quality),
+ preloadPageCount = getPreloadPageCount(quality),
+ imageQualityReduction = getImageQualityReduction(quality),
+ retryCount = getRetryCount(quality),
+ retryDelayMs = getRetryDelay(quality),
+ enableAggressiveCache = shouldEnableAggressiveCache(quality),
+ )
+ }
+
+ /**
+ * Report a successful download for bandwidth estimation.
+ */
+ fun reportDownload(bytes: Long, durationMs: Long) {
+ networkQualityMonitor.reportDownload(bytes, durationMs)
+ }
+
+ private fun getConnectTimeout(quality: NetworkQuality): Long {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 10_000L
+ NetworkQuality.GOOD -> 15_000L
+ NetworkQuality.MODERATE -> 20_000L
+ NetworkQuality.POOR -> 30_000L
+ NetworkQuality.OFFLINE -> 15_000L
+ }
+ }
+
+ private fun getReadTimeout(quality: NetworkQuality): Long {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 30_000L
+ NetworkQuality.GOOD -> 45_000L
+ NetworkQuality.MODERATE -> 60_000L
+ NetworkQuality.POOR -> 90_000L
+ NetworkQuality.OFFLINE -> 60_000L
+ }
+ }
+
+ private fun getWriteTimeout(quality: NetworkQuality): Long {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 15_000L
+ NetworkQuality.GOOD -> 20_000L
+ NetworkQuality.MODERATE -> 25_000L
+ NetworkQuality.POOR -> 40_000L
+ NetworkQuality.OFFLINE -> 20_000L
+ }
+ }
+
+ private fun getMaxConcurrentDownloads(quality: NetworkQuality): Int {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 6
+ NetworkQuality.GOOD -> 4
+ NetworkQuality.MODERATE -> 2
+ NetworkQuality.POOR -> 1
+ NetworkQuality.OFFLINE -> 0
+ }
+ }
+
+ private fun getPreloadPageCount(quality: NetworkQuality): Int {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 5
+ NetworkQuality.GOOD -> 3
+ NetworkQuality.MODERATE -> 1
+ NetworkQuality.POOR -> 0
+ NetworkQuality.OFFLINE -> 0
+ }
+ }
+
+ /**
+ * Returns image quality reduction factor (0-100).
+ * 0 = full quality, 100 = maximum reduction
+ */
+ private fun getImageQualityReduction(quality: NetworkQuality): Int {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 0
+ NetworkQuality.GOOD -> 0
+ NetworkQuality.MODERATE -> 20
+ NetworkQuality.POOR -> 40
+ NetworkQuality.OFFLINE -> 0
+ }
+ }
+
+ private fun getRetryCount(quality: NetworkQuality): Int {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 2
+ NetworkQuality.GOOD -> 3
+ NetworkQuality.MODERATE -> 4
+ NetworkQuality.POOR -> 5
+ NetworkQuality.OFFLINE -> 3
+ }
+ }
+
+ private fun getRetryDelay(quality: NetworkQuality): Long {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> 500L
+ NetworkQuality.GOOD -> 1_000L
+ NetworkQuality.MODERATE -> 2_000L
+ NetworkQuality.POOR -> 4_000L
+ NetworkQuality.OFFLINE -> 1_000L
+ }
+ }
+
+ private fun shouldEnableAggressiveCache(quality: NetworkQuality): Boolean {
+ return when (quality) {
+ NetworkQuality.EXCELLENT -> false
+ NetworkQuality.GOOD -> false
+ NetworkQuality.MODERATE -> true
+ NetworkQuality.POOR -> true
+ NetworkQuality.OFFLINE -> true
+ }
+ }
+
+ /**
+ * Adaptive network settings snapshot.
+ */
+ data class Settings(
+ val quality: NetworkQuality,
+ val connectTimeoutMs: Long,
+ val readTimeoutMs: Long,
+ val writeTimeoutMs: Long,
+ val maxConcurrentDownloads: Int,
+ val preloadPageCount: Int,
+ val imageQualityReduction: Int,
+ val retryCount: Int,
+ val retryDelayMs: Long,
+ val enableAggressiveCache: Boolean,
+ ) {
+ /**
+ * Check if downloads should be allowed.
+ */
+ val allowsDownloads: Boolean
+ get() = quality.isConnected
+
+ /**
+ * Check if preloading is recommended.
+ */
+ val allowsPreloading: Boolean
+ get() = quality.allowsPreloading
+
+ /**
+ * Check if high quality images should be used.
+ */
+ val allowsHighQuality: Boolean
+ get() = quality.allowsHighQuality
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/BandwidthTrackingInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/BandwidthTrackingInterceptor.kt
new file mode 100644
index 0000000..1dae09b
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/BandwidthTrackingInterceptor.kt
@@ -0,0 +1,99 @@
+package org.koitharu.kotatsu.core.network
+
+import okhttp3.Interceptor
+import okhttp3.Response
+import okhttp3.ResponseBody.Companion.asResponseBody
+import okio.Buffer
+import okio.ForwardingSource
+import okio.Source
+import okio.buffer
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Interceptor that tracks download speed and reports it to NetworkQualityMonitor.
+ * This allows the app to adapt to current network conditions.
+ */
+@Singleton
+class BandwidthTrackingInterceptor @Inject constructor(
+ private val networkQualityMonitor: NetworkQualityMonitor,
+) : Interceptor {
+
+ override fun intercept(chain: Interceptor.Chain): Response {
+ val request = chain.request()
+ val startTime = System.currentTimeMillis()
+
+ val response = chain.proceed(request)
+
+ // Only track successful responses with body
+ val body = response.body
+ if (!response.isSuccessful || body == null) {
+ return response
+ }
+
+ // Wrap the response body to track download progress
+ val contentLength = body.contentLength()
+ val trackingSource = TrackingSource(
+ source = body.source(),
+ contentLength = contentLength,
+ startTime = startTime,
+ onComplete = { bytes, durationMs ->
+ if (bytes > MIN_BYTES_TO_TRACK && durationMs > MIN_DURATION_MS) {
+ networkQualityMonitor.reportDownload(bytes, durationMs)
+ }
+ },
+ )
+
+ val trackingBody = trackingSource.buffer().asResponseBody(body.contentType(), contentLength)
+ return response.newBuilder()
+ .body(trackingBody)
+ .build()
+ }
+
+ /**
+ * Source that tracks bytes read and reports bandwidth when complete.
+ */
+ private class TrackingSource(
+ source: Source,
+ private val contentLength: Long,
+ private val startTime: Long,
+ private val onComplete: (bytes: Long, durationMs: Long) -> Unit,
+ ) : ForwardingSource(source) {
+
+ private var totalBytesRead = 0L
+ private var reported = false
+
+ override fun read(sink: Buffer, byteCount: Long): Long {
+ val bytesRead = super.read(sink, byteCount)
+
+ if (bytesRead != -1L) {
+ totalBytesRead += bytesRead
+ }
+
+ // Report when complete
+ if (!reported && (bytesRead == -1L || (contentLength > 0 && totalBytesRead >= contentLength))) {
+ reported = true
+ val duration = System.currentTimeMillis() - startTime
+ onComplete(totalBytesRead, duration)
+ }
+
+ return bytesRead
+ }
+
+ override fun close() {
+ // Report on close if not already reported
+ if (!reported && totalBytesRead > 0) {
+ reported = true
+ val duration = System.currentTimeMillis() - startTime
+ onComplete(totalBytesRead, duration)
+ }
+ super.close()
+ }
+ }
+
+ companion object {
+ // Minimum thresholds for tracking to avoid noise from small requests
+ private const val MIN_BYTES_TO_TRACK = 10_000L // 10KB
+ private const val MIN_DURATION_MS = 50L // 50ms
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetchManager.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetchManager.kt
new file mode 100644
index 0000000..895a4e3
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetchManager.kt
@@ -0,0 +1,101 @@
+package org.koitharu.kotatsu.core.network
+
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.SupervisorJob
+import kotlinx.coroutines.launch
+import org.koitharu.kotatsu.core.os.NetworkState
+import org.koitharu.kotatsu.explore.data.MangaSourcesRepository
+import org.koitharu.kotatsu.parsers.model.MangaSource
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Manages DNS prefetching for manga sources.
+ *
+ * Automatically prefetches DNS for:
+ * - Common CDN domains used by multiple sources
+ * - Source-specific domains when sources are accessed
+ */
+@Singleton
+class DnsPrefetchManager @Inject constructor(
+ private val dnsPrefetcher: DnsPrefetcher,
+ private val sourcesRepository: MangaSourcesRepository,
+ private val networkState: NetworkState,
+) {
+ private val scope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
+ private var isInitialized = false
+
+ /**
+ * Initialize DNS prefetching.
+ * Should be called during app startup.
+ */
+ fun initialize() {
+ if (isInitialized) return
+ isInitialized = true
+
+ // Start prefetcher with common domains
+ dnsPrefetcher.start()
+ }
+
+ /**
+ * Prefetch DNS for a specific source when the user opens it.
+ */
+ fun prefetchForSource(source: MangaSource) {
+ if (!networkState.isOnline()) return
+
+ // Prefetch common image CDN domains for this source
+ val cdnDomains = getSourceCdnDomains(source)
+ if (cdnDomains.isNotEmpty()) {
+ scope.launch {
+ dnsPrefetcher.prefetch(*cdnDomains.toTypedArray())
+ }
+ }
+ }
+
+ /**
+ * Prefetch DNS for URLs that are about to be loaded.
+ */
+ fun prefetchForUrls(vararg urls: String) {
+ if (!networkState.isOnline()) return
+ scope.launch {
+ dnsPrefetcher.prefetchFromUrls(*urls)
+ }
+ }
+
+ /**
+ * Get cache statistics for debugging.
+ */
+ fun getCacheStats(): DnsPrefetcher.CacheStats {
+ return dnsPrefetcher.getCacheStats()
+ }
+
+ /**
+ * Clear the DNS cache.
+ */
+ fun clearCache() {
+ dnsPrefetcher.clearCache()
+ }
+
+ private fun getSourceCdnDomains(source: MangaSource): Set<String> {
+ // Common CDN domains used by manga sites
+ // This can be extended based on known source patterns
+ return when {
+ source.name.contains("mangadex", ignoreCase = true) -> setOf(
+ "uploads.mangadex.org",
+ "api.mangadex.org",
+ )
+ source.name.contains("webtoon", ignoreCase = true) -> setOf(
+ "webtoon-phinf.pstatic.net",
+ )
+ source.name.contains("mangakakalot", ignoreCase = true) -> setOf(
+ "cm.blazefast.co",
+ "avt.mkklcdnv6temp.com",
+ )
+ source.name.contains("mangasee", ignoreCase = true) -> setOf(
+ "temp.compsci88.com",
+ )
+ else -> emptySet()
+ }
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetcher.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetcher.kt
new file mode 100644
index 0000000..5299255
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/DnsPrefetcher.kt
@@ -0,0 +1,304 @@
+package org.koitharu.kotatsu.core.network
+
+import android.util.Log
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.SupervisorJob
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.isActive
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
+import okhttp3.Dns
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.core.os.NetworkState
+import java.net.InetAddress
+import java.net.UnknownHostException
+import java.util.concurrent.ConcurrentHashMap
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * DNS Prefetcher that proactively resolves domain names for faster first connections.
+ *
+ * Features:
+ * - Caches DNS results with configurable TTL
+ * - Prefetches domains for enabled manga sources
+ * - Background refresh before entries expire
+ * - Observable prefetch status for debugging
+ * - Integrates with existing DoH/DNS infrastructure
+ */
+@Singleton
+class DnsPrefetcher @Inject constructor(
+ private val networkState: NetworkState,
+) : Dns {
+
+ private val scope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
+ private val cache = ConcurrentHashMap<String, DnsCacheEntry>()
+ private val refreshMutex = Mutex()
+ private val pendingPrefetches = ConcurrentHashMap.newKeySet<String>()
+
+ @Volatile private var delegate: Dns = Dns.SYSTEM // written at startup, read on OkHttp threads
+ @Volatile private var isRunning = false // written from start()/stop(), read in refresh loop
+
+ /**
+ * Common domains that should be prefetched.
+ * These include CDN providers and common manga hosting services.
+ */
+ private val commonDomains = setOf(
+ // CDN providers commonly used by manga sites
+ "cdn.jsdelivr.net",
+ "cdnjs.cloudflare.com",
+ "fonts.googleapis.com",
+ "fonts.gstatic.com",
+
+ // Image hosting services
+ "i.imgur.com",
+ "imgur.com",
+
+ // Common infrastructure
+ "cloudflare.com",
+ "fastly.net",
+ )
+
+ /**
+ * Set the delegate DNS resolver (e.g., DoHManager).
+ */
+ fun setDelegate(dns: Dns) {
+ delegate = dns
+ }
+
+ /**
+ * Start the prefetcher with a list of domains to prefetch.
+ */
+ fun start(domains: Set<String> = emptySet()) {
+ if (isRunning) return
+ isRunning = true
+
+ scope.launch {
+ // Initial prefetch
+ val allDomains = commonDomains + domains
+ prefetchDomains(allDomains)
+
+ // Background refresh loop
+ while (isActive && isRunning) {
+ delay(REFRESH_INTERVAL_MS)
+ refreshExpiringSoon()
+ }
+ }
+ }
+
+ /**
+ * Stop the prefetcher.
+ */
+ fun stop() {
+ isRunning = false
+ }
+
+ /**
+ * Add domains to prefetch queue.
+ */
+ fun prefetch(vararg domains: String) {
+ if (!networkState.isOnline()) return
+
+ scope.launch {
+ for (domain in domains) {
+ prefetchSingle(domain)
+ }
+ }
+ }
+
+ /**
+ * Add domains from URLs to prefetch queue.
+ */
+ fun prefetchFromUrls(vararg urls: String) {
+ val domains = urls.mapNotNull { extractDomain(it) }.toSet()
+ if (domains.isNotEmpty()) {
+ prefetch(*domains.toTypedArray())
+ }
+ }
+
+ /**
+ * DNS lookup with cache support.
+ */
+ override fun lookup(hostname: String): List<InetAddress> {
+ // Check cache first
+ val cached = cache[hostname]
+ if (cached != null && !cached.isExpired()) {
+ logDebug { "DNS cache hit: $hostname -> ${cached.addresses.size} addresses" }
+ return cached.addresses
+ }
+
+ // Cache miss or expired, do actual lookup
+ return try {
+ val addresses = delegate.lookup(hostname)
+ cache[hostname] = DnsCacheEntry(
+ hostname = hostname,
+ addresses = addresses,
+ timestamp = System.currentTimeMillis(),
+ )
+ logDebug { "DNS lookup: $hostname -> ${addresses.size} addresses" }
+ addresses
+ } catch (e: UnknownHostException) {
+ // Remove from cache on failure
+ cache.remove(hostname)
+ throw e
+ }
+ }
+
+ /**
+ * Get current cache statistics.
+ */
+ fun getCacheStats(): CacheStats {
+ val now = System.currentTimeMillis()
+ var validCount = 0
+ var expiredCount = 0
+ var totalAddresses = 0
+
+ for (entry in cache.values) {
+ if (entry.isExpired(now)) {
+ expiredCount++
+ } else {
+ validCount++
+ totalAddresses += entry.addresses.size
+ }
+ }
+
+ return CacheStats(
+ totalEntries = cache.size,
+ validEntries = validCount,
+ expiredEntries = expiredCount,
+ totalAddresses = totalAddresses,
+ pendingPrefetches = pendingPrefetches.size,
+ )
+ }
+
+ /**
+ * Clear the DNS cache.
+ */
+ fun clearCache() {
+ cache.clear()
+ logDebug { "DNS cache cleared" }
+ }
+
+ /**
+ * Check if a domain is cached and valid.
+ */
+ fun isCached(hostname: String): Boolean {
+ val entry = cache[hostname] ?: return false
+ return !entry.isExpired()
+ }
+
+ private suspend fun prefetchDomains(domains: Set<String>) {
+ if (!networkState.isOnline()) return
+
+ for (domain in domains) {
+ prefetchSingle(domain)
+ // Small delay to avoid flooding
+ delay(50)
+ }
+ }
+
+ private suspend fun prefetchSingle(hostname: String) {
+ // Skip if already cached and valid
+ val cached = cache[hostname]
+ if (cached != null && !cached.isExpiringSoon()) {
+ return
+ }
+
+ // Skip if already pending
+ if (!pendingPrefetches.add(hostname)) {
+ return
+ }
+
+ try {
+ val addresses = delegate.lookup(hostname)
+ cache[hostname] = DnsCacheEntry(
+ hostname = hostname,
+ addresses = addresses,
+ timestamp = System.currentTimeMillis(),
+ )
+ logDebug { "DNS prefetch: $hostname -> ${addresses.size} addresses" }
+ } catch (e: UnknownHostException) {
+ logDebug { "DNS prefetch failed: $hostname - ${e.message}" }
+ } catch (e: Exception) {
+ logDebug { "DNS prefetch error: $hostname - ${e.message}" }
+ } finally {
+ pendingPrefetches.remove(hostname)
+ }
+ }
+
+ private suspend fun refreshExpiringSoon() {
+ if (!networkState.isOnline()) return
+
+ refreshMutex.withLock {
+ val now = System.currentTimeMillis()
+ val toRefresh = cache.values.filter { it.isExpiringSoon(now) }.map { it.hostname }
+
+ for (hostname in toRefresh) {
+ prefetchSingle(hostname)
+ delay(100)
+ }
+
+ // Clean up expired entries
+ cache.entries.removeIf { it.value.isExpired(now) }
+ }
+ }
+
+ private fun extractDomain(url: String): String? {
+ return try {
+ val withoutProtocol = url
+ .removePrefix("https://")
+ .removePrefix("http://")
+ val endIndex = withoutProtocol.indexOfAny(charArrayOf('/', ':', '?', '#'))
+ if (endIndex > 0) {
+ withoutProtocol.substring(0, endIndex)
+ } else {
+ withoutProtocol
+ }
+ } catch (e: Exception) {
+ null
+ }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ /**
+ * Cache entry for a DNS lookup result.
+ */
+ private data class DnsCacheEntry(
+ val hostname: String,
+ val addresses: List<InetAddress>,
+ val timestamp: Long,
+ ) {
+ fun isExpired(now: Long = System.currentTimeMillis()): Boolean {
+ return now - timestamp > TTL_MS
+ }
+
+ fun isExpiringSoon(now: Long = System.currentTimeMillis()): Boolean {
+ return now - timestamp > TTL_MS - REFRESH_BEFORE_EXPIRY_MS
+ }
+ }
+
+ /**
+ * Statistics about the DNS cache.
+ */
+ data class CacheStats(
+ val totalEntries: Int,
+ val validEntries: Int,
+ val expiredEntries: Int,
+ val totalAddresses: Int,
+ val pendingPrefetches: Int,
+ )
+
+ companion object {
+ private const val TAG = "DnsPrefetcher"
+ private const val TTL_MS = 5 * 60 * 1000L // 5 minutes
+ private const val REFRESH_BEFORE_EXPIRY_MS = 60 * 1000L // Refresh 1 minute before expiry
+ private const val REFRESH_INTERVAL_MS = 30 * 1000L // Check every 30 seconds
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkModule.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkModule.kt
index 6c64603..7387a7d 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkModule.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkModule.kt
@@ -64,6 +64,7 @@ interface NetworkModule {
cookieJar: CookieJar,
settings: AppSettings,
proxyProvider: ProxyProvider,
+ dnsPrefetcher: DnsPrefetcher,
): OkHttpClient = OkHttpClient.Builder().apply {
assertNotInMainThread()
connectTimeout(20, TimeUnit.SECONDS)
@@ -72,7 +73,10 @@ interface NetworkModule {
cookieJar(cookieJar)
proxySelector(proxyProvider.selector)
proxyAuthenticator(proxyProvider.authenticator)
- dns(DoHManager(cache, settings))
+ // Set up DNS with prefetching - prefetcher wraps DoHManager
+ val dohManager = DoHManager(cache, settings)
+ dnsPrefetcher.setDelegate(dohManager)
+ dns(dnsPrefetcher)
if (settings.isSSLBypassEnabled) {
disableCertificateVerification()
} else {
@@ -93,9 +97,11 @@ interface NetworkModule {
fun provideMangaHttpClient(
@BaseHttpClient baseClient: OkHttpClient,
commonHeadersInterceptor: CommonHeadersInterceptor,
+ bandwidthTrackingInterceptor: BandwidthTrackingInterceptor,
): OkHttpClient = baseClient.newBuilder().apply {
addNetworkInterceptor(CacheLimitInterceptor())
addInterceptor(commonHeadersInterceptor)
+ addNetworkInterceptor(bandwidthTrackingInterceptor)
}.build()
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQuality.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQuality.kt
new file mode 100644
index 0000000..c46f700
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQuality.kt
@@ -0,0 +1,42 @@
+package org.koitharu.kotatsu.core.network
+
+/**
+ * Represents the current network quality level.
+ * Used for adaptive behavior decisions throughout the app.
+ */
+enum class NetworkQuality(
+ val level: Int,
+) {
+ /** No network connection */
+ OFFLINE(0),
+
+ /** Very slow connection - minimize data usage */
+ POOR(1),
+
+ /** Limited bandwidth - reduce quality/preloading */
+ MODERATE(2),
+
+ /** Good connection - normal operation */
+ GOOD(3),
+
+ /** Excellent connection - enable aggressive preloading */
+ EXCELLENT(4);
+
+ val isConnected: Boolean
+ get() = this != OFFLINE
+
+ val allowsPreloading: Boolean
+ get() = level >= GOOD.level
+
+ val allowsHighQuality: Boolean
+ get() = level >= MODERATE.level
+
+ val allowsAggressivePreloading: Boolean
+ get() = level >= EXCELLENT.level
+
+ companion object {
+ fun fromLevel(level: Int): NetworkQuality {
+ return entries.find { it.level == level } ?: OFFLINE
+ }
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQualityMonitor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQualityMonitor.kt
new file mode 100644
index 0000000..c64e9e6
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/NetworkQualityMonitor.kt
@@ -0,0 +1,422 @@
+package org.koitharu.kotatsu.core.network
+
+import android.content.Context
+import android.net.ConnectivityManager
+import android.net.Network
+import android.net.NetworkCapabilities
+import android.net.NetworkRequest
+import android.os.Build
+import android.telephony.TelephonyManager
+import android.util.Log
+import dagger.hilt.android.qualifiers.ApplicationContext
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.SupervisorJob
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.flow.asStateFlow
+import kotlinx.coroutines.isActive
+import kotlinx.coroutines.launch
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.core.util.ext.connectivityManager
+import java.io.IOException
+import java.net.HttpURLConnection
+import java.net.URL
+import java.util.concurrent.atomic.AtomicLong
+import javax.inject.Inject
+import javax.inject.Singleton
+import kotlin.math.roundToInt
+
+/**
+ * Monitors network quality and provides adaptive behavior recommendations.
+ *
+ * Features:
+ * - Real-time network quality assessment
+ * - Bandwidth estimation based on recent downloads
+ * - Latency tracking
+ * - Cellular network type detection (2G/3G/4G/5G)
+ * - Observable network quality state
+ */
+@Singleton
+class NetworkQualityMonitor @Inject constructor(
+ @ApplicationContext private val context: Context,
+) {
+ private val scope = CoroutineScope(SupervisorJob() + Dispatchers.IO)
+ private val connectivityManager = context.connectivityManager
+
+ private val _quality = MutableStateFlow(NetworkQuality.GOOD)
+ val quality: StateFlow<NetworkQuality> = _quality.asStateFlow()
+
+ private val _bandwidthKbps = MutableStateFlow(0)
+ val bandwidthKbps: StateFlow<Int> = _bandwidthKbps.asStateFlow()
+
+ private val _latencyMs = MutableStateFlow(0L)
+ val latencyMs: StateFlow<Long> = _latencyMs.asStateFlow()
+
+ // Sliding window for bandwidth samples
+ private val bandwidthSamples = mutableListOf<Int>()
+ private val maxSamples = 10
+
+ // For tracking download speeds
+ private val bytesDownloaded = AtomicLong(0L)
+ private val downloadStartTime = AtomicLong(0L)
+
+ private var isMonitoring = false
+ private val callback = NetworkCallbackImpl()
+
+ init {
+ startMonitoring()
+ }
+
+ /**
+ * Start monitoring network quality.
+ */
+ @Synchronized
+ fun startMonitoring() {
+ if (isMonitoring) return
+ isMonitoring = true
+
+ val request = NetworkRequest.Builder()
+ .addTransportType(NetworkCapabilities.TRANSPORT_WIFI)
+ .addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR)
+ .addTransportType(NetworkCapabilities.TRANSPORT_ETHERNET)
+ .addTransportType(NetworkCapabilities.TRANSPORT_VPN)
+ .build()
+
+ try {
+ connectivityManager.registerNetworkCallback(request, callback)
+ } catch (e: Exception) {
+ logDebug { "Failed to register network callback: ${e.message}" }
+ }
+
+ // Initial assessment
+ assessNetworkQuality()
+
+ // Periodic latency checks
+ scope.launch {
+ while (isActive) {
+ measureLatency()
+ delay(LATENCY_CHECK_INTERVAL_MS)
+ }
+ }
+ }
+
+ /**
+ * Stop monitoring network quality.
+ */
+ @Synchronized
+ fun stopMonitoring() {
+ if (!isMonitoring) return
+ isMonitoring = false
+
+ try {
+ connectivityManager.unregisterNetworkCallback(callback)
+ } catch (e: Exception) {
+ logDebug { "Failed to unregister network callback: ${e.message}" }
+ }
+ }
+
+ /**
+ * Report bytes downloaded to help estimate bandwidth.
+ * Call this when downloading images, pages, or other content.
+ */
+ fun reportDownload(bytes: Long, durationMs: Long) {
+ if (durationMs <= 0 || bytes <= 0) return
+
+ val kbps = ((bytes * 8 * 1000) / (durationMs * 1024)).toInt()
+ addBandwidthSample(kbps)
+
+ logDebug { "Download reported: ${bytes}B in ${durationMs}ms = ${kbps}kbps" }
+ }
+
+ /**
+ * Start tracking a download.
+ */
+ fun startDownloadTracking() {
+ downloadStartTime.set(System.currentTimeMillis())
+ bytesDownloaded.set(0)
+ }
+
+ /**
+ * Add bytes to the current download tracking.
+ */
+ fun addDownloadedBytes(bytes: Long) {
+ bytesDownloaded.addAndGet(bytes)
+ }
+
+ /**
+ * End tracking and report the download.
+ */
+ fun endDownloadTracking() {
+ val start = downloadStartTime.getAndSet(0)
+ val bytes = bytesDownloaded.getAndSet(0)
+ if (start > 0 && bytes > 0) {
+ val duration = System.currentTimeMillis() - start
+ reportDownload(bytes, duration)
+ }
+ }
+
+ /**
+ * Get recommended concurrent connection limit based on network quality.
+ */
+ fun getRecommendedConcurrency(): Int {
+ return when (_quality.value) {
+ NetworkQuality.EXCELLENT -> 6
+ NetworkQuality.GOOD -> 4
+ NetworkQuality.MODERATE -> 2
+ NetworkQuality.POOR -> 1
+ NetworkQuality.OFFLINE -> 0
+ }
+ }
+
+ /**
+ * Get recommended timeout multiplier based on network quality.
+ */
+ fun getTimeoutMultiplier(): Float {
+ return when (_quality.value) {
+ NetworkQuality.EXCELLENT -> 0.8f
+ NetworkQuality.GOOD -> 1.0f
+ NetworkQuality.MODERATE -> 1.5f
+ NetworkQuality.POOR -> 2.5f
+ NetworkQuality.OFFLINE -> 1.0f
+ }
+ }
+
+ /**
+ * Check if preloading should be enabled.
+ */
+ fun shouldPreload(): Boolean {
+ return _quality.value.allowsPreloading
+ }
+
+ /**
+ * Check if high quality images should be used.
+ */
+ fun shouldUseHighQuality(): Boolean {
+ return _quality.value.allowsHighQuality
+ }
+
+ /**
+ * Get the number of pages to preload based on network quality.
+ */
+ fun getPreloadPageCount(): Int {
+ return when (_quality.value) {
+ NetworkQuality.EXCELLENT -> 5
+ NetworkQuality.GOOD -> 3
+ NetworkQuality.MODERATE -> 1
+ NetworkQuality.POOR -> 0
+ NetworkQuality.OFFLINE -> 0
+ }
+ }
+
+ private fun assessNetworkQuality() {
+ val network = connectivityManager.activeNetwork
+ if (network == null) {
+ _quality.value = NetworkQuality.OFFLINE
+ return
+ }
+
+ val capabilities = connectivityManager.getNetworkCapabilities(network)
+ if (capabilities == null) {
+ _quality.value = NetworkQuality.OFFLINE
+ return
+ }
+
+ // Determine base quality from network type and capabilities
+ val baseQuality = assessBaseQuality(capabilities)
+
+ // Adjust based on measured bandwidth if available
+ val avgBandwidth = getAverageBandwidth()
+ val bandwidthQuality = when {
+ avgBandwidth <= 0 -> baseQuality
+ avgBandwidth < 256 -> NetworkQuality.POOR
+ avgBandwidth < 1024 -> NetworkQuality.MODERATE
+ avgBandwidth < 5120 -> NetworkQuality.GOOD
+ else -> NetworkQuality.EXCELLENT
+ }
+
+ // Adjust based on latency if available
+ val latency = _latencyMs.value
+ val latencyQuality = when {
+ latency <= 0 -> baseQuality
+ latency > 1000 -> NetworkQuality.POOR
+ latency > 500 -> NetworkQuality.MODERATE
+ latency > 200 -> NetworkQuality.GOOD
+ else -> NetworkQuality.EXCELLENT
+ }
+
+ // Use the lowest quality assessment
+ val finalQuality = minOf(baseQuality, bandwidthQuality, latencyQuality, compareBy { it.level })
+ _quality.value = finalQuality
+
+ logDebug { "Network quality assessed: $finalQuality (base=$baseQuality, bw=$bandwidthQuality, lat=$latencyQuality)" }
+ }
+
+ private fun assessBaseQuality(capabilities: NetworkCapabilities): NetworkQuality {
+ // Check for validated internet
+ if (!capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)) {
+ return NetworkQuality.OFFLINE
+ }
+
+ // Check transport type
+ return when {
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) -> {
+ assessWifiQuality(capabilities)
+ }
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET) -> {
+ NetworkQuality.EXCELLENT
+ }
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR) -> {
+ assessCellularQuality(capabilities)
+ }
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN) -> {
+ // VPN quality depends on underlying network
+ NetworkQuality.GOOD
+ }
+ else -> NetworkQuality.MODERATE
+ }
+ }
+
+ private fun assessWifiQuality(capabilities: NetworkCapabilities): NetworkQuality {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ val downstreamBandwidth = capabilities.linkDownstreamBandwidthKbps
+ // Only downstream matters here; manga reading is download-heavy
+
+ return when {
+ downstreamBandwidth >= 50000 -> NetworkQuality.EXCELLENT
+ downstreamBandwidth >= 10000 -> NetworkQuality.GOOD
+ downstreamBandwidth >= 1000 -> NetworkQuality.MODERATE
+ downstreamBandwidth > 0 -> NetworkQuality.POOR
+ else -> NetworkQuality.GOOD // Unknown, assume good
+ }
+ }
+ return NetworkQuality.GOOD
+ }
+
+ private fun assessCellularQuality(capabilities: NetworkCapabilities): NetworkQuality {
+ // Try to get bandwidth info from capabilities
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+ val downstreamBandwidth = capabilities.linkDownstreamBandwidthKbps
+
+ return when {
+ downstreamBandwidth >= 20000 -> NetworkQuality.EXCELLENT // 5G/LTE
+ downstreamBandwidth >= 5000 -> NetworkQuality.GOOD // LTE
+ downstreamBandwidth >= 1000 -> NetworkQuality.MODERATE // 3G
+ downstreamBandwidth > 0 -> NetworkQuality.POOR // 2G
+ else -> assessCellularByTelephony()
+ }
+ }
+ return assessCellularByTelephony()
+ }
+
+ private fun assessCellularByTelephony(): NetworkQuality {
+ val telephonyManager = context.getSystemService(Context.TELEPHONY_SERVICE) as? TelephonyManager
+ ?: return NetworkQuality.MODERATE
+
+ return try {
+ when (telephonyManager.dataNetworkType) {
+ TelephonyManager.NETWORK_TYPE_NR -> NetworkQuality.EXCELLENT // 5G
+ TelephonyManager.NETWORK_TYPE_LTE -> NetworkQuality.GOOD
+ TelephonyManager.NETWORK_TYPE_HSPAP,
+ TelephonyManager.NETWORK_TYPE_HSPA,
+ TelephonyManager.NETWORK_TYPE_HSDPA,
+ TelephonyManager.NETWORK_TYPE_HSUPA -> NetworkQuality.MODERATE
+ TelephonyManager.NETWORK_TYPE_UMTS,
+ TelephonyManager.NETWORK_TYPE_EVDO_0,
+ TelephonyManager.NETWORK_TYPE_EVDO_A,
+ TelephonyManager.NETWORK_TYPE_EVDO_B -> NetworkQuality.MODERATE
+ TelephonyManager.NETWORK_TYPE_EDGE,
+ TelephonyManager.NETWORK_TYPE_GPRS,
+ TelephonyManager.NETWORK_TYPE_CDMA,
+ TelephonyManager.NETWORK_TYPE_1xRTT -> NetworkQuality.POOR
+ else -> NetworkQuality.MODERATE
+ }
+ } catch (e: SecurityException) {
+ NetworkQuality.MODERATE
+ }
+ }
+
+ private fun addBandwidthSample(kbps: Int) {
+ synchronized(bandwidthSamples) {
+ bandwidthSamples.add(kbps)
+ while (bandwidthSamples.size > maxSamples) {
+ bandwidthSamples.removeAt(0)
+ }
+ _bandwidthKbps.value = getAverageBandwidth()
+ }
+ assessNetworkQuality()
+ }
+
+ private fun getAverageBandwidth(): Int {
+ synchronized(bandwidthSamples) {
+ if (bandwidthSamples.isEmpty()) return 0
+ return bandwidthSamples.average().roundToInt()
+ }
+ }
+
+ private suspend fun measureLatency() {
+ if (!isMonitoring) return
+
+ try {
+ val startTime = System.currentTimeMillis()
+ val url = URL(LATENCY_TEST_URL)
+ val connection = url.openConnection() as HttpURLConnection
+ connection.connectTimeout = 5000
+ connection.readTimeout = 5000
+ connection.requestMethod = "HEAD"
+
+ try {
+ connection.connect()
+ val latency = System.currentTimeMillis() - startTime
+ _latencyMs.value = latency
+ logDebug { "Latency measured: ${latency}ms" }
+ assessNetworkQuality()
+ } finally {
+ connection.disconnect()
+ }
+ } catch (e: IOException) {
+ logDebug { "Latency measurement failed: ${e.message}" }
+ // Keep the last known latency
+ } catch (e: Exception) {
+ logDebug { "Latency measurement error: ${e.message}" }
+ }
+ }
+
+ private fun <T> minOf(a: T, b: T, c: T, comparator: Comparator<T>): T {
+ var min = a
+ if (comparator.compare(b, min) < 0) min = b
+ if (comparator.compare(c, min) < 0) min = c
+ return min
+ }
+
+ private inner class NetworkCallbackImpl : ConnectivityManager.NetworkCallback() {
+ override fun onAvailable(network: Network) {
+ assessNetworkQuality()
+ }
+
+ override fun onLost(network: Network) {
+ _quality.value = NetworkQuality.OFFLINE
+ }
+
+ override fun onCapabilitiesChanged(network: Network, capabilities: NetworkCapabilities) {
+ assessNetworkQuality()
+ }
+
+ override fun onUnavailable() {
+ _quality.value = NetworkQuality.OFFLINE
+ }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "NetworkQualityMonitor"
+ private const val LATENCY_CHECK_INTERVAL_MS = 60_000L // 1 minute
+ private const val LATENCY_TEST_URL = "https://www.google.com/generate_204"
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/RateLimitInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/RateLimitInterceptor.kt
index 79584c8..75c8d72 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/RateLimitInterceptor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/RateLimitInterceptor.kt
@@ -1,29 +1,115 @@
package org.koitharu.kotatsu.core.network
+import android.util.Log
+import kotlinx.coroutines.delay
+import kotlinx.coroutines.runBlocking
import okhttp3.Interceptor
import okhttp3.Response
import okhttp3.internal.closeQuietly
+import org.koitharu.kotatsu.BuildConfig
import org.koitharu.kotatsu.parsers.exception.TooManyRequestExceptions
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
import java.util.concurrent.TimeUnit
+import kotlin.math.min
+import kotlin.random.Random
+
+/**
+ * Interceptor that handles HTTP 429 (Too Many Requests) responses with exponential backoff.
+ *
+ * Features:
+ * - Configurable max retry attempts
+ * - Exponential backoff with jitter to prevent thundering herd
+ * - Respects Retry-After header when present
+ * - Falls back to calculated delay when header is missing
+ *
+ * @param maxRetries Maximum number of retry attempts (default: 3)
+ * @param initialDelayMs Initial delay in milliseconds (default: 1000ms)
+ * @param maxDelayMs Maximum delay cap in milliseconds (default: 30000ms)
+ * @param backoffMultiplier Multiplier for exponential growth (default: 2.0)
+ */
+class RateLimitInterceptor(
+ private val maxRetries: Int = DEFAULT_MAX_RETRIES,
+ private val initialDelayMs: Long = DEFAULT_INITIAL_DELAY_MS,
+ private val maxDelayMs: Long = DEFAULT_MAX_DELAY_MS,
+ private val backoffMultiplier: Double = DEFAULT_BACKOFF_MULTIPLIER,
+) : Interceptor {
-class RateLimitInterceptor : Interceptor {
override fun intercept(chain: Interceptor.Chain): Response {
- val response = chain.proceed(chain.request())
+ var response = chain.proceed(chain.request())
+ var retryCount = 0
+
+ while (response.code == 429 && retryCount < maxRetries) {
+ val request = response.request
+ val retryAfterMs = response.header(CommonHeaders.RETRY_AFTER)?.parseRetryAfter()
+ response.closeQuietly()
+
+ // Calculate delay: use Retry-After header if available, otherwise exponential backoff
+ val calculatedDelay = calculateBackoffDelay(retryCount)
+ val delayMs = retryAfterMs?.coerceAtMost(maxDelayMs) ?: calculatedDelay
+
+ logDebug { "Rate limited (429) on ${request.url.host}, retry ${retryCount + 1}/$maxRetries after ${delayMs}ms" }
+
+ // Wait before retrying
+ runBlocking {
+ delay(delayMs)
+ }
+
+ retryCount++
+ response = chain.proceed(request)
+ }
+
+ // If still rate limited after all retries, throw exception
if (response.code == 429) {
val request = response.request
+ val retryAfter = response.header(CommonHeaders.RETRY_AFTER)?.parseRetryAfter() ?: 0L
response.closeQuietly()
+ logDebug { "Rate limit exceeded after $maxRetries retries on ${request.url}" }
throw TooManyRequestExceptions(
url = request.url.toString(),
- retryAfter = response.header(CommonHeaders.RETRY_AFTER)?.parseRetryAfter() ?: 0L,
+ retryAfter = retryAfter,
)
}
+
return response
}
- private fun String.parseRetryAfter(): Long {
- return toLongOrNull()?.let { TimeUnit.SECONDS.toMillis(it) }
- ?: ZonedDateTime.parse(this, DateTimeFormatter.RFC_1123_DATE_TIME).toInstant().toEpochMilli()
+ /**
+ * Calculate exponential backoff delay with jitter.
+ * Formula: min(maxDelay, initialDelay * (multiplier ^ retryCount)) + random jitter
+ */
+ private fun calculateBackoffDelay(retryCount: Int): Long {
+ val exponentialDelay = initialDelayMs * Math.pow(backoffMultiplier, retryCount.toDouble()).toLong()
+ val cappedDelay = min(exponentialDelay, maxDelayMs)
+ // Add jitter (±25%) to prevent thundering herd
+ val jitter = (cappedDelay * 0.25 * (Random.nextDouble() - 0.5)).toLong()
+ return (cappedDelay + jitter).coerceAtLeast(initialDelayMs)
+ }
+
+ private fun String.parseRetryAfter(): Long? {
+ // Try parsing as seconds first
+ toLongOrNull()?.let { seconds ->
+ return TimeUnit.SECONDS.toMillis(seconds)
+ }
+ // Try parsing as HTTP date
+ return runCatching {
+ val dateTime = ZonedDateTime.parse(this, DateTimeFormatter.RFC_1123_DATE_TIME)
+ val delayMs = dateTime.toInstant().toEpochMilli() - System.currentTimeMillis()
+ delayMs.coerceAtLeast(0L)
+ }.getOrNull()
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "RateLimitInterceptor"
+ private const val DEFAULT_MAX_RETRIES = 3
+ private const val DEFAULT_INITIAL_DELAY_MS = 1000L
+ private const val DEFAULT_MAX_DELAY_MS = 30_000L
+ private const val DEFAULT_BACKOFF_MULTIPLIER = 2.0
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/BaseImageProxyInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/BaseImageProxyInterceptor.kt
index 40eb783..11e4c99 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/BaseImageProxyInterceptor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/BaseImageProxyInterceptor.kt
@@ -1,7 +1,6 @@
package org.koitharu.kotatsu.core.network.imageproxy
import android.util.Log
-import androidx.collection.ArraySet
import coil3.intercept.Interceptor
import coil3.network.HttpException
import coil3.request.ErrorResult
@@ -21,11 +20,15 @@ import org.koitharu.kotatsu.parsers.util.await
import org.koitharu.kotatsu.parsers.util.isHttpOrHttps
import org.koitharu.kotatsu.parsers.util.runCatchingCancellable
import java.net.HttpURLConnection
-import java.util.Collections
-abstract class BaseImageProxyInterceptor : ImageProxyInterceptor {
-
- private val blacklist = Collections.synchronizedSet(ArraySet())
+/**
+ * Base class for image proxy interceptors with persistent blacklist support.
+ *
+ * @param blacklistManager Manager for persistent host blacklisting (optional for backward compatibility)
+ */
+abstract class BaseImageProxyInterceptor(
+ private val blacklistManager: ProxyBlacklistManager? = null,
+) : ImageProxyInterceptor {
final override suspend fun intercept(chain: Interceptor.Chain): ImageResult {
val request = chain.request
@@ -34,7 +37,7 @@ abstract class BaseImageProxyInterceptor : ImageProxyInterceptor {
is String -> data.toHttpUrlOrNull()
else -> null
}
- if (url == null || !url.isHttpOrHttps || url.host in blacklist) {
+ if (url == null || !url.isHttpOrHttps || isBlacklisted(url.host)) {
return chain.proceed()
}
val newRequest = onInterceptImageRequest(request, url)
@@ -44,7 +47,7 @@ abstract class BaseImageProxyInterceptor : ImageProxyInterceptor {
logDebug(result.throwable, newRequest.data)
chain.proceed().also {
if (it is SuccessResult && result.throwable.isBlockedByServer()) {
- blacklist.add(url.host)
+ addToBlacklist(url.host)
}
}
}
@@ -59,7 +62,7 @@ abstract class BaseImageProxyInterceptor : ImageProxyInterceptor {
logDebug(error, newRequest.url)
okHttp.doCall(request).also {
if (error.isBlockedByServer()) {
- blacklist.add(request.url.host)
+ addToBlacklist(request.url.host)
}
}
}.getOrThrow()
@@ -69,6 +72,14 @@ abstract class BaseImageProxyInterceptor : ImageProxyInterceptor {
protected abstract suspend fun onInterceptPageRequest(request: Request): Request
+ private fun isBlacklisted(host: String): Boolean {
+ return blacklistManager?.isBlacklisted(host) == true
+ }
+
+ private fun addToBlacklist(host: String) {
+ blacklistManager?.addToBlacklist(host)
+ }
+
private suspend fun OkHttpClient.doCall(request: Request): Response {
return newCall(request).await().ensureSuccess()
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ProxyBlacklistManager.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ProxyBlacklistManager.kt
new file mode 100644
index 0000000..1a11f17
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ProxyBlacklistManager.kt
@@ -0,0 +1,165 @@
+package org.koitharu.kotatsu.core.network.imageproxy
+
+import android.content.Context
+import android.util.Log
+import dagger.hilt.android.qualifiers.ApplicationContext
+import org.koitharu.kotatsu.BuildConfig
+import java.util.concurrent.TimeUnit
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Manages a persistent blacklist of hosts that have blocked proxy requests.
+ * Blacklisted hosts are stored in SharedPreferences with timestamps for TTL expiration.
+ *
+ * Features:
+ * - Persistent storage across app restarts
+ * - Time-based expiration (default: 6 hours)
+ * - Thread-safe operations
+ * - Manual clear capability
+ */
+@Singleton
+class ProxyBlacklistManager @Inject constructor(
+ @ApplicationContext private val context: Context,
+) {
+
+ private val blacklist = mutableSetOf()
+ private val timestamps = mutableMapOf()
+ private val lock = Any()
+
+ private val prefs by lazy {
+ context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE)
+ }
+
+ init {
+ loadFromStorage()
+ }
+
+ /**
+ * Check if a host is blacklisted (and not expired).
+ */
+ fun isBlacklisted(host: String): Boolean {
+ synchronized(lock) {
+ cleanupExpired()
+ return host in blacklist
+ }
+ }
+
+ /**
+ * Add a host to the blacklist.
+ */
+ fun addToBlacklist(host: String) {
+ synchronized(lock) {
+ if (blacklist.add(host)) {
+ timestamps[host] = System.currentTimeMillis()
+ saveToStorage()
+ logDebug { "Added to proxy blacklist: $host" }
+ }
+ }
+ }
+
+ /**
+ * Remove a specific host from the blacklist.
+ */
+ fun removeFromBlacklist(host: String) {
+ synchronized(lock) {
+ if (blacklist.remove(host)) {
+ timestamps.remove(host)
+ saveToStorage()
+ logDebug { "Removed from proxy blacklist: $host" }
+ }
+ }
+ }
+
+ /**
+ * Clear all blacklisted hosts.
+ */
+ fun clearBlacklist() {
+ synchronized(lock) {
+ blacklist.clear()
+ timestamps.clear()
+ prefs.edit()
+ .remove(KEY_HOSTS)
+ .remove(KEY_TIMESTAMPS)
+ .apply()
+ logDebug { "Proxy blacklist cleared" }
+ }
+ }
+
+ /**
+ * Get the current blacklist size.
+ */
+ val size: Int
+ get() = synchronized(lock) {
+ cleanupExpired()
+ blacklist.size
+ }
+
+ private fun cleanupExpired() {
+ val now = System.currentTimeMillis()
+ val expired = timestamps.filter { (_, timestamp) ->
+ now - timestamp > BLACKLIST_TTL_MS
+ }.keys
+
+ if (expired.isNotEmpty()) {
+ expired.forEach { host ->
+ blacklist.remove(host)
+ timestamps.remove(host)
+ logDebug { "Expired from proxy blacklist: $host" }
+ }
+ saveToStorage()
+ }
+ }
+
+ private fun saveToStorage() {
+ val hostSet = blacklist.toSet()
+ val timestampSet = timestamps.map { (host, time) -> "$host:$time" }.toSet()
+ prefs.edit()
+ .putStringSet(KEY_HOSTS, hostSet)
+ .putStringSet(KEY_TIMESTAMPS, timestampSet)
+ .apply()
+ }
+
+ private fun loadFromStorage() {
+ val hosts = prefs.getStringSet(KEY_HOSTS, emptySet()) ?: emptySet()
+ val timestampEntries = prefs.getStringSet(KEY_TIMESTAMPS, emptySet()) ?: emptySet()
+
+ // Parse timestamps
+ val timestampMap = timestampEntries.mapNotNull { entry ->
+ val parts = entry.split(":")
+ if (parts.size >= 2) {
+ val host = parts.dropLast(1).joinToString(":") // Handle hosts with colons (IPv6)
+ val time = parts.last().toLongOrNull()
+ if (time != null) host to time else null
+ } else null
+ }.toMap()
+
+ // Load only non-expired entries
+ val now = System.currentTimeMillis()
+ hosts.forEach { host ->
+ val timestamp = timestampMap[host] ?: now
+ if (now - timestamp <= BLACKLIST_TTL_MS) {
+ blacklist.add(host)
+ timestamps[host] = timestamp
+ }
+ }
+
+ logDebug { "Loaded ${blacklist.size} hosts from proxy blacklist" }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "ProxyBlacklistManager"
+ private const val PREFS_NAME = "image_proxy_blacklist"
+ private const val KEY_HOSTS = "hosts"
+ private const val KEY_TIMESTAMPS = "timestamps"
+
+ /** Blacklist entries expire after 6 hours */
+ val BLACKLIST_TTL_MS = TimeUnit.HOURS.toMillis(6)
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/RealImageProxyInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/RealImageProxyInterceptor.kt
index c3b7cfe..16727ad 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/RealImageProxyInterceptor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/RealImageProxyInterceptor.kt
@@ -17,6 +17,7 @@ import javax.inject.Singleton
@Singleton
class RealImageProxyInterceptor @Inject constructor(
private val settings: AppSettings,
+ private val blacklistManager: ProxyBlacklistManager,
) : ImageProxyInterceptor {
private val delegate = settings.observeAsStateFlow(
@@ -35,8 +36,8 @@ class RealImageProxyInterceptor @Inject constructor(
private fun createDelegate(): ImageProxyInterceptor? = when (val proxy = settings.imagesProxy) {
-1 -> null
- 0 -> WsrvNlProxyInterceptor()
- 1 -> ZeroMsProxyInterceptor()
+ 0 -> WsrvNlProxyInterceptor(blacklistManager)
+ 1 -> ZeroMsProxyInterceptor(blacklistManager)
else -> error("Unsupported images proxy $proxy")
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/WsrvNlProxyInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/WsrvNlProxyInterceptor.kt
index 13f821b..753583f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/WsrvNlProxyInterceptor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/WsrvNlProxyInterceptor.kt
@@ -6,7 +6,9 @@ import coil3.size.isOriginal
import okhttp3.HttpUrl
import okhttp3.Request
-class WsrvNlProxyInterceptor : BaseImageProxyInterceptor() {
+class WsrvNlProxyInterceptor(
+ blacklistManager: ProxyBlacklistManager? = null,
+) : BaseImageProxyInterceptor(blacklistManager) {
override suspend fun onInterceptImageRequest(request: ImageRequest, url: HttpUrl): ImageRequest {
val newUrl = HttpUrl.Builder()
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ZeroMsProxyInterceptor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ZeroMsProxyInterceptor.kt
index 9c6d01e..2e42c6f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ZeroMsProxyInterceptor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/imageproxy/ZeroMsProxyInterceptor.kt
@@ -5,7 +5,9 @@ import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrl
import okhttp3.Request
-class ZeroMsProxyInterceptor : BaseImageProxyInterceptor() {
+class ZeroMsProxyInterceptor(
+ blacklistManager: ProxyBlacklistManager? = null,
+) : BaseImageProxyInterceptor(blacklistManager) {
override suspend fun onInterceptImageRequest(request: ImageRequest, url: HttpUrl): ImageRequest {
if (url.host == "v.recipes") {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/proxy/ProxyProvider.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/proxy/ProxyProvider.kt
index 498e462..ad3c95f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/proxy/ProxyProvider.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/proxy/ProxyProvider.kt
@@ -13,6 +13,7 @@ import okhttp3.Response
import okhttp3.Route
import okio.IOException
import org.koitharu.kotatsu.core.exceptions.ProxyConfigException
+import org.koitharu.kotatsu.core.exceptions.ProxyWebViewUnsupportedException
import org.koitharu.kotatsu.core.network.CommonHeaders
import org.koitharu.kotatsu.core.prefs.AppSettings
import org.koitharu.kotatsu.core.util.ext.printStackTraceDebug
@@ -56,7 +57,7 @@ class ProxyProvider @Inject constructor(
val isProxyEnabled = isProxyEnabled()
if (!WebViewFeature.isFeatureSupported(WebViewFeature.PROXY_OVERRIDE)) {
if (isProxyEnabled) {
- throw IllegalArgumentException("Proxy for WebView is not supported") // TODO localize
+ throw ProxyWebViewUnsupportedException()
}
} else {
val controller = ProxyController.getInstance()
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/AdBlock.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/AdBlock.kt
index 8f05272..cbb9dfe 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/AdBlock.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/AdBlock.kt
@@ -57,6 +57,39 @@ class AdBlock @Inject constructor(
} ?: true
}
+ /**
+ * Returns JavaScript code to inject into WebView for hiding ad elements.
+ * Should be called after page load via WebView.evaluateJavascript().
+ * Returns null if adblock is disabled or no CSS rules are available.
+ */
+ @WorkerThread
+ fun getElementHidingScript(): String? {
+ if (!settings.isAdBlockEnabled) {
+ return null
+ }
+ val rulesList = synchronized(this) {
+ rules ?: parseRules().also { rules = it }
+ } ?: return null
+
+ val selectors = rulesList.elementHidingSelectors
+ if (selectors.isEmpty()) {
+ return null
+ }
+
+ // Build CSS rule to hide all matching elements
+ val cssRule = selectors.joinToString(",") { it.replace("'", "\\'") }
+ return buildString {
+ append("(function(){")
+ append("var style=document.createElement('style');")
+ append("style.type='text/css';")
+ append("style.appendChild(document.createTextNode('")
+ append(cssRule)
+ append("{display:none!important}'));")
+ append("document.head.appendChild(style);")
+ append("})();")
+ }
+ }
+
@WorkerThread
private fun parseRules() = runCatchingCancellable {
listFile(context).useLines { lines ->
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/Rule.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/Rule.kt
index 1aca71a..81ac729 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/Rule.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/Rule.kt
@@ -43,14 +43,27 @@ sealed interface Rule {
if (baseUrl == null) {
return true
}
+
+ val baseDomain = baseUrl.topPrivateDomain() ?: baseUrl.host
+
+ // Check domain restrictions
+ if (domains != null && baseDomain !in domains) {
+ return false
+ }
+ if (domainsNot != null && baseDomain in domainsNot) {
+ return false
+ }
+
+ // Check third-party modifier
thirdParty?.let {
val isThirdPartyRequest =
- (url.topPrivateDomain() ?: url.host) != (baseUrl.topPrivateDomain() ?: baseUrl.host)
+ (url.topPrivateDomain() ?: url.host) != baseDomain
if (isThirdPartyRequest != it) {
return false
}
}
- // TODO check other modifiers
+
+ // Note: script modifier is not checked here as we don't have resource type info
return true
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/RulesList.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/RulesList.kt
index 58e2ba0..6b3003d 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/RulesList.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/network/webview/adblock/RulesList.kt
@@ -12,6 +12,14 @@ class RulesList {
private val blockRules = ArrayList()
private val allowRules = ArrayList()
+ private val cssSelectors = ArrayList()
+
+ /**
+ * CSS selectors to inject for element hiding.
+ * These should be applied via WebView's evaluateJavascript after page load.
+ */
+ val elementHidingSelectors: List
+ get() = cssSelectors
operator fun get(url: HttpUrl, baseUrl: HttpUrl?): Rule? {
val rule = blockRules.find { x -> x(url, baseUrl) }
@@ -26,6 +34,7 @@ class RulesList {
fun trimToSize() {
blockRules.trimToSize()
allowRules.trimToSize()
+ cssSelectors.trimToSize()
}
private fun String.addImpl(isWhitelist: Boolean, modifiers: String?) {
@@ -53,7 +62,11 @@ class RulesList {
}
startsWith("##") -> {
- // TODO css rules
+ // CSS element hiding selector (generic, applies to all domains)
+ val selector = substring(2).trim()
+ if (selector.isNotEmpty()) {
+ cssSelectors += selector
+ }
}
else -> {
@@ -73,19 +86,41 @@ class RulesList {
}
var script: Boolean? = null
var thirdParty: Boolean? = null
- options.split(',').forEach {
- val isNot = it.startsWith('~')
- when (it.removePrefix("~")) {
- "script" -> script = !isNot
- "third-party" -> thirdParty = !isNot
+ var domains: MutableSet? = null
+ var domainsNot: MutableSet? = null
+
+ options.split(',').forEach { option ->
+ val isNot = option.startsWith('~')
+ val optionName = option.removePrefix("~")
+
+ when {
+ optionName == "script" -> script = !isNot
+ optionName == "third-party" -> thirdParty = !isNot
+ optionName.startsWith("domain=") -> {
+ // Parse domain restriction: domain=example.com|~exclude.com
+ val domainList = optionName.removePrefix("domain=").split('|')
+ domainList.forEach { domain ->
+ val isDomainNot = domain.startsWith('~')
+ val domainName = domain.removePrefix("~").lowercase()
+ if (domainName.isNotEmpty()) {
+ if (isDomainNot) {
+ if (domainsNot == null) domainsNot = mutableSetOf()
+ domainsNot?.add(domainName)
+ } else {
+ if (domains == null) domains = mutableSetOf()
+ domains?.add(domainName)
+ }
+ }
+ }
+ }
}
}
return Rule.WithModifiers(
baseRule = this,
script = script,
thirdParty = thirdParty,
- domains = null, //TODO
- domainsNot = null, //TODO
+ domains = domains,
+ domainsNot = domainsNot,
)
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/os/NetworkState.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/os/NetworkState.kt
index 6bef33c..f418623 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/os/NetworkState.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/os/NetworkState.kt
@@ -54,6 +54,39 @@ class NetworkState(
fun isOfflineOrRestricted() = !isOnline() || isRestricted()
+ /**
+ * Check if currently connected via WiFi.
+ */
+ fun isWifi(): Boolean {
+ val network = connectivityManager.activeNetwork ?: return false
+ val capabilities = connectivityManager.getNetworkCapabilities(network) ?: return false
+ return capabilities.hasTransport(NetworkCapabilities.TRANSPORT_WIFI)
+ }
+
+ /**
+ * Check if currently connected via cellular.
+ */
+ fun isCellular(): Boolean {
+ val network = connectivityManager.activeNetwork ?: return false
+ val capabilities = connectivityManager.getNetworkCapabilities(network) ?: return false
+ return capabilities.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR)
+ }
+
+ /**
+ * Get the current transport type as a readable string.
+ */
+ fun getTransportType(): String {
+ val network = connectivityManager.activeNetwork ?: return "none"
+ val capabilities = connectivityManager.getNetworkCapabilities(network) ?: return "unknown"
+ return when {
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) -> "wifi"
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR) -> "cellular"
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET) -> "ethernet"
+ capabilities.hasTransport(NetworkCapabilities.TRANSPORT_VPN) -> "vpn"
+ else -> "other"
+ }
+ }
+
suspend fun awaitForConnection() {
if (value) {
return
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/BitmapWrapper.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/BitmapWrapper.kt
index 5c8d4fd..263d8bf 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/BitmapWrapper.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/BitmapWrapper.kt
@@ -1,6 +1,8 @@
package org.koitharu.kotatsu.core.parser
import android.graphics.Canvas
+import android.os.Build
+import androidx.annotation.IntRange
import androidx.core.graphics.createBitmap
import org.koitharu.kotatsu.parsers.bitmap.Bitmap
import org.koitharu.kotatsu.parsers.bitmap.Rect
@@ -8,6 +10,20 @@ import java.io.OutputStream
import android.graphics.Bitmap as AndroidBitmap
import android.graphics.Rect as AndroidRect
+/**
+ * Supported compression formats for bitmap output.
+ */
+enum class BitmapCompressFormat {
+ /** Lossless compression, best for graphics with transparency */
+ PNG,
+ /** Lossy compression, smaller files, good for photos */
+ JPEG,
+ /** Modern format with better compression, requires API 14+ (lossy) or API 30+ (lossless) */
+ WEBP,
+ /** Lossless WebP, requires API 30+ */
+ WEBP_LOSSLESS,
+}
+
class BitmapWrapper private constructor(
private val androidBitmap: AndroidBitmap,
) : Bitmap, AutoCloseable {
@@ -29,8 +45,48 @@ class BitmapWrapper private constructor(
androidBitmap.recycle()
}
+ /**
+ * Compress bitmap to output stream with default PNG format.
+ * Use [compressTo] with format parameter for more control.
+ */
fun compressTo(output: OutputStream) {
- androidBitmap.compress(AndroidBitmap.CompressFormat.PNG, 100, output)
+ compressTo(output, BitmapCompressFormat.PNG)
+ }
+
+ /**
+ * Compress bitmap to output stream with specified format and quality.
+ *
+ * @param output The output stream to write compressed data to
+ * @param format The compression format to use
+ * @param quality Quality hint for lossy formats (0-100). Ignored for PNG and WEBP_LOSSLESS.
+ * Higher values produce better quality but larger files.
+ */
+ fun compressTo(
+ output: OutputStream,
+ format: BitmapCompressFormat,
+ @IntRange(from = 0, to = 100) quality: Int = 90,
+ ) {
+ val androidFormat = when (format) {
+ BitmapCompressFormat.PNG -> AndroidBitmap.CompressFormat.PNG
+ BitmapCompressFormat.JPEG -> AndroidBitmap.CompressFormat.JPEG
+ BitmapCompressFormat.WEBP -> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
+ AndroidBitmap.CompressFormat.WEBP_LOSSY
+ } else {
+ @Suppress("DEPRECATION")
+ AndroidBitmap.CompressFormat.WEBP
+ }
+ BitmapCompressFormat.WEBP_LOSSLESS -> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
+ AndroidBitmap.CompressFormat.WEBP_LOSSLESS
+ } else {
+ // Fallback to PNG for lossless on older API
+ AndroidBitmap.CompressFormat.PNG
+ }
+ }
+ val effectiveQuality = when (format) {
+ BitmapCompressFormat.PNG, BitmapCompressFormat.WEBP_LOSSLESS -> 100 // Quality ignored for lossless
+ else -> quality.coerceIn(0, 100)
+ }
+ androidBitmap.compress(androidFormat, effectiveQuality, output)
}
companion object {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MangaLoaderContextImpl.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MangaLoaderContextImpl.kt
index 662a197..b234690 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MangaLoaderContextImpl.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MangaLoaderContextImpl.kt
@@ -16,6 +16,7 @@ import org.koitharu.kotatsu.core.image.BitmapDecoderCompat
import org.koitharu.kotatsu.core.network.MangaHttpClient
import org.koitharu.kotatsu.core.network.cookies.MutableCookieJar
import org.koitharu.kotatsu.core.network.webview.WebViewExecutor
+import org.koitharu.kotatsu.core.prefs.AppSettings
import org.koitharu.kotatsu.core.prefs.SourceSettings
import org.koitharu.kotatsu.core.util.ext.toList
import org.koitharu.kotatsu.core.util.ext.toMimeType
@@ -28,7 +29,6 @@ import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.network.UserAgents
import org.koitharu.kotatsu.parsers.util.map
import java.util.Locale
-import java.util.concurrent.TimeUnit
import javax.inject.Inject
import javax.inject.Singleton
@@ -38,9 +38,15 @@ class MangaLoaderContextImpl @Inject constructor(
override val cookieJar: MutableCookieJar,
@ApplicationContext private val androidContext: Context,
private val webViewExecutor: WebViewExecutor,
+ private val appSettings: AppSettings,
) : MangaLoaderContext() {
- private val jsTimeout = TimeUnit.SECONDS.toMillis(4)
+ /**
+ * JS evaluation timeout - configurable via settings.
+ * Useful for complex CloudFlare challenges that may need more time.
+ */
+ private val jsTimeout: Long
+ get() = appSettings.jsTimeoutMs
@Deprecated("Provide a base url")
@SuppressLint("SetJavaScriptEnabled")
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MirrorSwitcher.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MirrorSwitcher.kt
index d59a804..5e529b2 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MirrorSwitcher.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/MirrorSwitcher.kt
@@ -1,6 +1,8 @@
package org.koitharu.kotatsu.core.parser
+import android.content.Context
import android.util.Log
+import dagger.hilt.android.qualifiers.ApplicationContext
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import okhttp3.OkHttpClient
@@ -12,21 +14,50 @@ import org.koitharu.kotatsu.parsers.model.MangaParserSource
import org.koitharu.kotatsu.parsers.util.await
import org.koitharu.kotatsu.parsers.util.runCatchingCancellable
import java.util.EnumSet
+import java.util.concurrent.TimeUnit
import javax.inject.Inject
+import javax.inject.Singleton
+/**
+ * Handles automatic mirror switching for manga sources when the primary domain fails.
+ * Features:
+ * - Persistent blacklist that survives app restarts
+ * - Time-based expiration of blacklist entries (24 hours default)
+ * - Manual blacklist clearing capability
+ */
+@Singleton
class MirrorSwitcher @Inject constructor(
private val settings: AppSettings,
@MangaHttpClient private val okHttpClient: OkHttpClient,
+ @ApplicationContext private val context: Context,
) {
private val blacklist = EnumSet.noneOf(MangaParserSource::class.java)
+ private val blacklistTimestamps = mutableMapOf()
private val mutex: Mutex = Mutex()
+ private val prefs by lazy {
+ context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE)
+ }
+
+ init {
+ loadBlacklistFromStorage()
+ }
val isEnabled: Boolean
get() = settings.isMirrorSwitchingEnabled
+ /**
+ * Get the list of currently blacklisted sources (excluding expired entries).
+ */
+ val blacklistedSources: Set
+ get() {
+ cleanupExpiredEntries()
+ return blacklist.toSet()
+ }
+
suspend fun trySwitchMirror(repository: ParserMangaRepository, loader: suspend () -> T?): T? {
val source = repository.source
+ cleanupExpiredEntries()
if (!isEnabled || source in blacklist) {
return null
}
@@ -59,7 +90,7 @@ class MirrorSwitcher @Inject constructor(
}
}
repository.domain = currentHost // rollback
- blacklist.add(source)
+ addToBlacklist(source)
logd { "$source blacklisted" }
return null
}
@@ -89,6 +120,96 @@ class MirrorSwitcher @Inject constructor(
}
}
+ /**
+ * Clear all blacklisted sources, allowing them to be retried.
+ * Useful when mirrors may have come back online.
+ */
+ fun clearBlacklist() {
+ blacklist.clear()
+ blacklistTimestamps.clear()
+ prefs.edit()
+ .remove(KEY_BLACKLIST)
+ .remove(KEY_BLACKLIST_TIMESTAMPS)
+ .apply()
+ logd { "Blacklist cleared" }
+ }
+
+ /**
+ * Remove a specific source from the blacklist.
+ */
+ fun removeFromBlacklist(source: MangaParserSource) {
+ if (blacklist.remove(source)) {
+ blacklistTimestamps.remove(source)
+ saveBlacklistToStorage()
+ logd { "$source removed from blacklist" }
+ }
+ }
+
+ private fun addToBlacklist(source: MangaParserSource) {
+ blacklist.add(source)
+ blacklistTimestamps[source] = System.currentTimeMillis()
+ saveBlacklistToStorage()
+ }
+
+ private fun cleanupExpiredEntries() {
+ val now = System.currentTimeMillis()
+ val expiredSources = blacklistTimestamps.filter { (_, timestamp) ->
+ now - timestamp > BLACKLIST_TTL_MS
+ }.keys
+ if (expiredSources.isNotEmpty()) {
+ expiredSources.forEach { source ->
+ blacklist.remove(source)
+ blacklistTimestamps.remove(source)
+ logd { "$source expired from blacklist" }
+ }
+ saveBlacklistToStorage()
+ }
+ }
+
+ private fun saveBlacklistToStorage() {
+ val sourceNames = blacklist.map { it.name }.toSet()
+ val timestamps = blacklistTimestamps.map { (source, time) -> "${source.name}:$time" }.toSet()
+ prefs.edit()
+ .putStringSet(KEY_BLACKLIST, sourceNames)
+ .putStringSet(KEY_BLACKLIST_TIMESTAMPS, timestamps)
+ .apply()
+ }
+
+ private fun loadBlacklistFromStorage() {
+ val sourceNames = prefs.getStringSet(KEY_BLACKLIST, emptySet()) ?: emptySet()
+ val timestamps = prefs.getStringSet(KEY_BLACKLIST_TIMESTAMPS, emptySet()) ?: emptySet()
+
+ // Parse timestamps map
+ val timestampMap = timestamps.mapNotNull { entry ->
+ val parts = entry.split(":")
+ if (parts.size == 2) {
+ try {
+ parts[0] to parts[1].toLong()
+ } catch (e: NumberFormatException) {
+ null
+ }
+ } else null
+ }.toMap()
+
+ // Load sources with valid timestamps
+ val now = System.currentTimeMillis()
+ sourceNames.forEach { name ->
+ try {
+ val source = MangaParserSource.valueOf(name)
+ val timestamp = timestampMap[name] ?: now
+ // Only load if not expired
+ if (now - timestamp <= BLACKLIST_TTL_MS) {
+ blacklist.add(source)
+ blacklistTimestamps[source] = timestamp
+ }
+ } catch (e: IllegalArgumentException) {
+ // Source no longer exists, skip it
+ logd { "Unknown source in blacklist: $name" }
+ }
+ }
+ logd { "Loaded ${blacklist.size} sources from persistent blacklist" }
+ }
+
private fun T.takeIfValid() = takeIf {
when (it) {
is Collection<*> -> it.isNotEmpty()
@@ -99,6 +220,12 @@ class MirrorSwitcher @Inject constructor(
private companion object {
const val TAG = "MirrorSwitcher"
+ const val PREFS_NAME = "mirror_switcher"
+ const val KEY_BLACKLIST = "blacklist"
+ const val KEY_BLACKLIST_TIMESTAMPS = "blacklist_timestamps"
+
+ /** Blacklist entries expire after 24 hours */
+ val BLACKLIST_TTL_MS = TimeUnit.HOURS.toMillis(24)
inline fun logd(message: () -> String) {
if (BuildConfig.DEBUG) {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/SourceHealthTracker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/SourceHealthTracker.kt
new file mode 100644
index 0000000..7514c71
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/SourceHealthTracker.kt
@@ -0,0 +1,123 @@
+package org.koitharu.kotatsu.core.parser
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.koitharu.kotatsu.explore.data.SourceHealthRepository
+import org.koitharu.kotatsu.parsers.model.MangaSource
+import org.koitharu.kotatsu.parsers.util.runCatchingCancellable
+import javax.inject.Inject
+import javax.inject.Singleton
+import kotlin.system.measureTimeMillis
+
+/**
+ * Tracks health metrics for manga sources.
+ * Wraps network operations to automatically record success/failure and response times.
+ */
+@Singleton
+class SourceHealthTracker @Inject constructor(
+ private val healthRepository: SourceHealthRepository,
+) {
+ /**
+ * Execute an operation and track its health metrics.
+ * Records success with response time, or failure with error message.
+ *
+ * @param source The manga source being accessed
+ * @param operation The suspend operation to execute
+ * @return The result of the operation
+ */
+ suspend fun trackOperation(
+ source: MangaSource,
+ operation: suspend () -> T
+ ): T {
+ var responseTime = 0L
+ val result = runCatchingCancellable {
+ var operationResult: T
+ responseTime = measureTimeMillis {
+ operationResult = operation()
+ }
+ operationResult
+ }
+
+ // Record metrics in background
+ withContext(Dispatchers.Default) {
+ result.fold(
+ onSuccess = {
+ // Validate result - empty collections may indicate issues
+ val isValidResult = when (it) {
+ is Collection<*> -> it.isNotEmpty()
+ is String -> it.isNotEmpty()
+ else -> true
+ }
+ if (isValidResult) {
+ healthRepository.recordSuccess(source, responseTime)
+ } else {
+ // Empty result might indicate a source issue
+ healthRepository.recordFailure(source, EmptyResultException())
+ }
+ },
+ onFailure = { error ->
+ healthRepository.recordFailure(source, error)
+ }
+ )
+ }
+
+ return result.getOrThrow()
+ }
+
+ /**
+ * Execute an operation that returns a nullable result and track its health metrics.
+ * Null results are treated as successful if expected.
+ */
+ suspend fun trackNullableOperation(
+ source: MangaSource,
+ operation: suspend () -> T?
+ ): T? {
+ var responseTime = 0L
+ val result = runCatchingCancellable {
+ var operationResult: T?
+ responseTime = measureTimeMillis {
+ operationResult = operation()
+ }
+ operationResult
+ }
+
+ withContext(Dispatchers.Default) {
+ result.fold(
+ onSuccess = {
+ healthRepository.recordSuccess(source, responseTime)
+ },
+ onFailure = { error ->
+ healthRepository.recordFailure(source, error)
+ }
+ )
+ }
+
+ return result.getOrThrow()
+ }
+
+ /**
+ * Record a manual success for operations tracked externally
+ */
+ suspend fun recordSuccess(source: MangaSource, responseTimeMs: Long) {
+ healthRepository.recordSuccess(source, responseTimeMs)
+ }
+
+ /**
+ * Record a manual failure for operations tracked externally
+ */
+ suspend fun recordFailure(source: MangaSource, error: Throwable?) {
+ healthRepository.recordFailure(source, error)
+ }
+
+ /**
+ * Check if a source is currently unreliable (many consecutive failures)
+ */
+ suspend fun isSourceUnreliable(source: MangaSource): Boolean {
+ return healthRepository.isSourceUnreliable(source)
+ }
+
+ /**
+ * Exception for empty results that shouldn't necessarily fail but indicate potential issues
+ */
+ class EmptyResultException : Exception("Operation returned empty result")
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalMangaRepository.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalMangaRepository.kt
index 80ad1df..5744395 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalMangaRepository.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalMangaRepository.kt
@@ -63,5 +63,17 @@ class ExternalMangaRepository(
contentSource.getPageUrl(page.url)
}
- override suspend fun getRelatedMangaImpl(seed: Manga): List = emptyList() // TODO
+ /**
+ * Get related manga from external plugin.
+ * Returns empty list if the plugin doesn't support this feature.
+ */
+ override suspend fun getRelatedMangaImpl(seed: Manga): List {
+ // Check if plugin supports related manga
+ if (capabilities?.isRelatedMangaSupported != true) {
+ return emptyList()
+ }
+ return runInterruptible(Dispatchers.IO) {
+ contentSource.getRelatedManga(seed)
+ }
+ }
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalPluginContentSource.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalPluginContentSource.kt
index 58dff0d..082dbbc 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalPluginContentSource.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/parser/external/ExternalPluginContentSource.kt
@@ -159,6 +159,32 @@ class ExternalPluginContentSource(
}
}
+ /**
+ * Get related manga for a given seed manga.
+ * Returns empty list if the plugin doesn't support related manga.
+ */
+ @Blocking
+ @WorkerThread
+ fun getRelatedManga(seed: Manga): List {
+ val uri = "content://${source.authority}/manga/related".toUri()
+ .buildUpon()
+ .appendPath(seed.url)
+ .build()
+ return runCatching {
+ contentResolver.query(uri, null, null, null, null)
+ .safe()
+ .use { cursor ->
+ val result = ArrayList(cursor.count)
+ if (cursor.moveToFirst()) {
+ do {
+ result += cursor.getManga()
+ } while (cursor.moveToNext())
+ }
+ result
+ }
+ }.getOrDefault(emptyList())
+ }
+
@Blocking
@WorkerThread
private fun fetchLocales(): Set {
@@ -201,6 +227,7 @@ class ExternalPluginContentSource(
isOriginalLocaleSupported = cursor.getBooleanOrDefault(COLUMN_ORIGINAL_LOCALE, false),
isAuthorSearchSupported = cursor.getBooleanOrDefault(COLUMN_AUTHOR, false),
),
+ isRelatedMangaSupported = cursor.getBooleanOrDefault(COLUMN_RELATED_MANGA, false),
)
} else {
null
@@ -304,6 +331,7 @@ class ExternalPluginContentSource(
class MangaSourceCapabilities(
val availableSortOrders: Set,
val listFilterCapabilities: MangaListFilterCapabilities,
+ val isRelatedMangaSupported: Boolean = false,
)
private companion object {
@@ -316,6 +344,7 @@ class ExternalPluginContentSource(
const val COLUMN_YEAR = "year"
const val COLUMN_YEAR_RANGE = "year_range"
const val COLUMN_ORIGINAL_LOCALE = "original_locale"
+ const val COLUMN_RELATED_MANGA = "related_manga"
const val COLUMN_ID = "id"
const val COLUMN_NAME = "name"
const val COLUMN_NUMBER = "number"
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/prefs/AppSettings.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/prefs/AppSettings.kt
index 2910cd4..11c20dd 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/prefs/AppSettings.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/prefs/AppSettings.kt
@@ -286,6 +286,35 @@ class AppSettings @Inject constructor(@ApplicationContext context: Context) {
val isMirrorSwitchingEnabled: Boolean
get() = prefs.getBoolean(KEY_MIRROR_SWITCHING, false)
+ /**
+ * JavaScript evaluation timeout in milliseconds.
+ * Used for CloudFlare challenges and other WebView JS operations.
+ * Default: 4000ms, Range: 2000-30000ms
+ */
+ val jsTimeoutMs: Long
+ get() = prefs.getInt(KEY_JS_TIMEOUT, 4000).toLong().coerceIn(2000L, 30000L)
+
+ /**
+ * Memory cache TTL settings in minutes.
+ * Controls how long manga details, pages, and related manga are cached in memory.
+ */
+ val cacheDetailsTtlMinutes: Int
+ get() = prefs.getInt(KEY_CACHE_DETAILS_TTL, DEFAULT_CACHE_DETAILS_TTL).coerceIn(1, 60)
+
+ val cachePagesTtlMinutes: Int
+ get() = prefs.getInt(KEY_CACHE_PAGES_TTL, DEFAULT_CACHE_PAGES_TTL).coerceIn(1, 120)
+
+ val cacheRelatedTtlMinutes: Int
+ get() = prefs.getInt(KEY_CACHE_RELATED_TTL, DEFAULT_CACHE_RELATED_TTL).coerceIn(1, 120)
+
+ /**
+ * Maximum number of parallel chapter checks during tracker updates.
+ * Higher values = faster checking but more network load.
+ * Default: 6, Range: 1-12
+ */
+ val trackerParallelism: Int
+ get() = prefs.getInt(KEY_TRACKER_PARALLELISM, DEFAULT_TRACKER_PARALLELISM).coerceIn(1, 12)
+
val isExitConfirmationEnabled: Boolean
get() = prefs.getBoolean(KEY_EXIT_CONFIRM, false)
@@ -295,6 +324,35 @@ class AppSettings @Inject constructor(@ApplicationContext context: Context) {
val isUnstableUpdatesAllowed: Boolean
get() = prefs.getBoolean(KEY_UPDATES_UNSTABLE, false)
+ /**
+ * Enhanced App Update Check settings
+ */
+ var isAutoUpdateCheckEnabled: Boolean
+ get() = prefs.getBoolean(KEY_AUTO_UPDATE_CHECK, true)
+ set(value) = prefs.edit { putBoolean(KEY_AUTO_UPDATE_CHECK, value) }
+
+ val isUpdateCheckWifiOnly: Boolean
+ get() = prefs.getBoolean(KEY_UPDATE_CHECK_WIFI_ONLY, false)
+
+ val updateCheckIntervalHours: Int
+ get() = prefs.getString(KEY_UPDATE_CHECK_INTERVAL, null)?.toIntOrNull() ?: DEFAULT_UPDATE_CHECK_INTERVAL_HOURS
+
+ var lastUpdateCheckTime: Long
+ get() = prefs.getLong(KEY_LAST_UPDATE_CHECK, 0L)
+ set(value) = prefs.edit { putLong(KEY_LAST_UPDATE_CHECK, value) }
+
+ fun isVersionSkipped(versionName: String): Boolean {
+ return prefs.getString(KEY_SKIPPED_UPDATE_VERSION, null) == versionName
+ }
+
+ fun skipVersion(versionName: String) {
+ prefs.edit { putString(KEY_SKIPPED_UPDATE_VERSION, versionName) }
+ }
+
+ fun clearSkippedVersion() {
+ prefs.edit { remove(KEY_SKIPPED_UPDATE_VERSION) }
+ }
+
val isPagesTabEnabled: Boolean
get() = prefs.getBoolean(KEY_PAGES_TAB, true)
@@ -772,11 +830,21 @@ class AppSettings @Inject constructor(@ApplicationContext context: Context) {
const val KEY_APP_LOCALE = "app_locale"
const val KEY_SOURCES_GRID = "sources_grid"
const val KEY_UPDATES_UNSTABLE = "updates_unstable"
+ const val KEY_AUTO_UPDATE_CHECK = "auto_update_check"
+ const val KEY_UPDATE_CHECK_WIFI_ONLY = "update_check_wifi_only"
+ const val KEY_UPDATE_CHECK_INTERVAL = "update_check_interval"
+ const val KEY_LAST_UPDATE_CHECK = "last_update_check"
+ const val KEY_SKIPPED_UPDATE_VERSION = "skipped_update_version"
const val KEY_TIPS_CLOSED = "tips_closed"
const val KEY_SSL_BYPASS = "ssl_bypass"
const val KEY_READER_AUTOSCROLL_SPEED = "as_speed"
const val KEY_READER_AUTOSCROLL_FAB = "as_fab"
const val KEY_MIRROR_SWITCHING = "mirror_switching"
+ const val KEY_JS_TIMEOUT = "js_timeout"
+ const val KEY_CACHE_DETAILS_TTL = "cache_details_ttl"
+ const val KEY_CACHE_PAGES_TTL = "cache_pages_ttl"
+ const val KEY_CACHE_RELATED_TTL = "cache_related_ttl"
+ const val KEY_TRACKER_PARALLELISM = "tracker_parallelism"
const val KEY_PROXY = "proxy"
const val KEY_PROXY_TYPE = "proxy_type_2"
const val KEY_PROXY_ADDRESS = "proxy_address"
@@ -845,5 +913,16 @@ class AppSettings @Inject constructor(@ApplicationContext context: Context) {
// values
private const val READER_CROP_PAGED = 1
private const val READER_CROP_WEBTOON = 2
+
+ // cache TTL defaults (in minutes)
+ const val DEFAULT_CACHE_DETAILS_TTL = 5
+ const val DEFAULT_CACHE_PAGES_TTL = 10
+ const val DEFAULT_CACHE_RELATED_TTL = 10
+
+ // tracker defaults
+ const val DEFAULT_TRACKER_PARALLELISM = 6
+
+ // update check defaults
+ const val DEFAULT_UPDATE_CHECK_INTERVAL_HOURS = 24
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Fragment.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Fragment.kt
index 483d41b..962b245 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Fragment.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Fragment.kt
@@ -34,5 +34,5 @@ tailrec fun <T> Fragment.findParentCallback(cls: Class<T>): T? {
val Fragment.container: FragmentContainerView?
get() = view?.ancestors?.firstNotNullOfOrNull {
- it as? FragmentContainerView // TODO check if direct parent
+ it as? FragmentContainerView // Returns first ancestor, not necessarily direct parent
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Preferences.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Preferences.kt
index 954fd82..199999f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Preferences.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Preferences.kt
@@ -19,7 +19,9 @@ fun ListPreference.setDefaultValueCompat(defaultValue: String) {
}
fun MultiSelectListPreference.setDefaultValueCompat(defaultValue: Set<String>) {
- setDefaultValue(defaultValue) // FIXME not working
+ if (values.isNullOrEmpty()) {
+ values = defaultValue
+ }
}
fun <E : Enum<E>> SharedPreferences.getEnumValue(key: String, enumClass: Class<E>): E? {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Throwable.kt b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Throwable.kt
index 5dffe99..7d3793b 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Throwable.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/core/util/ext/Throwable.kt
@@ -28,6 +28,7 @@ import org.koitharu.kotatsu.core.exceptions.InteractiveActionRequiredException
import org.koitharu.kotatsu.core.exceptions.NoDataReceivedException
import org.koitharu.kotatsu.core.exceptions.NonFileUriException
import org.koitharu.kotatsu.core.exceptions.ProxyConfigException
+import org.koitharu.kotatsu.core.exceptions.ProxyWebViewUnsupportedException
import org.koitharu.kotatsu.core.exceptions.SyncApiException
import org.koitharu.kotatsu.core.exceptions.UnsupportedFileException
import org.koitharu.kotatsu.core.exceptions.UnsupportedSourceException
@@ -106,6 +107,7 @@ private fun Throwable.getDisplayMessageOrNull(resources: Resources): String? = w
is EmptyHistoryException -> resources.getString(R.string.history_is_empty)
is EmptyMangaException -> reason?.let { resources.getString(it.msgResId) } ?: cause?.getDisplayMessage(resources)
is ProxyConfigException -> resources.getString(R.string.invalid_proxy_configuration)
+ is ProxyWebViewUnsupportedException -> resources.getString(R.string.proxy_webview_not_supported)
is SyncApiException,
is ContentUnavailableException -> message
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/details/domain/DetailsInteractor.kt b/app/src/main/kotlin/org/koitharu/kotatsu/details/domain/DetailsInteractor.kt
index e8c1aa0..f2a4651 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/details/domain/DetailsInteractor.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/details/domain/DetailsInteractor.kt
@@ -23,7 +23,10 @@ import org.koitharu.kotatsu.scrobbling.common.domain.model.ScrobblingInfo
import org.koitharu.kotatsu.tracker.domain.TrackingRepository
import javax.inject.Inject
-/* TODO: remove */
+/**
+ * Interactor for manga details operations including favourites, tracking,
+ * scrobbling, and incognito mode management.
+ */
class DetailsInteractor @Inject constructor(
private val historyRepository: HistoryRepository,
private val favouritesRepository: FavouritesRepository,
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/details/ui/ReadButtonDelegate.kt b/app/src/main/kotlin/org/koitharu/kotatsu/details/ui/ReadButtonDelegate.kt
index a697e87..6150225 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/details/ui/ReadButtonDelegate.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/details/ui/ReadButtonDelegate.kt
@@ -108,8 +108,11 @@ class ReadButtonDelegate(
private fun openReader(isIncognitoMode: Boolean) {
val manga = viewModel.getMangaOrNull() ?: return
if (viewModel.historyInfo.value.isChapterMissing) {
- Snackbar.make(buttonRead, R.string.chapter_is_missing, Snackbar.LENGTH_SHORT)
- .show() // TODO
+ Snackbar.make(buttonRead, R.string.chapter_is_missing, Snackbar.LENGTH_LONG)
+ .setAction(R.string.select_chapter) {
+ router.showChapterPagesSheet()
+ }
+ .show()
} else {
val intentBuilder = ReaderIntent.Builder(context)
.manga(manga)
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/DownloadStateTracker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/DownloadStateTracker.kt
new file mode 100644
index 0000000..151ce36
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/DownloadStateTracker.kt
@@ -0,0 +1,315 @@
+package org.koitharu.kotatsu.download.domain
+
+import android.content.Context
+import android.content.SharedPreferences
+import dagger.hilt.android.qualifiers.ApplicationContext
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
+import kotlinx.coroutines.withContext
+import org.json.JSONArray
+import org.json.JSONObject
+import java.io.File
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Tracks partial download state for resume capability.
+ *
+ * Stores:
+ * - Chapter download progress (which pages are complete)
+ * - Partial file information (path, expected size, current size)
+ * - Download session metadata
+ */
+@Singleton
+class DownloadStateTracker @Inject constructor(
+ @ApplicationContext private val context: Context,
+) {
+ private val prefs: SharedPreferences by lazy {
+ context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE)
+ }
+
+ private val mutex = Mutex()
+
+ /**
+ * Record that a chapter download has started.
+ */
+ suspend fun startChapter(
+ mangaId: Long,
+ chapterId: Long,
+ totalPages: Int,
+ ) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getChapterKey(mangaId, chapterId)
+ val state = ChapterDownloadState(
+ chapterId = chapterId,
+ totalPages = totalPages,
+ completedPages = mutableSetOf(),
+ startedAt = System.currentTimeMillis(),
+ )
+ prefs.edit().putString(key, state.toJson()).apply()
+ }
+ }
+
+ /**
+ * Mark a page as completed.
+ */
+ suspend fun markPageComplete(
+ mangaId: Long,
+ chapterId: Long,
+ pageIndex: Int,
+ ) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getChapterKey(mangaId, chapterId)
+ val json = prefs.getString(key, null) ?: return@withContext
+ val state = ChapterDownloadState.fromJson(json)
+ state.completedPages.add(pageIndex)
+ prefs.edit().putString(key, state.toJson()).apply()
+ }
+ }
+
+ /**
+ * Mark a chapter as fully completed.
+ */
+ suspend fun completeChapter(
+ mangaId: Long,
+ chapterId: Long,
+ ) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getChapterKey(mangaId, chapterId)
+ prefs.edit().remove(key).apply()
+
+ // Add to completed chapters list
+ val completedKey = getCompletedChaptersKey(mangaId)
+ val completed = getCompletedChapterIds(mangaId).toMutableSet()
+ completed.add(chapterId)
+ prefs.edit().putString(completedKey, JSONArray(completed.toList()).toString()).apply()
+ }
+ }
+
+ /**
+ * Get the download state for a chapter.
+ */
+ suspend fun getChapterState(
+ mangaId: Long,
+ chapterId: Long,
+ ): ChapterDownloadState? = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getChapterKey(mangaId, chapterId)
+ val json = prefs.getString(key, null) ?: return@withContext null
+ ChapterDownloadState.fromJson(json)
+ }
+ }
+
+ /**
+ * Get completed pages for a chapter (for resume).
+ */
+ suspend fun getCompletedPages(
+ mangaId: Long,
+ chapterId: Long,
+ ): Set<Int> {
+ return getChapterState(mangaId, chapterId)?.completedPages ?: emptySet()
+ }
+
+ /**
+ * Get IDs of fully completed chapters.
+ */
+ suspend fun getCompletedChapterIds(mangaId: Long): Set<Long> = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getCompletedChaptersKey(mangaId)
+ val json = prefs.getString(key, null) ?: return@withContext emptySet()
+ try {
+ val array = JSONArray(json)
+ (0 until array.length()).map { array.getLong(it) }.toSet()
+ } catch (e: Exception) {
+ emptySet()
+ }
+ }
+ }
+
+ /**
+ * Track a partial file download.
+ */
+ suspend fun trackPartialFile(
+ url: String,
+ file: File,
+ expectedSize: Long,
+ ) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getPartialFileKey(url)
+ val state = PartialFileState(
+ url = url,
+ filePath = file.absolutePath,
+ expectedSize = expectedSize,
+ downloadedSize = file.length(),
+ lastModified = System.currentTimeMillis(),
+ )
+ prefs.edit().putString(key, state.toJson()).apply()
+ }
+ }
+
+ /**
+ * Get partial file state for resume.
+ */
+ suspend fun getPartialFile(url: String): PartialFileState? = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getPartialFileKey(url)
+ val json = prefs.getString(key, null) ?: return@withContext null
+ val state = PartialFileState.fromJson(json)
+
+ // Verify file still exists
+ val file = File(state.filePath)
+ if (!file.exists()) {
+ prefs.edit().remove(key).apply()
+ return@withContext null
+ }
+
+ // Update with current size
+ state.copy(downloadedSize = file.length())
+ }
+ }
+
+ /**
+ * Remove partial file tracking.
+ */
+ suspend fun removePartialFile(url: String) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val key = getPartialFileKey(url)
+ prefs.edit().remove(key).apply()
+ }
+ }
+
+ /**
+ * Clear all tracking data for a manga.
+ */
+ suspend fun clearMangaState(mangaId: Long) = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val editor = prefs.edit()
+ prefs.all.keys
+ .filter { it.startsWith("chapter_${mangaId}_") || it == getCompletedChaptersKey(mangaId) }
+ .forEach { editor.remove(it) }
+ editor.apply()
+ }
+ }
+
+ /**
+ * Clean up old tracking data (older than 7 days).
+ */
+ suspend fun cleanupOldData() = mutex.withLock {
+ withContext(Dispatchers.IO) {
+ val cutoff = System.currentTimeMillis() - MAX_AGE_MS
+ val editor = prefs.edit()
+ var removed = 0
+
+ prefs.all.forEach { (key, value) ->
+ if (key.startsWith("chapter_") && value is String) {
+ try {
+ val state = ChapterDownloadState.fromJson(value)
+ if (state.startedAt < cutoff) {
+ editor.remove(key)
+ removed++
+ }
+ } catch (e: Exception) {
+ editor.remove(key)
+ }
+ } else if (key.startsWith("partial_") && value is String) {
+ try {
+ val state = PartialFileState.fromJson(value)
+ if (state.lastModified < cutoff) {
+ editor.remove(key)
+ removed++
+ }
+ } catch (e: Exception) {
+ editor.remove(key)
+ }
+ }
+ }
+
+ if (removed > 0) {
+ editor.apply()
+ }
+ }
+ }
+
+ private fun getChapterKey(mangaId: Long, chapterId: Long) = "chapter_${mangaId}_$chapterId"
+ private fun getCompletedChaptersKey(mangaId: Long) = "completed_$mangaId"
+ private fun getPartialFileKey(url: String) = "partial_${url.hashCode()}"
+
+ /**
+ * State of a chapter download in progress.
+ */
+ data class ChapterDownloadState(
+ val chapterId: Long,
+ val totalPages: Int,
+ val completedPages: MutableSet<Int>,
+ val startedAt: Long,
+ ) {
+ fun toJson(): String {
+ return JSONObject().apply {
+ put("chapterId", chapterId)
+ put("totalPages", totalPages)
+ put("completedPages", JSONArray(completedPages.toList()))
+ put("startedAt", startedAt)
+ }.toString()
+ }
+
+ companion object {
+ fun fromJson(json: String): ChapterDownloadState {
+ val obj = JSONObject(json)
+ val pagesArray = obj.getJSONArray("completedPages")
+ val pages = (0 until pagesArray.length()).map { pagesArray.getInt(it) }.toMutableSet()
+ return ChapterDownloadState(
+ chapterId = obj.getLong("chapterId"),
+ totalPages = obj.getInt("totalPages"),
+ completedPages = pages,
+ startedAt = obj.optLong("startedAt", 0L),
+ )
+ }
+ }
+ }
+
+ /**
+ * State of a partial file download.
+ */
+ data class PartialFileState(
+ val url: String,
+ val filePath: String,
+ val expectedSize: Long,
+ val downloadedSize: Long,
+ val lastModified: Long,
+ ) {
+ val isComplete: Boolean
+ get() = expectedSize > 0 && downloadedSize >= expectedSize
+
+ val progress: Float
+ get() = if (expectedSize > 0) downloadedSize.toFloat() / expectedSize else 0f
+
+ fun toJson(): String {
+ return JSONObject().apply {
+ put("url", url)
+ put("filePath", filePath)
+ put("expectedSize", expectedSize)
+ put("downloadedSize", downloadedSize)
+ put("lastModified", lastModified)
+ }.toString()
+ }
+
+ companion object {
+ fun fromJson(json: String): PartialFileState {
+ val obj = JSONObject(json)
+ return PartialFileState(
+ url = obj.getString("url"),
+ filePath = obj.getString("filePath"),
+ expectedSize = obj.getLong("expectedSize"),
+ downloadedSize = obj.getLong("downloadedSize"),
+ lastModified = obj.optLong("lastModified", 0L),
+ )
+ }
+ }
+ }
+
+ companion object {
+ private const val PREFS_NAME = "download_state_tracker"
+ private const val MAX_AGE_MS = 7 * 24 * 60 * 60 * 1000L // 7 days
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/ResumableDownloader.kt b/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/ResumableDownloader.kt
new file mode 100644
index 0000000..3f593a3
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/download/domain/ResumableDownloader.kt
@@ -0,0 +1,271 @@
+package org.koitharu.kotatsu.download.domain
+
+import android.util.Log
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import okhttp3.OkHttpClient
+import okhttp3.Request
+import okhttp3.Response
+import okio.buffer
+import okio.sink
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.core.network.MangaHttpClient
+import org.koitharu.kotatsu.core.util.ext.ensureSuccess
+import org.koitharu.kotatsu.core.util.ext.writeAllCancellable
+import org.koitharu.kotatsu.parsers.util.requireBody
+import java.io.File
+import java.io.IOException
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Handles resumable downloads with HTTP Range header support.
+ *
+ * Features:
+ * - Detects server support for range requests (Accept-Ranges header)
+ * - Resumes partial downloads from last byte position
+ * - Falls back to full download if resume not supported
+ * - Validates Content-Range responses
+ * - Tracks download progress for resume capability
+ */
+@Singleton
+class ResumableDownloader @Inject constructor(
+ @MangaHttpClient private val okHttp: OkHttpClient,
+) {
+
+ /**
+ * Download a file with resume support.
+ *
+ * @param request The original OkHttp request
+ * @param targetFile The file to download to (may be partial)
+ * @param progressCallback Optional callback for download progress
+ * @return The completed file
+ */
+ suspend fun download(
+ request: Request,
+ targetFile: File,
+ progressCallback: ((downloaded: Long, total: Long) -> Unit)? = null,
+ ): File = withContext(Dispatchers.IO) {
+ val existingBytes = if (targetFile.exists()) targetFile.length() else 0L
+
+ // If file exists and has content, try to resume
+ if (existingBytes > 0) {
+ logDebug { "Attempting resume from byte $existingBytes for ${request.url}" }
+
+ // First, check if server supports range requests
+ val supportsRange = checkRangeSupport(request)
+
+ if (supportsRange) {
+ try {
+ return@withContext resumeDownload(request, targetFile, existingBytes, progressCallback)
+ } catch (e: RangeNotSatisfiableException) {
+ logDebug { "Range not satisfiable, file may be complete or corrupted: ${e.message}" }
+ // File might be complete or corrupted, verify size
+ val contentLength = getContentLength(request)
+ if (contentLength > 0 && existingBytes >= contentLength) {
+ logDebug { "File appears complete ($existingBytes >= $contentLength)" }
+ progressCallback?.invoke(existingBytes, existingBytes)
+ return@withContext targetFile
+ }
+ // File is corrupted, restart download
+ targetFile.delete()
+ } catch (e: ResumeNotSupportedException) {
+ logDebug { "Resume not supported: ${e.message}" }
+ targetFile.delete()
+ }
+ } else {
+ logDebug { "Server doesn't support range requests, starting fresh" }
+ targetFile.delete()
+ }
+ }
+
+ // Full download (no resume)
+ fullDownload(request, targetFile, progressCallback)
+ }
+
+ /**
+ * Check if the server supports HTTP Range requests.
+ */
+ private suspend fun checkRangeSupport(request: Request): Boolean {
+ return try {
+ val headRequest = request.newBuilder()
+ .head()
+ .build()
+
+ val response = okHttp.newCall(headRequest).execute()
+ response.use {
+ val acceptRanges = it.header("Accept-Ranges")
+ val supportsRange = acceptRanges?.equals("bytes", ignoreCase = true) == true
+
+ // Also check if Content-Length is present (needed for progress)
+ val contentLength = it.header("Content-Length")?.toLongOrNull() ?: 0L
+
+ logDebug { "Range support check: Accept-Ranges=$acceptRanges, Content-Length=$contentLength" }
+ supportsRange && contentLength > 0
+ }
+ } catch (e: Exception) {
+ logDebug { "Failed to check range support: ${e.message}" }
+ false
+ }
+ }
+
+ /**
+ * Get the content length for a request.
+ */
+ private suspend fun getContentLength(request: Request): Long {
+ return try {
+ val headRequest = request.newBuilder()
+ .head()
+ .build()
+
+ okHttp.newCall(headRequest).execute().use { response ->
+ response.header("Content-Length")?.toLongOrNull() ?: -1L
+ }
+ } catch (e: Exception) {
+ -1L
+ }
+ }
+
+ /**
+ * Resume a partial download.
+ */
+ private suspend fun resumeDownload(
+ request: Request,
+ targetFile: File,
+ existingBytes: Long,
+ progressCallback: ((downloaded: Long, total: Long) -> Unit)?,
+ ): File {
+ val rangeRequest = request.newBuilder()
+ .header("Range", "bytes=$existingBytes-")
+ .build()
+
+ val response = okHttp.newCall(rangeRequest).execute()
+
+ return response.use { resp ->
+ when (resp.code) {
+ 206 -> {
+ // Partial content - resume successful
+ val contentRange = resp.header("Content-Range")
+ val totalSize = parseContentRangeTotal(contentRange) ?: run {
+ // Fallback: use Content-Length + existing bytes
+ val contentLength = resp.body?.contentLength() ?: 0L
+ existingBytes + contentLength
+ }
+
+ logDebug { "Resuming download: existing=$existingBytes, total=$totalSize" }
+
+ resp.requireBody().use { body ->
+ // Append to existing file
+ targetFile.sink(append = true).buffer().use { sink ->
+ var downloaded = existingBytes
+ val source = body.source()
+ val buffer = okio.Buffer()
+
+ while (true) {
+ val read = source.read(buffer, BUFFER_SIZE)
+ if (read == -1L) break
+
+ sink.write(buffer, read)
+ downloaded += read
+ progressCallback?.invoke(downloaded, totalSize)
+ }
+ }
+ }
+ targetFile
+ }
+ 416 -> {
+ // Range Not Satisfiable
+ throw RangeNotSatisfiableException("Server returned 416: Range not satisfiable")
+ }
+ 200 -> {
+ // Server ignored range request, sent full file
+ throw ResumeNotSupportedException("Server returned 200 instead of 206")
+ }
+ else -> {
+ resp.ensureSuccess()
+ throw IOException("Unexpected response code: ${resp.code}")
+ }
+ }
+ }
+ }
+
+ /**
+ * Perform a full download (no resume).
+ */
+ private suspend fun fullDownload(
+ request: Request,
+ targetFile: File,
+ progressCallback: ((downloaded: Long, total: Long) -> Unit)?,
+ ): File {
+ val response = okHttp.newCall(request).execute()
+
+ return response.use { resp ->
+ resp.ensureSuccess()
+
+ resp.requireBody().use { body ->
+ val totalSize = body.contentLength().takeIf { it > 0 } ?: -1L
+
+ targetFile.sink(append = false).buffer().use { sink ->
+ if (progressCallback != null && totalSize > 0) {
+ // Download with progress tracking
+ var downloaded = 0L
+ val source = body.source()
+ val buffer = okio.Buffer()
+
+ while (true) {
+ val read = source.read(buffer, BUFFER_SIZE)
+ if (read == -1L) break
+
+ sink.write(buffer, read)
+ downloaded += read
+ progressCallback.invoke(downloaded, totalSize)
+ }
+ } else {
+ // Simple download without progress
+ sink.writeAllCancellable(body.source())
+ }
+ }
+ }
+ targetFile
+ }
+ }
+
+ /**
+ * Parse the total size from Content-Range header.
+ * Format: bytes start-end/total
+ * Example: bytes 1000-1999/5000
+ */
+ private fun parseContentRangeTotal(contentRange: String?): Long? {
+ if (contentRange == null) return null
+
+ return try {
+ val parts = contentRange.split("/")
+ if (parts.size == 2) {
+ val total = parts[1].trim()
+ if (total != "*") {
+ total.toLongOrNull()
+ } else {
+ null
+ }
+ } else {
+ null
+ }
+ } catch (e: Exception) {
+ null
+ }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ class RangeNotSatisfiableException(message: String) : IOException(message)
+ class ResumeNotSupportedException(message: String) : IOException(message)
+
+ companion object {
+ private const val TAG = "ResumableDownloader"
+ private const val BUFFER_SIZE = 8192L
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadNotificationFactory.kt b/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadNotificationFactory.kt
index 9bb19d8..65159b0 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadNotificationFactory.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadNotificationFactory.kt
@@ -52,7 +52,8 @@ class DownloadNotificationFactory @AssistedInject constructor(
@Assisted val isSilent: Boolean,
) {
- private val covers = HashMap() // TODO cache
+ // Simple in-memory cache for notification covers - limited by active downloads count
+ private val covers = HashMap()
private val builder = NotificationCompat.Builder(context, if (isSilent) CHANNEL_ID_SILENT else CHANNEL_ID_DEFAULT)
private val mutex = Mutex()
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadWorker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadWorker.kt
index 41298f1..252e821 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadWorker.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/download/ui/worker/DownloadWorker.kt
@@ -75,6 +75,8 @@ import org.koitharu.kotatsu.core.util.ext.writeAllCancellable
import org.koitharu.kotatsu.core.util.progress.RealtimeEtaEstimator
import org.koitharu.kotatsu.download.domain.DownloadProgress
import org.koitharu.kotatsu.download.domain.DownloadState
+import org.koitharu.kotatsu.download.domain.DownloadStateTracker
+import org.koitharu.kotatsu.download.domain.ResumableDownloader
import org.koitharu.kotatsu.local.data.LocalMangaRepository
import org.koitharu.kotatsu.local.data.LocalStorageCache
import org.koitharu.kotatsu.local.data.LocalStorageChanges
@@ -113,6 +115,8 @@ class DownloadWorker @AssistedInject constructor(
@LocalStorageChanges private val localStorageChanges: MutableSharedFlow<LocalManga?>,
private val slowdownDispatcher: DownloadSlowdownDispatcher,
private val imageProxyInterceptor: ImageProxyInterceptor,
+ private val resumableDownloader: ResumableDownloader,
+ private val downloadStateTracker: DownloadStateTracker,
notificationFactoryFactory: DownloadNotificationFactory.Factory,
) : CoroutineWorker(appContext, params) {
@@ -395,27 +399,48 @@ class DownloadWorker @AssistedInject constructor(
}
return file
}
+
+ // HTTP download with resume support
val request = PageLoader.createPageRequest(url, source)
slowdownDispatcher.delay(source)
- return imageProxyInterceptor.interceptPageRequest(request, okHttp)
- .ensureSuccess()
- .use { response ->
- var file: File? = null
- try {
- response.requireBody().use { body ->
- file = destination.createTempFile(
- ext = MimeTypes.getExtension(body.contentType()?.toMimeType())
- )
- file.sink(append = false).buffer().use {
- it.writeAllCancellable(body.source())
- }
- }
- } catch (e: Exception) {
- file?.delete()
- throw e
+
+ // Check for existing partial download
+ val partialState = downloadStateTracker.getPartialFile(url)
+ val targetFile = if (partialState != null) {
+ File(partialState.filePath)
+ } else {
+ destination.createTempFile(null) // Extension determined after download
+ }
+
+ return try {
+ // Use resumable downloader for HTTP requests
+ resumableDownloader.download(request, targetFile)
+
+ // Clean up tracking after successful download
+ downloadStateTracker.removePartialFile(url)
+
+ // Rename with correct extension if needed
+ val mimeType = getMediaType(url, targetFile)
+ val ext = mimeType?.let { MimeTypes.getExtension(it) }
+ if (!ext.isNullOrEmpty() && !targetFile.name.contains(".$ext")) {
+ val renamedFile = File(targetFile.parent, "${targetFile.nameWithoutExtension}.$ext.tmp")
+ if (targetFile.renameTo(renamedFile)) {
+ renamedFile
+ } else {
+ targetFile
}
- checkNotNull(file)
+ } else {
+ targetFile
}
+ } catch (e: Exception) {
+ // Track partial download for potential resume
+ if (targetFile.exists() && targetFile.length() > 0) {
+ downloadStateTracker.trackPartialFile(url, targetFile, -1L)
+ } else {
+ targetFile.delete()
+ }
+ throw e
+ }
}
private fun File.createTempFile(ext: String?) = File(
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/explore/data/SourceHealthRepository.kt b/app/src/main/kotlin/org/koitharu/kotatsu/explore/data/SourceHealthRepository.kt
new file mode 100644
index 0000000..ca32b11
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/explore/data/SourceHealthRepository.kt
@@ -0,0 +1,167 @@
+package org.koitharu.kotatsu.explore.data
+
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.map
+import org.koitharu.kotatsu.core.db.MangaDatabase
+import org.koitharu.kotatsu.core.db.dao.SourceHealthDao
+import org.koitharu.kotatsu.core.db.entity.SourceHealthEntity
+import org.koitharu.kotatsu.parsers.model.MangaSource
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Repository for tracking and querying source health metrics.
+ * Provides methods to record successes/failures and query source reliability.
+ */
+@Singleton
+class SourceHealthRepository @Inject constructor(
+ private val db: MangaDatabase,
+) {
+ private val dao: SourceHealthDao
+ get() = db.getSourceHealthDao()
+
+ /**
+ * Record a successful request for a source
+ * @param source The manga source
+ * @param responseTimeMs Response time in milliseconds
+ */
+ suspend fun recordSuccess(source: MangaSource, responseTimeMs: Long) {
+ dao.recordSuccess(source.name, responseTimeMs)
+ }
+
+ /**
+ * Record a failed request for a source
+ * @param source The manga source
+ * @param error The error that occurred
+ */
+ suspend fun recordFailure(source: MangaSource, error: Throwable?) {
+ dao.recordFailure(source.name, error?.message)
+ }
+
+ /**
+ * Get health metrics for a specific source
+ */
+ suspend fun getHealth(source: MangaSource): SourceHealthEntity? {
+ return dao.get(source.name)
+ }
+
+ /**
+ * Observe health metrics for a specific source
+ */
+ fun observeHealth(source: MangaSource): Flow<SourceHealthEntity?> {
+ return dao.observe(source.name)
+ }
+
+ /**
+ * Get all source health metrics
+ */
+ suspend fun getAllHealth(): List<SourceHealthEntity> {
+ return dao.getAll()
+ }
+
+ /**
+ * Observe all source health metrics
+ */
+ fun observeAllHealth(): Flow<List<SourceHealthEntity>> {
+ return dao.observeAll()
+ }
+
+ /**
+ * Get sources that meet a minimum success rate threshold
+ * @param minSuccessRate Minimum success rate percentage (0-100)
+ */
+ suspend fun getHealthySources(minSuccessRate: Float = 80f): List<SourceHealthEntity> {
+ return dao.getHealthySources(minSuccessRate)
+ }
+
+ /**
+ * Get sources that are currently failing (high consecutive failures)
+ * @param threshold Number of consecutive failures to consider as "failing"
+ */
+ suspend fun getFailingSources(threshold: Int = 3): List<SourceHealthEntity> {
+ return dao.getFailingSources(threshold)
+ }
+
+ /**
+ * Get sources sorted by reliability score (highest first)
+ */
+ suspend fun getSourcesByReliability(): List<SourceHealthEntity> {
+ return dao.getAll().sortedByDescending { it.reliabilityScore }
+ }
+
+ /**
+ * Get sources sorted by response time (fastest first)
+ */
+ suspend fun getSourcesBySpeed(): List<SourceHealthEntity> {
+ return dao.getAll()
+ .filter { it.avgResponseTime > 0 }
+ .sortedBy { it.avgResponseTime }
+ }
+
+ /**
+ * Observe source health with calculated summary stats
+ */
+ fun observeHealthSummary(): Flow<HealthSummary> {
+ return dao.observeAll().map { entities ->
+ val total = entities.size
+ val healthy = entities.count { it.healthStatus == SourceHealthEntity.HealthStatus.HEALTHY }
+ val degraded = entities.count { it.healthStatus == SourceHealthEntity.HealthStatus.DEGRADED }
+ val poor = entities.count { it.healthStatus == SourceHealthEntity.HealthStatus.POOR }
+ val critical = entities.count { it.healthStatus == SourceHealthEntity.HealthStatus.CRITICAL }
+ val unknown = entities.count { it.healthStatus == SourceHealthEntity.HealthStatus.UNKNOWN }
+ val avgReliability = if (entities.isNotEmpty()) {
+ entities.filter { it.successCount + it.failureCount > 0 }
+ .map { it.reliabilityScore }
+ .average()
+ .toFloat()
+ } else 0f
+
+ HealthSummary(
+ totalSources = total,
+ healthyCount = healthy,
+ degradedCount = degraded,
+ poorCount = poor,
+ criticalCount = critical,
+ unknownCount = unknown,
+ averageReliability = avgReliability,
+ )
+ }
+ }
+
+ /**
+ * Reset statistics for a specific source
+ */
+ suspend fun resetStats(source: MangaSource) {
+ dao.resetStats(source.name)
+ }
+
+ /**
+ * Clear all health statistics
+ */
+ suspend fun clearAllStats() {
+ dao.deleteAll()
+ }
+
+ /**
+ * Check if a source should be considered unreliable
+ * @param source The manga source to check
+ * @param failureThreshold Number of consecutive failures to consider unreliable
+ */
+ suspend fun isSourceUnreliable(source: MangaSource, failureThreshold: Int = 5): Boolean {
+ val health = dao.get(source.name) ?: return false
+ return health.consecutiveFailures >= failureThreshold
+ }
+
+ /**
+ * Summary of health across all tracked sources
+ */
+ data class HealthSummary(
+ val totalSources: Int,
+ val healthyCount: Int,
+ val degradedCount: Int,
+ val poorCount: Int,
+ val criticalCount: Int,
+ val unknownCount: Int,
+ val averageReliability: Float,
+ )
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryEntity.kt b/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryEntity.kt
index 421565d..045a1c3 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryEntity.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryEntity.kt
@@ -3,6 +3,7 @@ package org.koitharu.kotatsu.history.data
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.ForeignKey
+import androidx.room.Index
import androidx.room.PrimaryKey
import org.koitharu.kotatsu.core.db.TABLE_HISTORY
import org.koitharu.kotatsu.core.db.entity.MangaEntity
@@ -17,6 +18,13 @@ import org.koitharu.kotatsu.core.db.entity.MangaEntity
onDelete = ForeignKey.CASCADE,
),
],
+ indices = [
+ Index(value = ["deleted_at"]),
+ Index(value = ["updated_at"]),
+ Index(value = ["created_at"]),
+ Index(value = ["percent"]),
+ Index(value = ["deleted_at", "updated_at"]),
+ ],
)
data class HistoryEntity(
@PrimaryKey(autoGenerate = false)
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryRepository.kt b/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryRepository.kt
index 3d5f052..fecba81 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryRepository.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/history/data/HistoryRepository.kt
@@ -29,8 +29,7 @@ import org.koitharu.kotatsu.parsers.model.MangaSource
import org.koitharu.kotatsu.parsers.model.MangaTag
import org.koitharu.kotatsu.parsers.util.findById
import org.koitharu.kotatsu.parsers.util.levenshteinDistance
-import org.koitharu.kotatsu.scrobbling.common.domain.Scrobbler
-import org.koitharu.kotatsu.scrobbling.common.domain.tryScrobble
+import org.koitharu.kotatsu.scrobbling.common.domain.ScrobblingManager
import org.koitharu.kotatsu.search.domain.SearchKind
import org.koitharu.kotatsu.tracker.domain.CheckNewChaptersUseCase
import javax.inject.Inject
@@ -40,7 +39,7 @@ import javax.inject.Provider
class HistoryRepository @Inject constructor(
private val db: MangaDatabase,
private val settings: AppSettings,
- private val scrobblers: Set<@JvmSuppressWildcards Scrobbler>,
+ private val scrobblingManager: ScrobblingManager,
private val mangaRepository: MangaDataRepository,
private val localObserver: HistoryLocalObserver,
private val newChaptersUseCaseProvider: Provider<CheckNewChaptersUseCase>,
@@ -132,7 +131,7 @@ class HistoryRepository @Inject constructor(
),
)
newChaptersUseCaseProvider.get()(manga, chapterId)
- scrobblers.forEach { it.tryScrobble(manga, chapterId) }
+ scrobblingManager.scrobble(manga, chapterId)
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/list/ui/adapter/TypedListSpacingDecoration.kt b/app/src/main/kotlin/org/koitharu/kotatsu/list/ui/adapter/TypedListSpacingDecoration.kt
index 17bb3dc..60d290f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/list/ui/adapter/TypedListSpacingDecoration.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/list/ui/adapter/TypedListSpacingDecoration.kt
@@ -70,7 +70,7 @@ class TypedListSpacingDecoration(
ListItemType.CHAPTER_GRID -> outRect.set(spacingSmall)
- ListItemType.TIP -> outRect.set(0) // TODO
+ ListItemType.TIP -> outRect.set(0, spacingSmall, 0, spacingSmall)
}
if (addHorizontalPadding && !itemType.isEdgeToEdge()) {
outRect.set(
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/reader/domain/PageLoader.kt b/app/src/main/kotlin/org/koitharu/kotatsu/reader/domain/PageLoader.kt
index e46e4c2..5f9886f 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/reader/domain/PageLoader.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/reader/domain/PageLoader.kt
@@ -3,6 +3,8 @@ package org.koitharu.kotatsu.reader.domain
import android.content.Context
import android.graphics.Rect
import android.net.Uri
+import android.net.NetworkCapabilities
+import android.os.Build
import androidx.annotation.AnyThread
import androidx.annotation.CheckResult
import androidx.collection.LongSparseArray
@@ -49,6 +51,7 @@ import org.koitharu.kotatsu.core.util.MimeTypes
import org.koitharu.kotatsu.core.util.ext.URI_SCHEME_ZIP
import org.koitharu.kotatsu.core.util.ext.cancelChildrenAndJoin
import org.koitharu.kotatsu.core.util.ext.compressToPNG
+import org.koitharu.kotatsu.core.util.ext.connectivityManager
import org.koitharu.kotatsu.core.util.ext.ensureRamAtLeast
import org.koitharu.kotatsu.core.util.ext.ensureSuccess
import org.koitharu.kotatsu.core.util.ext.getCompletionResultOrNull
@@ -104,7 +107,8 @@ class PageLoader @Inject constructor(
private var repository: MangaRepository? = null
private val prefetchQueue = LinkedList<MangaPage>()
private val counter = AtomicInteger(0)
- private var prefetchQueueLimit = PREFETCH_LIMIT_DEFAULT // TODO adaptive
+ private val prefetchQueueLimit: Int
+ get() = computeAdaptivePrefetchLimit()
private val edgeDetector = EdgeDetector(context)
fun isPrefetchApplicable(): Boolean {
@@ -312,6 +316,40 @@ class PageLoader @Inject constructor(
return context.ramAvailable <= FileSize.MEGABYTES.convert(PREFETCH_MIN_RAM_MB, FileSize.BYTES)
}
+ /**
+ * Computes adaptive prefetch queue limit based on:
+ * - Available RAM: More RAM = larger queue
+ * - Network type: Unmetered (WiFi) = larger queue, metered (cellular) = smaller queue
+ * - Power save mode: Reduced prefetch when active
+ */
+ private fun computeAdaptivePrefetchLimit(): Int {
+ // Base limit from RAM availability
+ val ramBytes = context.ramAvailable
+ val ramBasedLimit = when {
+ ramBytes >= FileSize.MEGABYTES.convert(512, FileSize.BYTES) -> PREFETCH_LIMIT_HIGH
+ ramBytes >= FileSize.MEGABYTES.convert(256, FileSize.BYTES) -> PREFETCH_LIMIT_DEFAULT
+ ramBytes >= FileSize.MEGABYTES.convert(128, FileSize.BYTES) -> PREFETCH_LIMIT_LOW
+ else -> PREFETCH_LIMIT_MINIMUM
+ }
+
+ // Adjust based on network conditions
+ val networkMultiplier = if (isNetworkUnmetered()) 1.0f else 0.5f
+
+ // Adjust for power save mode
+ val powerMultiplier = if (context.isPowerSaveMode()) 0.5f else 1.0f
+
+ return (ramBasedLimit * networkMultiplier * powerMultiplier)
+ .toInt()
+ .coerceIn(PREFETCH_LIMIT_MINIMUM, PREFETCH_LIMIT_HIGH)
+ }
+
+ private fun isNetworkUnmetered(): Boolean {
+ val cm = context.connectivityManager
+ val network = cm.activeNetwork ?: return false
+ val capabilities = cm.getNetworkCapabilities(network) ?: return false
+ return capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_NOT_METERED)
+ }
+
private fun Image.toImageSource(): ImageSource = if (this is BitmapImage) {
ImageSource.cachedBitmap(toBitmap())
} else {
@@ -335,7 +373,10 @@ class PageLoader @Inject constructor(
companion object {
private const val PROGRESS_UNDEFINED = -1f
+ private const val PREFETCH_LIMIT_MINIMUM = 2
+ private const val PREFETCH_LIMIT_LOW = 4
private const val PREFETCH_LIMIT_DEFAULT = 6
+ private const val PREFETCH_LIMIT_HIGH = 10
private const val PREFETCH_MIN_RAM_MB = 80L
fun createPageRequest(pageUrl: String, mangaSource: MangaSource) = Request.Builder()
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderActivity.kt b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderActivity.kt
index 05599a7..e6767a5 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderActivity.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderActivity.kt
@@ -83,7 +83,8 @@ class ReaderActivity :
IdlingDetector.Callback,
ZoomControl.ZoomControlListener,
View.OnClickListener,
- ScrollTimerControlView.OnVisibilityChangeListener {
+ ScrollTimerControlView.OnVisibilityChangeListener,
+ ReaderMenuProvider.Callback {
@Inject
lateinit var settings: AppSettings
@@ -194,7 +195,7 @@ class ReaderActivity :
viewModel.isZoomControlsEnabled.observe(this) {
viewBinding.zoomControl.isVisible = it
}
- addMenuProvider(ReaderMenuProvider(viewModel))
+ addMenuProvider(ReaderMenuProvider(viewModel, this))
observeWindowLayout()
@@ -207,6 +208,11 @@ class ReaderActivity :
return AppRouter.detailsIntent(this, manga)
}
+ override fun onOpenMangaInfo() {
+ val manga = viewModel.getMangaOrNull() ?: return
+ router.openDetails(manga)
+ }
+
override fun onUserInteraction() {
super.onUserInteraction()
if (!viewBinding.timerControl.isVisible) {
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderMenuProvider.kt b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderMenuProvider.kt
index fd93a20..0f699f3 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderMenuProvider.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderMenuProvider.kt
@@ -8,8 +8,13 @@ import org.koitharu.kotatsu.R
class ReaderMenuProvider(
private val viewModel: ReaderViewModel,
+ private val callback: Callback? = null,
) : MenuProvider {
+ interface Callback {
+ fun onOpenMangaInfo()
+ }
+
override fun onCreateMenu(menu: Menu, menuInflater: MenuInflater) {
menuInflater.inflate(R.menu.opt_reader, menu)
}
@@ -17,7 +22,7 @@ class ReaderMenuProvider(
override fun onMenuItemSelected(menuItem: MenuItem): Boolean {
return when (menuItem.itemId) {
R.id.action_info -> {
- // TODO
+ callback?.onOpenMangaInfo()
true
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderViewModel.kt b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderViewModel.kt
index 272b05e..ae6b1ce 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderViewModel.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/ReaderViewModel.kt
@@ -340,7 +340,10 @@ class ReaderViewModel @Inject constructor(
prevJob?.cancelAndJoin()
loadingJob?.join()
if (pages.size != content.value.pages.size) {
- return@launchJob // TODO
+ // Pages changed during coroutine execution - the position indices are now stale.
+ // A new onCurrentPageChanged call will occur with the updated page list,
+ // so we safely discard this stale update.
+ return@launchJob
}
val centerPos = (lowerPos + upperPos) / 2
pages.getOrNull(centerPos)?.let { page ->
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/pager/BasePageHolder.kt b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/pager/BasePageHolder.kt
index 59192fe..28f3cff 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/pager/BasePageHolder.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/reader/ui/pager/BasePageHolder.kt
@@ -142,7 +142,10 @@ abstract class BasePageHolder<B : ViewBinding>(
}
override fun onTrimMemory(level: Int) {
- // TODO
+ when {
+ level >= ComponentCallbacks2.TRIM_MEMORY_MODERATE -> ssiv.recycle()
+ level >= ComponentCallbacks2.TRIM_MEMORY_BACKGROUND -> ssiv.applyDownSampling(isForeground = false)
+ }
}
override fun onConfigurationChanged(newConfig: Configuration) = Unit
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleOfflineQueue.kt b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleOfflineQueue.kt
new file mode 100644
index 0000000..d19cefb
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleOfflineQueue.kt
@@ -0,0 +1,180 @@
+package org.koitharu.kotatsu.scrobbling.common.domain
+
+import android.content.Context
+import android.util.Log
+import dagger.hilt.android.qualifiers.ApplicationContext
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.flow.asStateFlow
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.scrobbling.common.domain.model.ScrobblerService
+import java.util.concurrent.TimeUnit
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Manages a persistent queue of failed scrobble operations for retry when network is available.
+ *
+ * Features:
+ * - SharedPreferences persistence across app restarts
+ * - Automatic expiration of old entries (default: 7 days)
+ * - Thread-safe operations
+ * - Observable queue size
+ * - Deduplication (same manga+chapter combination)
+ */
+@Singleton
+class ScrobbleOfflineQueue @Inject constructor(
+ @ApplicationContext private val context: Context,
+) {
+
+ private val _queueSize = MutableStateFlow(0)
+ val queueSize: StateFlow<Int> = _queueSize.asStateFlow()
+
+ private val queue = mutableListOf<ScrobbleQueueEntry>()
+ private val lock = Any()
+
+ private val prefs by lazy {
+ context.getSharedPreferences(PREFS_NAME, Context.MODE_PRIVATE)
+ }
+
+ init {
+ loadFromStorage()
+ }
+
+ /**
+ * Add a scrobble operation to the queue for later retry.
+ * Deduplicates entries with the same manga and chapter.
+ */
+ fun enqueue(service: ScrobblerService, mangaId: Long, chapterId: Long) {
+ synchronized(lock) {
+ // Remove any existing entry for the same manga/chapter/service
+ queue.removeAll { it.scrobblerService == service && it.mangaId == mangaId && it.chapterId == chapterId }
+
+ val entry = ScrobbleQueueEntry(
+ scrobblerService = service,
+ mangaId = mangaId,
+ chapterId = chapterId,
+ )
+ queue.add(entry)
+ saveToStorage()
+ _queueSize.value = queue.size
+ logDebug { "Enqueued scrobble: ${service.name} manga=$mangaId chapter=$chapterId (queue size: ${queue.size})" }
+ }
+ }
+
+ /**
+ * Get all pending entries for a specific scrobbler service.
+ */
+ fun getEntriesForService(service: ScrobblerService): List<ScrobbleQueueEntry> {
+ synchronized(lock) {
+ cleanupExpired()
+ return queue.filter { it.scrobblerService == service }.toList()
+ }
+ }
+
+ /**
+ * Get all pending entries.
+ */
+ fun getAllEntries(): List<ScrobbleQueueEntry> {
+ synchronized(lock) {
+ cleanupExpired()
+ return queue.toList()
+ }
+ }
+
+ /**
+ * Remove a specific entry from the queue (after successful processing).
+ */
+ fun remove(entry: ScrobbleQueueEntry) {
+ synchronized(lock) {
+ if (queue.remove(entry)) {
+ saveToStorage()
+ _queueSize.value = queue.size
+ logDebug { "Removed scrobble from queue: ${entry.scrobblerService.name} manga=${entry.mangaId}" }
+ }
+ }
+ }
+
+ /**
+ * Remove all entries for a specific manga (e.g., if manga is deleted).
+ */
+ fun removeForManga(mangaId: Long) {
+ synchronized(lock) {
+ val removed = queue.removeAll { it.mangaId == mangaId }
+ if (removed) {
+ saveToStorage()
+ _queueSize.value = queue.size
+ logDebug { "Removed all scrobbles for manga=$mangaId" }
+ }
+ }
+ }
+
+ /**
+ * Clear all pending entries.
+ */
+ fun clear() {
+ synchronized(lock) {
+ queue.clear()
+ prefs.edit().remove(KEY_QUEUE).apply()
+ _queueSize.value = 0
+ logDebug { "Cleared scrobble queue" }
+ }
+ }
+
+ /**
+ * Check if the queue is empty.
+ */
+ val isEmpty: Boolean
+ get() = synchronized(lock) { queue.isEmpty() }
+
+ /**
+ * Get the current queue size.
+ */
+ val size: Int
+ get() = synchronized(lock) { queue.size }
+
+ private fun cleanupExpired() {
+ val now = System.currentTimeMillis()
+ val expiredBefore = now - ENTRY_TTL_MS
+ val removed = queue.removeAll { it.timestamp < expiredBefore }
+ if (removed) {
+ saveToStorage()
+ _queueSize.value = queue.size
+ logDebug { "Cleaned up expired scrobble entries" }
+ }
+ }
+
+ private fun saveToStorage() {
+ val serialized = queue.map { it.serialize() }.toSet()
+ prefs.edit().putStringSet(KEY_QUEUE, serialized).apply()
+ }
+
+ private fun loadFromStorage() {
+ val serialized = prefs.getStringSet(KEY_QUEUE, emptySet()) ?: emptySet()
+ val now = System.currentTimeMillis()
+ val expiredBefore = now - ENTRY_TTL_MS
+
+ queue.clear()
+ serialized.mapNotNull { ScrobbleQueueEntry.deserialize(it) }
+ .filter { it.timestamp >= expiredBefore }
+ .forEach { queue.add(it) }
+
+ _queueSize.value = queue.size
+ logDebug { "Loaded ${queue.size} entries from scrobble queue" }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "ScrobbleOfflineQueue"
+ private const val PREFS_NAME = "scrobble_offline_queue"
+ private const val KEY_QUEUE = "queue"
+
+ /** Entries expire after 7 days */
+ val ENTRY_TTL_MS = TimeUnit.DAYS.toMillis(7)
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueEntry.kt b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueEntry.kt
new file mode 100644
index 0000000..5bf15cc
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueEntry.kt
@@ -0,0 +1,43 @@
+package org.koitharu.kotatsu.scrobbling.common.domain
+
+import org.koitharu.kotatsu.scrobbling.common.domain.model.ScrobblerService
+
+/**
+ * Represents a pending scrobble operation that failed due to network issues
+ * and needs to be retried when connectivity is restored.
+ */
+data class ScrobbleQueueEntry(
+ val scrobblerService: ScrobblerService,
+ val mangaId: Long,
+ val chapterId: Long,
+ val timestamp: Long = System.currentTimeMillis(),
+) {
+ /**
+ * Serialize to string for storage.
+ * Format: "scrobblerId:mangaId:chapterId:timestamp"
+ */
+ fun serialize(): String = "${scrobblerService.id}:$mangaId:$chapterId:$timestamp"
+
+ companion object {
+ /**
+ * Deserialize from string.
+ * @return ScrobbleQueueEntry or null if parsing fails
+ */
+ fun deserialize(data: String): ScrobbleQueueEntry? {
+ val parts = data.split(":")
+ if (parts.size != 4) return null
+ return try {
+ val scrobblerId = parts[0].toInt()
+ val service = ScrobblerService.entries.find { it.id == scrobblerId } ?: return null
+ ScrobbleQueueEntry(
+ scrobblerService = service,
+ mangaId = parts[1].toLong(),
+ chapterId = parts[2].toLong(),
+ timestamp = parts[3].toLong(),
+ )
+ } catch (e: NumberFormatException) {
+ null
+ }
+ }
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueWorker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueWorker.kt
new file mode 100644
index 0000000..af1c86c
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobbleQueueWorker.kt
@@ -0,0 +1,107 @@
+package org.koitharu.kotatsu.scrobbling.common.domain
+
+import android.content.Context
+import android.util.Log
+import androidx.hilt.work.HiltWorker
+import androidx.work.BackoffPolicy
+import androidx.work.Constraints
+import androidx.work.CoroutineWorker
+import androidx.work.ExistingWorkPolicy
+import androidx.work.NetworkType
+import androidx.work.OneTimeWorkRequestBuilder
+import androidx.work.WorkManager
+import androidx.work.WorkerParameters
+import dagger.assisted.Assisted
+import dagger.assisted.AssistedInject
+import kotlinx.coroutines.CancellationException
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.core.util.ext.printStackTraceDebug
+import java.util.concurrent.TimeUnit
+
+/**
+ * Worker that processes queued scrobbles when network is available.
+ *
+ * This worker is scheduled when:
+ * - A scrobble fails due to network issues
+ * - The app starts and there are queued scrobbles
+ * - Network connectivity is restored
+ */
+@HiltWorker
+class ScrobbleQueueWorker @AssistedInject constructor(
+ @Assisted context: Context,
+ @Assisted workerParams: WorkerParameters,
+ private val scrobblingManager: ScrobblingManager,
+ private val offlineQueue: ScrobbleOfflineQueue,
+) : CoroutineWorker(context, workerParams) {
+
+ override suspend fun doWork(): Result {
+ if (offlineQueue.isEmpty) {
+ logDebug { "Queue is empty, nothing to process" }
+ return Result.success()
+ }
+
+ return try {
+ val processed = scrobblingManager.processAllQueues()
+ logDebug { "Processed $processed queued scrobbles" }
+
+ if (offlineQueue.isEmpty) {
+ Result.success()
+ } else {
+ // Still have items, retry later
+ Result.retry()
+ }
+ } catch (e: CancellationException) {
+ throw e
+ } catch (e: Throwable) {
+ e.printStackTraceDebug()
+ Result.retry()
+ }
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "ScrobbleQueueWorker"
+ private const val WORK_NAME = "scrobble_queue_processor"
+
+ /**
+ * Schedule the queue processor to run when network is available.
+ */
+ fun schedule(context: Context) {
+ val constraints = Constraints.Builder()
+ .setRequiredNetworkType(NetworkType.CONNECTED)
+ .build()
+
+ val request = OneTimeWorkRequestBuilder<ScrobbleQueueWorker>()
+ .setConstraints(constraints)
+ .setBackoffCriteria(
+ BackoffPolicy.EXPONENTIAL,
+ 15,
+ TimeUnit.MINUTES,
+ )
+ .build()
+
+ WorkManager.getInstance(context)
+ .enqueueUniqueWork(
+ WORK_NAME,
+ ExistingWorkPolicy.REPLACE,
+ request,
+ )
+
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, "Scheduled queue processing")
+ }
+ }
+
+ /**
+ * Cancel any pending queue processing.
+ */
+ fun cancel(context: Context) {
+ WorkManager.getInstance(context).cancelUniqueWork(WORK_NAME)
+ }
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobblingManager.kt b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobblingManager.kt
new file mode 100644
index 0000000..8f4e518
--- /dev/null
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/scrobbling/common/domain/ScrobblingManager.kt
@@ -0,0 +1,195 @@
+package org.koitharu.kotatsu.scrobbling.common.domain
+
+import android.content.Context
+import android.util.Log
+import dagger.hilt.android.qualifiers.ApplicationContext
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.koitharu.kotatsu.BuildConfig
+import org.koitharu.kotatsu.core.db.MangaDatabase
+import org.koitharu.kotatsu.core.db.entity.toManga
+import org.koitharu.kotatsu.core.os.NetworkState
+import org.koitharu.kotatsu.parsers.model.Manga
+import org.koitharu.kotatsu.parsers.util.runCatchingCancellable
+import org.koitharu.kotatsu.scrobbling.common.domain.model.ScrobblerService
+import java.io.IOException
+import javax.inject.Inject
+import javax.inject.Singleton
+
+/**
+ * Manager for scrobbling operations with offline queue support.
+ *
+ * Features:
+ * - Automatic queueing when offline or on network failure
+ * - Processes queued scrobbles when network is available
+ * - Works with all scrobbler services
+ */
+@Singleton
+class ScrobblingManager @Inject constructor(
+ @ApplicationContext private val context: Context,
+ private val scrobblers: Set<@JvmSuppressWildcards Scrobbler>,
+ private val offlineQueue: ScrobbleOfflineQueue,
+ private val networkState: NetworkState,
+ private val db: MangaDatabase,
+) {
+
+ init {
+ // Schedule queue processing on startup if there are pending scrobbles
+ if (!offlineQueue.isEmpty) {
+ ScrobbleQueueWorker.schedule(context)
+ }
+ }
+
+ /**
+ * Scrobble a chapter read, queueing for later if offline or on failure.
+ *
+ * @param manga The manga being read
+ * @param chapterId The chapter that was read
+ * @return true if scrobbled immediately, false if queued for later
+ */
+ suspend fun scrobble(manga: Manga, chapterId: Long): Boolean {
+ val enabledScrobblers = scrobblers.filter { it.isEnabled }
+ if (enabledScrobblers.isEmpty()) {
+ return true // Nothing to do
+ }
+
+ var allSucceeded = true
+ for (scrobbler in enabledScrobblers) {
+ val success = tryScrobbleWithQueue(scrobbler, manga, chapterId)
+ if (!success) allSucceeded = false
+ }
+ return allSucceeded
+ }
+
+ /**
+ * Process all queued scrobbles for a specific service.
+ *
+ * @return Number of successfully processed entries
+ */
+ suspend fun processQueue(service: ScrobblerService): Int = withContext(Dispatchers.IO) {
+ if (!networkState.value) {
+ logDebug { "Skipping queue processing - offline" }
+ return@withContext 0
+ }
+
+ val scrobbler = scrobblers.find { it.scrobblerService == service }
+ if (scrobbler == null || !scrobbler.isEnabled) {
+ logDebug { "Skipping queue processing - scrobbler not enabled: ${service.name}" }
+ return@withContext 0
+ }
+
+ val entries = offlineQueue.getEntriesForService(service)
+ if (entries.isEmpty()) {
+ return@withContext 0
+ }
+
+ logDebug { "Processing ${entries.size} queued scrobbles for ${service.name}" }
+ var successCount = 0
+
+ for (entry in entries) {
+ val result = runCatchingCancellable {
+ // We need to get the manga from the database for scrobbling
+ processQueueEntry(scrobbler, entry.mangaId, entry.chapterId)
+ }
+
+ if (result.isSuccess) {
+ offlineQueue.remove(entry)
+ successCount++
+ logDebug { "Successfully processed queued scrobble: manga=${entry.mangaId}" }
+ } else {
+ val error = result.exceptionOrNull()
+ if (error !is IOException) {
+ // Non-network error, remove from queue to avoid infinite retries
+ offlineQueue.remove(entry)
+ logDebug { "Removed failed scrobble (non-network error): ${error?.message}" }
+ } else {
+ logDebug { "Will retry scrobble later: ${error.message}" }
+ }
+ }
+ }
+
+ return@withContext successCount
+ }
+
+ /**
+ * Process all queued scrobbles for all services.
+ *
+ * @return Total number of successfully processed entries
+ */
+ suspend fun processAllQueues(): Int {
+ var total = 0
+ for (service in ScrobblerService.entries) {
+ total += processQueue(service)
+ }
+ return total
+ }
+
+ /**
+ * Get the scrobbler for a specific service.
+ */
+ fun getScrobbler(service: ScrobblerService): Scrobbler? {
+ return scrobblers.find { it.scrobblerService == service }
+ }
+
+ private suspend fun tryScrobbleWithQueue(
+ scrobbler: Scrobbler,
+ manga: Manga,
+ chapterId: Long,
+ ): Boolean {
+ // If offline, queue immediately
+ if (!networkState.value) {
+ offlineQueue.enqueue(scrobbler.scrobblerService, manga.id, chapterId)
+ ScrobbleQueueWorker.schedule(context)
+ logDebug { "Offline - queued scrobble: ${scrobbler.scrobblerService.name} manga=${manga.id}" }
+ return false
+ }
+
+ // Try to scrobble
+ val result = runCatchingCancellable {
+ scrobbler.scrobble(manga, chapterId)
+ }
+
+ return if (result.isSuccess) {
+ true
+ } else {
+ val error = result.exceptionOrNull()
+ // Queue on network errors
+ if (error is IOException) {
+ offlineQueue.enqueue(scrobbler.scrobblerService, manga.id, chapterId)
+ ScrobbleQueueWorker.schedule(context)
+ logDebug { "Network error - queued scrobble: ${error.message}" }
+ } else {
+ // Log non-network errors but don't queue
+ logDebug { "Scrobble failed (not queuing): ${error?.message}" }
+ }
+ false
+ }
+ }
+
+ /**
+ * Process a queued scrobble entry by loading manga from database and calling scrobble.
+ */
+ private suspend fun processQueueEntry(scrobbler: Scrobbler, mangaId: Long, chapterId: Long) {
+ // Verify scrobbling entity exists
+ db.getScrobblingDao().find(scrobbler.scrobblerService.id, mangaId)
+ ?: throw IllegalStateException("No scrobbling entity found for manga $mangaId")
+
+ // Get manga from database
+ val mangaWithTags = db.getMangaDao().find(mangaId)
+ ?: throw IllegalStateException("Manga $mangaId not found in database")
+
+ // Convert to domain model and scrobble
+ val manga = mangaWithTags.toManga()
+ scrobbler.scrobble(manga, chapterId)
+ }
+
+ private inline fun logDebug(message: () -> String) {
+ if (BuildConfig.DEBUG) {
+ Log.d(TAG, message())
+ }
+ }
+
+ companion object {
+ private const val TAG = "ScrobblingManager"
+ }
+}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/settings/about/AppUpdateViewModel.kt b/app/src/main/kotlin/org/koitharu/kotatsu/settings/about/AppUpdateViewModel.kt
index cd5f0dc..f3b61ec 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/settings/about/AppUpdateViewModel.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/settings/about/AppUpdateViewModel.kt
@@ -14,6 +14,7 @@ import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.isActive
import org.koitharu.kotatsu.R
import org.koitharu.kotatsu.core.github.AppUpdateRepository
+import org.koitharu.kotatsu.core.prefs.AppSettings
import org.koitharu.kotatsu.core.ui.BaseViewModel
import org.koitharu.kotatsu.core.util.ext.MutableEventFlow
import org.koitharu.kotatsu.core.util.ext.call
@@ -23,6 +24,7 @@ import javax.inject.Inject
@HiltViewModel
class AppUpdateViewModel @Inject constructor(
private val repository: AppUpdateRepository,
+ private val settings: AppSettings,
@ApplicationContext context: Context,
) : BaseViewModel() {
@@ -76,6 +78,16 @@ class AppUpdateViewModel @Inject constructor(
}
}
+ /**
+ * Skip the current available update version.
+ * The user won't be notified about this version again.
+ */
+ fun skipCurrentVersion() {
+ nextVersion.value?.let { version ->
+ settings.skipVersion(version.name)
+ }
+ }
+
private suspend fun observeDownload(id: Long) {
val query = DownloadManager.Query()
query.setFilterById(id)
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/settings/work/WorkScheduleManager.kt b/app/src/main/kotlin/org/koitharu/kotatsu/settings/work/WorkScheduleManager.kt
index 794e964..88192e7 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/settings/work/WorkScheduleManager.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/settings/work/WorkScheduleManager.kt
@@ -1,8 +1,11 @@
package org.koitharu.kotatsu.settings.work
+import android.content.Context
import android.content.SharedPreferences
+import dagger.hilt.android.qualifiers.ApplicationContext
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
+import org.koitharu.kotatsu.core.github.AppUpdateCheckWorker
import org.koitharu.kotatsu.core.prefs.AppSettings
import org.koitharu.kotatsu.core.util.ext.processLifecycleScope
import org.koitharu.kotatsu.suggestions.ui.SuggestionsWorker
@@ -12,6 +15,7 @@ import javax.inject.Singleton
@Singleton
class WorkScheduleManager @Inject constructor(
+ @ApplicationContext private val context: Context,
private val settings: AppSettings,
private val suggestionScheduler: SuggestionsWorker.Scheduler,
private val trackerScheduler: TrackWorker.Scheduler,
@@ -33,6 +37,12 @@ class WorkScheduleManager @Inject constructor(
isEnabled = settings.isSuggestionsEnabled,
force = key != AppSettings.KEY_SUGGESTIONS,
)
+
+ AppSettings.KEY_AUTO_UPDATE_CHECK,
+ AppSettings.KEY_UPDATE_CHECK_WIFI_ONLY,
+ AppSettings.KEY_UPDATE_CHECK_INTERVAL -> {
+ AppUpdateCheckWorker.schedule(context, settings)
+ }
}
}
@@ -41,6 +51,8 @@ class WorkScheduleManager @Inject constructor(
processLifecycleScope.launch(Dispatchers.Default) {
updateWorkerImpl(trackerScheduler, settings.isTrackerEnabled, true) // always force due to adaptive interval
updateWorkerImpl(suggestionScheduler, settings.isSuggestionsEnabled, false)
+ // Schedule app update checks
+ AppUpdateCheckWorker.schedule(context, settings)
}
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/data/TrackEntity.kt b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/data/TrackEntity.kt
index 925508f..0e14643 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/data/TrackEntity.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/data/TrackEntity.kt
@@ -4,6 +4,7 @@ import androidx.annotation.IntDef
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.ForeignKey
+import androidx.room.Index
import androidx.room.PrimaryKey
import org.koitharu.kotatsu.core.db.entity.MangaEntity
@@ -17,6 +18,11 @@ import org.koitharu.kotatsu.core.db.entity.MangaEntity
onDelete = ForeignKey.CASCADE,
),
],
+ indices = [
+ Index(value = ["chapters_new"]),
+ Index(value = ["last_chapter_date"]),
+ Index(value = ["last_check_time"]),
+ ],
)
class TrackEntity(
@PrimaryKey(autoGenerate = false)
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/domain/CheckNewChaptersUseCase.kt b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/domain/CheckNewChaptersUseCase.kt
index b3dc35f..56d0485 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/domain/CheckNewChaptersUseCase.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/domain/CheckNewChaptersUseCase.kt
@@ -2,6 +2,11 @@ package org.koitharu.kotatsu.tracker.domain
import android.util.Log
import coil3.request.CachePolicy
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.channelFlow
+import kotlinx.coroutines.launch
+import kotlinx.coroutines.sync.Semaphore
+import kotlinx.coroutines.sync.withPermit
import org.koitharu.kotatsu.BuildConfig
import org.koitharu.kotatsu.core.model.getPreferredBranch
import org.koitharu.kotatsu.core.model.isLocal
@@ -44,6 +49,37 @@ class CheckNewChaptersUseCase @Inject constructor(
invokeImpl(track)
}
+ /**
+ * Check multiple tracks in parallel with configurable parallelism.
+ *
+ * @param tracks List of manga tracks to check
+ * @param parallelism Maximum number of concurrent checks (default: 6)
+ * @return Flow emitting MangaUpdates for each track as they complete
+ */
+ fun checkBatch(
+ tracks: List<MangaTracking>,
+ parallelism: Int = DEFAULT_PARALLELISM,
+ ): Flow<MangaUpdates> = channelFlow {
+ val semaphore = Semaphore(parallelism.coerceIn(1, MAX_PARALLELISM))
+ for (track in tracks) {
+ launch {
+ semaphore.withPermit {
+ val result = runCatchingCancellable {
+ mutex.withLock(track.manga.id) {
+ invokeImpl(track)
+ }
+ }.getOrElse { error ->
+ MangaUpdates.Failure(
+ manga = track.manga,
+ error = error,
+ )
+ }
+ send(result)
+ }
+ }
+ }
+ }
+
suspend operator fun invoke(manga: Manga, currentChapterId: Long) = mutex.withLock(manga.id) {
runCatchingCancellable {
repository.updateTracks()
@@ -149,4 +185,9 @@ class CheckNewChaptersUseCase @Inject constructor(
}
}
}
+
+ companion object {
+ const val DEFAULT_PARALLELISM = 6
+ const val MAX_PARALLELISM = 12
+ }
}
diff --git a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/work/TrackWorker.kt b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/work/TrackWorker.kt
index 5094feb..ad649dd 100644
--- a/app/src/main/kotlin/org/koitharu/kotatsu/tracker/work/TrackWorker.kt
+++ b/app/src/main/kotlin/org/koitharu/kotatsu/tracker/work/TrackWorker.kt
@@ -32,12 +32,8 @@ import dagger.assisted.AssistedInject
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.NonCancellable
import kotlinx.coroutines.flow.Flow
-import kotlinx.coroutines.flow.channelFlow
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.map
-import kotlinx.coroutines.launch
-import kotlinx.coroutines.sync.Semaphore
-import kotlinx.coroutines.sync.withPermit
import kotlinx.coroutines.withContext
import org.koitharu.kotatsu.BuildConfig
import org.koitharu.kotatsu.R
@@ -119,28 +115,12 @@ class TrackWorker @AssistedInject constructor(
@CheckResult
private suspend fun checkUpdatesAsync(tracks: List<MangaTracking>) {
- val semaphore = Semaphore(MAX_PARALLELISM)
+ val parallelism = settings.trackerParallelism
val groupNotifications = mutableListOf<Notification>()
try {
- channelFlow {
- for (track in tracks) {
- launch {
- semaphore.withPermit {
- send(
- runCatchingCancellable {
- checkNewChaptersUseCase.invoke(track)
- }.getOrElse { error ->
- MangaUpdates.Failure(
- manga = track.manga,
- error = error,
- )
- },
- )
- }
- }
- }
- }.onEachIndexed { index, it ->
+ checkNewChaptersUseCase.checkBatch(tracks, parallelism)
+ .onEachIndexed { index, it ->
if (applicationContext.checkNotificationPermission(WORKER_CHANNEL_ID)) {
notificationManager.notify(
WORKER_NOTIFICATION_ID,
@@ -365,7 +345,6 @@ class TrackWorker @AssistedInject constructor(
const val WORKER_NOTIFICATION_ID = 35
const val TAG = "tracking"
const val TAG_ONESHOT = "tracking_oneshot"
- const val MAX_PARALLELISM = 6
val BATCH_SIZE = if (BuildConfig.DEBUG) 20 else 46
const val SETTINGS_ACTION_CODE = 5
}
diff --git a/app/src/main/res/values/strings.xml b/app/src/main/res/values/strings.xml
index 8758ee8..24917db 100644
--- a/app/src/main/res/values/strings.xml
+++ b/app/src/main/res/values/strings.xml
@@ -416,6 +416,7 @@
Address
Port
Proxy
+ Proxy for WebView is not supported on this device
Invalid value
Kitsu
Enter your email and password to continue
@@ -766,6 +767,7 @@
More options
Destination directory
You can select chapters to download by long click on item in the chapter list.
+ Select chapter
All
Downloading over cellular network
diff --git a/app/src/test/kotlin/org/koitharu/kotatsu/core/github/VersionIdTest.kt b/app/src/test/kotlin/org/koitharu/kotatsu/core/github/VersionIdTest.kt
index 45b4144..51d2ac1 100644
--- a/app/src/test/kotlin/org/koitharu/kotatsu/core/github/VersionIdTest.kt
+++ b/app/src/test/kotlin/org/koitharu/kotatsu/core/github/VersionIdTest.kt
@@ -45,8 +45,14 @@ class VersionIdTest {
@Test
fun testCurrentVersion() {
+ val currentVersion = VersionId(BuildConfig.VERSION_NAME)
+ // Skip comparison for nightly builds since they use date-based versioning
+ if (currentVersion.variantType == "n") {
+ // Just verify nightly version was parsed correctly
+ assertTrue(currentVersion.build > 0)
+ return
+ }
val version1 = VersionId("2.4.6")
- val version2 = VersionId(BuildConfig.VERSION_NAME)
- assertTrue(version1 < version2)
+ assertTrue(version1 < currentVersion)
}
}
\ No newline at end of file
diff --git a/build.gradle b/build.gradle
index 809d5a7..517550b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -4,6 +4,6 @@ plugins {
alias(libs.plugins.hilt) apply false
alias(libs.plugins.ksp) apply false
alias(libs.plugins.room) apply false
- alias(libs.plugins.kotlinx.serizliation) apply false
+ alias(libs.plugins.kotlinx.serialization) apply false
// alias(libs.plugins.decoroutinator) apply false
}
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 1efbadd..c59b157 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -35,7 +35,8 @@ material = "1.14.0-alpha05"
moshi = "1.15.2"
okhttp = "5.2.1"
okio = "3.16.1"
-parsers = "e205a8706e"
+# YakaTeam/kotatsu-parsers - Using latest commit for recent fixes
+parsers = "b5b4d2cd43"
preference = "1.2.1"
recyclerview = "1.4.0"
room = "2.7.2"
@@ -122,7 +123,7 @@ androidx-window = { module = "androidx.window:window", version.ref = "window" }
android-application = { id = "com.android.application", version.ref = "gradle" }
hilt = { id = "com.google.dagger.hilt.android", version.ref = "dagger" }
kotlin = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" }
-kotlinx-serizliation = { id = "org.jetbrains.kotlin.plugin.serialization", version.ref = "kotlin" }
+kotlinx-serialization = { id = "org.jetbrains.kotlin.plugin.serialization", version.ref = "kotlin" }
ksp = { id = "com.google.devtools.ksp", version.ref = "ksp" }
room = { id = "androidx.room", version.ref = "room" }
decoroutinator = { id = "dev.reformator.stacktracedecoroutinator", version.ref = "decoroutinator" }