Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
53 commits
Select commit Hold shift + click to select a range
38422a8
Use `from_static` instead of `from_string`
Tokarak Mar 18, 2025
8720677
On error, fallback to cache; Use `hyper::Error`
Tokarak Mar 18, 2025
47771b5
Same fix for another function
Tokarak Mar 18, 2025
d6d399d
cargo fmt
Tokarak Mar 18, 2025
f304393
Fix wrong content served in previous commit
Tokarak Mar 18, 2025
eb7fbf4
Begin removing some deprecated `to_bytes`
Tokarak Mar 18, 2025
bac01e1
Start using http_api_problem
Tokarak Mar 19, 2025
8580a16
Migrate /style.css
Tokarak Mar 19, 2025
901123b
Migrate /manifest.json
Tokarak Mar 19, 2025
57592e2
Migrate /robots.txt
Tokarak Mar 19, 2025
84bf573
Migrate /favicon.ico
Tokarak Mar 19, 2025
bb15282
Migrate /logo.png
Tokarak Mar 19, 2025
77e7e07
Create helper macro cached_static_resource
Tokarak Mar 19, 2025
b0eedba
Migrate multiple static resources
Tokarak Mar 19, 2025
7cdfe90
Use reqwest HTTP client; Migrate /commits.atom
Tokarak Mar 19, 2025
d342c78
Migrate /instances.json
Tokarak Mar 19, 2025
d709c62
Add a proxy helper function
Tokarak Mar 19, 2025
083841e
Add Path extractor to proxy function
Tokarak Mar 20, 2025
8c42fdb
Cargo fix
Tokarak Mar 20, 2025
31a534f
Merge branch 'main' into pr-axum-refactor
Tokarak Mar 20, 2025
2202218
Migrate proxy routes
Tokarak Mar 20, 2025
f8059cc
axum::Router::remove_v07_checks()
Tokarak Mar 20, 2025
ff2165d
Reimplement proxy function, fix bugs
Tokarak Mar 20, 2025
0d868b7
Modify headers at proxy (Tested working!)
Tokarak Mar 20, 2025
f0b1abe
Migrate redirect
Tokarak Mar 21, 2025
c290893
Update oauth.rs to use reqwest and shared client
Tokarak Mar 23, 2025
01aaf39
Refactor cookie-getter idiomatic
Tokarak Apr 3, 2025
f897f9a
Port some Reddit client stuff
Tokarak Apr 3, 2025
ca6ef1e
Remove some cloning
Tokarak Apr 3, 2025
4f36dd3
Use CookieJar extractor
Tokarak Apr 4, 2025
6004e6a
Refactor parse_comments and query_comments
Tokarak Apr 4, 2025
47c5990
Refactor Comment struct initialiser
Tokarak Apr 4, 2025
d198bd6
UTIL: convert old Request->Cookiejar
Tokarak Apr 4, 2025
44e1fc8
Migrate old code new Comment init
Tokarak Apr 4, 2025
5bbc979
Begin migrating `item` fn
Tokarak Apr 4, 2025
7eea58e
Create a PathParameters struct
Tokarak Apr 4, 2025
4b9f940
Deps: Migrate rinja -> askama
Tokarak Apr 5, 2025
a70896e
Create nsfw_landingx
Tokarak Apr 5, 2025
114a244
Use Preference struct as extractor
Tokarak Apr 5, 2025
517370d
Improve memory sharing Preferences
Tokarak Apr 5, 2025
7c66b9c
Finish implementing itemx
Tokarak Apr 5, 2025
172e32c
Routing typo bugfixes
Tokarak Apr 5, 2025
d303c50
Change reddit_getx host to oauth
Tokarak Apr 5, 2025
52085c0
Handle trailing slash in path
Tokarak Apr 5, 2025
e69c12b
Modify pathparameters to optional
Tokarak Apr 5, 2025
b36e8c6
Remove unused dependency
Tokarak Apr 5, 2025
01556e8
Refactor nsfwlandingx less abstract
Tokarak Apr 18, 2025
afd51fd
Refactor profile page
Tokarak Apr 21, 2025
a8e8863
Add /u/[deleted] error page
Tokarak Apr 21, 2025
8fd2bfd
Migrate user rss feed
Tokarak Apr 22, 2025
adb6338
Small routing reorder
Tokarak Apr 24, 2025
77e2b09
Add preferences page
Tokarak Apr 28, 2025
6eb08c2
Add placeholder system.css theme
Tokarak Apr 28, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
983 changes: 882 additions & 101 deletions Cargo.lock

Large diffs are not rendered by default.

14 changes: 12 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ edition = "2021"
default-run = "redlib"

[dependencies]
rinja = { version = "0.3.4", default-features = false }
cached = { version = "0.54.0", features = ["async"] }
clap = { version = "4.4.11", default-features = false, features = [
"std",
Expand All @@ -23,7 +22,7 @@ regex = "1.10.2"
serde = { version = "1.0.193", features = ["derive"] }
cookie = "0.18.0"
futures-lite = "2.2.0"
hyper = { version = "0.14.31", features = ["full"] }
hyper = { version = "0.14.32", features = ["stream", "backports"] }
percent-encoding = "2.3.1"
route-recognizer = "0.3.1"
serde_json = "1.0.133"
Expand Down Expand Up @@ -56,6 +55,17 @@ htmlescape = "0.3.1"
bincode = "1.3.3"
base2048 = "2.0.2"
revision = "0.10.0"
http-api-problem = { version = "0.60", features = ["axum", "api-error"] }
futures-util = "0.3.31"
axum = { version = "0.8", features = [] }
tower-default-headers = "0.2.0"
reqwest = { version = "0.12.15", features = ["stream", "json", "gzip"] }
strfmt = "0.2.4"
axum-extra = { version = "0.10.0", features = ["cookie"] }
askama = "0.13.0"
tower = "0.5.2"
tower-http = { version = "0.6.2", features = ["normalize-path"] }
serde-inline-default = "0.2.3"


[dev-dependencies]
Expand Down
198 changes: 194 additions & 4 deletions src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,16 @@ use once_cell::sync::Lazy;
use percent_encoding::{percent_encode, CONTROLS};
use serde_json::Value;

use std::sync::atomic::Ordering;
use std::sync::atomic::{AtomicBool, AtomicU16};
use std::{io, result::Result};

use crate::dbg_msg;
use crate::oauth::{force_refresh_token, token_daemon, Oauth};
use crate::server::RequestExt;
use crate::utils::{format_url, Post};
use http_api_problem::ApiError;
use reqwest;
use std::sync::atomic::Ordering;
use std::sync::atomic::{AtomicBool, AtomicU16};
use std::{io, result::Result};
use strfmt;

const REDDIT_URL_BASE: &str = "https://oauth.reddit.com";
const REDDIT_URL_BASE_HOST: &str = "oauth.reddit.com";
Expand All @@ -34,6 +36,37 @@ pub static HTTPS_CONNECTOR: Lazy<HttpsConnector<HttpConnector>> =
Lazy::new(|| hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http2().build());

pub static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| Client::builder().build::<_, Body>(HTTPS_CONNECTOR.clone()));
/// Shared reqwest HTTP client used by the axum-based handlers (clippy: function pointer instead of a redundant closure).
pub static CLIENTX: Lazy<reqwest::Client> = Lazy::new(reqwest::Client::new);

/// Translates a `reqwest::Error` into an RFC 7807 `ApiError`.
///
/// Mapping:
/// - timeout / connect failures      -> 504 Gateway Timeout
/// - response body decoding failures -> 502 Bad Gateway
/// - upstream HTTP error statuses    -> the same status code, mirrored
/// - anything else                   -> 500 Internal Server Error
///
/// The original error is kept as the problem's `source` and its display text
/// is exposed in the `message` field.
pub fn into_api_error(e: reqwest::Error) -> ApiError {
	if e.is_timeout() || e.is_connect() {
		ApiError::builder(http_api_problem::StatusCode::GATEWAY_TIMEOUT) // 504
			.title("Gateway Timeout")
			.message(format!("{e}"))
			.source(e)
			.finish()
	} else if e.is_decode() {
		ApiError::builder(http_api_problem::StatusCode::BAD_GATEWAY) // 502
			.title("Bad Gateway")
			.message(format!("{e}"))
			.source(e)
			.finish()
	} else if let (true, Some(status)) = (e.is_status(), e.status()) {
		// Mirror the upstream error status instead of hiding it behind a generic 5xx.
		// (Pattern match replaces the previous `is_some()` + `unwrap()` pair.)
		ApiError::try_builder(status.as_u16())
			.expect("reqwest considers this HTTP status to be an error status, but http_api_problem does not.")
			.title(format!("Reddit Error {status}"))
			.message(format!("{e}"))
			.source(e)
			.finish()
	} else {
		ApiError::builder(http_api_problem::StatusCode::INTERNAL_SERVER_ERROR) // 500
			.title("Internal Server Error")
			.message(format!("{e}"))
			.source(e)
			.finish()
	}
}

pub static OAUTH_CLIENT: Lazy<ArcSwap<Oauth>> = Lazy::new(|| {
let client = block_on(Oauth::new());
Expand Down Expand Up @@ -149,6 +182,71 @@ pub async fn proxy(req: Request<Body>, format: &str) -> Result<Response<Body>, S
stream(&url, &req).await
}

/// Proxies a GET request to an upstream URL built from the `fmtstr` template.
///
/// `fmtstr` is a `strfmt`-style template whose placeholders are filled from the
/// request's path parameters; the incoming query string is appended verbatim.
/// Only a small allow-list of request headers is forwarded upstream, and
/// identifying/CDN response headers are stripped before the response is
/// streamed back to the client.
pub async fn proxy_get(
	axum::extract::Path(parameters): axum::extract::Path<std::collections::HashMap<String, String>>,
	mut req: axum::extract::Request,
	fmtstr: &str,
) -> impl axum::response::IntoResponse {
	// Format URI from fmtstr, then append any queries from the request.
	let uri = format!(
		"{}?{}",
		strfmt::strfmt(fmtstr, &parameters) // Format given uri, then append any queries
			.map_err(|e| {
				// Should fail only if the passed fmtstr parameter is formatted wrong. See strfmt docs.
				ApiError::builder(http_api_problem::StatusCode::INTERNAL_SERVER_ERROR)
					.title("Internal Server Error")
					.message(format!("Could not rewrite url: {e}"))
					.source(e)
					.finish()
			})?,
		req.uri().query().unwrap_or_default()
	);
	log::debug!("Forwarding {} request: {} to {}", req.method(), req.uri(), uri);
	// Change req URI to point at the upstream target.
	*req.uri_mut() = axum::http::Uri::try_from(uri).map_err(|e| {
		ApiError::builder(http_api_problem::StatusCode::BAD_REQUEST) // 400
			.title("Bad Request")
			.message(format!("Could not read uri: {e}"))
			.source(e)
			.finish()
	})?;

	// Filter request headers: forward only the cache/range-related allow-list.
	let mut new_headers = axum::http::HeaderMap::new();
	// NOTE: These header values are old Redlib code, and are only tested on GET requests.
	for &key in &["Range", "If-Modified-Since", "Cache-Control"] {
		if let Some(value) = req.headers().get(key) {
			new_headers.insert(key, value.clone());
		}
	}
	*req.headers_mut() = new_headers;

	// Convert the axum request into a reqwest one, streaming the body through without buffering.
	let req: reqwest::Request = req.map(|body| reqwest::Body::wrap_stream(body.into_data_stream())).try_into().map_err(&into_api_error)?;
	let response: reqwest::Response = CLIENTX
		.execute(req)
		.await
		.and_then(reqwest::Response::error_for_status)
		.map(|mut res| {
			// Remove unwanted headers that would leak upstream CDN/server details to the client.
			let mut rm = |key: &str| res.headers_mut().remove(key);
			rm("access-control-expose-headers");
			rm("server");
			rm("vary");
			rm("etag");
			rm("x-cdn");
			rm("x-cdn-client-region");
			rm("x-cdn-name");
			rm("x-cdn-server-region");
			rm("x-reddit-cdn");
			rm("x-reddit-video-features");
			rm("Nel");
			rm("Report-To");
			res
		})
		.map_err(&into_api_error)?;
	Ok::<axum::http::response::Response<reqwest::Body>, ApiError>(response.into())
}

async fn stream(url: &str, req: &Request<Body>) -> Result<Response<Body>, String> {
// First parameter is target URL (mandatory).
let parsed_uri = url.parse::<Uri>().map_err(|_| "Couldn't parse URL".to_string())?;
Expand Down Expand Up @@ -197,6 +295,10 @@ fn reddit_get(path: String, quarantine: bool) -> Boxed<Result<Response<Body>, St
request(&Method::GET, path, true, quarantine, REDDIT_URL_BASE, REDDIT_URL_BASE_HOST)
}

/// Makes a GET request to Reddit at `path`, using the OAuth URL base.
async fn reddit_getx(path: &str, quarantine: bool) -> Result<reqwest::Response, ApiError> {
	reddit_request(reqwest::Method::GET, path, quarantine, REDDIT_URL_BASE, REDDIT_URL_BASE_HOST).await
}

/// Makes a HEAD request to Reddit at `path`, using the short URL base. This will not follow redirects.
fn reddit_short_head(path: String, quarantine: bool, base_path: &'static str, host: &'static str) -> Boxed<Result<Response<Body>, String>> {
request(&Method::HEAD, path, false, quarantine, base_path, host)
Expand All @@ -208,6 +310,35 @@ fn reddit_short_head(path: String, quarantine: bool, base_path: &'static str, ho
// }
// Unused - reddit_head is only ever called in the context of a short URL

/// Sends a request to Reddit via the shared reqwest client, attaching the
/// OAuth session headers and, when `quarantine` is set, the quarantine/gated
/// opt-in cookie. Non-2xx upstream statuses are converted into an `ApiError`.
async fn reddit_request(method: reqwest::Method, path: &str, quarantine: bool, base_path: &'static str, host: &'static str) -> Result<reqwest::Response, ApiError> {
	use reqwest::header::{self, HeaderMap, HeaderValue};

	let url = format!("{base_path}/{path}");

	// Build request headers for Reddit. Reqwest handles gzip encoding. Reddit does not yet support Brotli encoding.
	let mut headers = HeaderMap::new();
	headers.append(header::HOST, HeaderValue::from_static(host)); // FIXME: Reddit can fingerprint. Either shuffle headers, or add dynamic headers.
	if quarantine {
		headers.append(
			header::COOKIE,
			HeaderValue::from_static("_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D"),
		);
	}

	// Merge in the current OAuth client's session headers.
	let oauth = OAUTH_CLIENT.load();
	let oauth_headers: HeaderMap<HeaderValue> = HeaderMap::try_from(&oauth.headers_map).expect("Invalid hashmap of headers");
	headers.extend(oauth_headers);

	CLIENTX
		.request(method, url)
		.headers(headers)
		.send()
		.await
		.and_then(reqwest::Response::error_for_status)
		.map_err(into_api_error)
}

/// Makes a request to Reddit. If `redirect` is `true`, `request_with_redirect`
/// will recurse on the URL that Reddit provides in the Location HTTP header
/// in its response.
Expand Down Expand Up @@ -356,6 +487,65 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo
.boxed()
}

/// Makes a request to the Reddit API at `path` and parses the JSON response.
///
/// Results are cached for 30 seconds per (`path`, `quarantine`) pair. Also
/// maintains the OAuth rate-limit bookkeeping: spawns a token refresh when the
/// remaining budget is low, and updates the counter from Reddit's headers.
#[cached(size = 100, time = 30, result = true)]
pub async fn jsonx(path: String, quarantine: bool) -> Result<Value, ApiError> {
	// First, handle rolling over the OAUTH_CLIENT if need be.
	let current_rate_limit = OAUTH_RATELIMIT_REMAINING.load(Ordering::SeqCst);
	let is_rolling_over = OAUTH_IS_ROLLING_OVER.load(Ordering::SeqCst);
	if current_rate_limit < 10 && !is_rolling_over {
		log::info!("Rate limit {current_rate_limit} is low. Spawning force_refresh_token()");
		tokio::spawn(force_refresh_token());
	}
	// Optimistically charge this request against the rate limit before sending it.
	OAUTH_RATELIMIT_REMAINING.fetch_sub(1, Ordering::SeqCst);

	let response = reddit_getx(&path, quarantine).await?;

	// Handle OAUTH stuff: read Reddit's rate-limit bookkeeping headers.
	let reset = response.headers().get("x-ratelimit-reset").map(|val| String::from_utf8_lossy(val.as_bytes()));
	let remaining = response.headers().get("x-ratelimit-remaining").map(|val| String::from_utf8_lossy(val.as_bytes()));
	let used = response.headers().get("x-ratelimit-used").map(|val| String::from_utf8_lossy(val.as_bytes()));
	trace!(
		"Ratelimit remaining: Header says {}, we have {current_rate_limit}. Resets in {}. Rollover: {}. Ratelimit used: {}",
		remaining.as_deref().unwrap_or_default(),
		reset.as_deref().unwrap_or_default(),
		if is_rolling_over { "yes" } else { "no" },
		used.as_deref().unwrap_or_default(),
	);
	// Reddit reports the remaining budget as a float; round it into our u16 counter.
	if let Some(val) = remaining.and_then(|s| s.parse::<f32>().ok()) {
		OAUTH_RATELIMIT_REMAINING.store(val.round() as u16, Ordering::SeqCst);
	}

	// Work with the JSON.
	let json: Value = response.json().await.map_err(|e| {
		ApiError::builder(http_api_problem::StatusCode::BAD_GATEWAY) // 502
			.title("Bad Gateway")
			.message(format!("Failed to parse page JSON data: {e}"))
			.source(e)
			.finish()
	})?;

	// FIXME: If we get http 401, with error message "Unauthorized", force a token refresh. Currently 401 is handled by `into_api_error`. Perhaps reddit_request can handle oauth stuff

	// Suspended users come back as a 200 with a flag in the body.
	if let Some(true) = json["data"]["is_suspended"].as_bool() {
		return Err(
			ApiError::builder(http_api_problem::StatusCode::NOT_FOUND)
				.title("Suspended")
				.message("user is suspended")
				.finish(),
		);
	}
	// Reddit also signals application-level errors inside the JSON body.
	if let Some(error_code) = json["error"].as_i64() {
		// Err(format!("Reddit error {} \"{}\": {} | {path}", json["error"], json["reason"], json["message"]));
		return Err(
			ApiError::builder(http_api_problem::StatusCode::NOT_FOUND)
				.title(format!("Reddit Error {error_code}: {}", json["reason"]))
				.message(json["message"].as_str().unwrap_or_default())
				.finish(),
		);
	}
	Ok(json)
}

// Make a request to a Reddit API and parse the JSON response
#[cached(size = 100, time = 30, result = true)]
pub async fn json(path: String, quarantine: bool) -> Result<Value, String> {
Expand Down
61 changes: 33 additions & 28 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,10 @@ impl Config {
// Return the first non-`None` value
// If all are `None`, return `None`
let legacy_key = key.replace("REDLIB_", "LIBREDDIT_");
var(key).ok().or_else(|| var(legacy_key).ok()).or_else(|| get_setting_from_config(key, &config))
var(key)
.ok()
.or_else(|| var(legacy_key).ok())
.or_else(|| get_setting_from_config(key, &config).map(|s| s.to_string()))
};
Self {
sfw_only: parse("REDLIB_SFW_ONLY"),
Expand Down Expand Up @@ -164,38 +167,40 @@ impl Config {
}
}

fn get_setting_from_config(name: &str, config: &Config) -> Option<String> {
fn get_setting_from_config<'a>(name: &str, config: &'a Config) -> Option<&'a str> {
match name {
"REDLIB_SFW_ONLY" => config.sfw_only.clone(),
"REDLIB_DEFAULT_THEME" => config.default_theme.clone(),
"REDLIB_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(),
"REDLIB_DEFAULT_LAYOUT" => config.default_layout.clone(),
"REDLIB_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(),
"REDLIB_DEFAULT_POST_SORT" => config.default_post_sort.clone(),
"REDLIB_DEFAULT_BLUR_SPOILER" => config.default_blur_spoiler.clone(),
"REDLIB_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(),
"REDLIB_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(),
"REDLIB_DEFAULT_USE_HLS" => config.default_use_hls.clone(),
"REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(),
"REDLIB_DEFAULT_WIDE" => config.default_wide.clone(),
"REDLIB_DEFAULT_HIDE_AWARDS" => config.default_hide_awards.clone(),
"REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY" => config.default_hide_sidebar_and_summary.clone(),
"REDLIB_DEFAULT_HIDE_SCORE" => config.default_hide_score.clone(),
"REDLIB_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(),
"REDLIB_DEFAULT_FILTERS" => config.default_filters.clone(),
"REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(),
"REDLIB_BANNER" => config.banner.clone(),
"REDLIB_ROBOTS_DISABLE_INDEXING" => config.robots_disable_indexing.clone(),
"REDLIB_PUSHSHIFT_FRONTEND" => config.pushshift.clone(),
"REDLIB_ENABLE_RSS" => config.enable_rss.clone(),
"REDLIB_FULL_URL" => config.full_url.clone(),
"REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => config.default_remove_default_feeds.clone(),
_ => None,
"REDLIB_SFW_ONLY" => &config.sfw_only,
"REDLIB_DEFAULT_THEME" => &config.default_theme,
"REDLIB_DEFAULT_FRONT_PAGE" => &config.default_front_page,
"REDLIB_DEFAULT_LAYOUT" => &config.default_layout,
"REDLIB_DEFAULT_COMMENT_SORT" => &config.default_comment_sort,
"REDLIB_DEFAULT_POST_SORT" => &config.default_post_sort,
"REDLIB_DEFAULT_BLUR_SPOILER" => &config.default_blur_spoiler,
"REDLIB_DEFAULT_SHOW_NSFW" => &config.default_show_nsfw,
"REDLIB_DEFAULT_BLUR_NSFW" => &config.default_blur_nsfw,
"REDLIB_DEFAULT_USE_HLS" => &config.default_use_hls,
"REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION" => &config.default_hide_hls_notification,
"REDLIB_DEFAULT_WIDE" => &config.default_wide,
"REDLIB_DEFAULT_HIDE_AWARDS" => &config.default_hide_awards,
"REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY" => &config.default_hide_sidebar_and_summary,
"REDLIB_DEFAULT_HIDE_SCORE" => &config.default_hide_score,
"REDLIB_DEFAULT_SUBSCRIPTIONS" => &config.default_subscriptions,
"REDLIB_DEFAULT_FILTERS" => &config.default_filters,
"REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => &config.default_disable_visit_reddit_confirmation,
"REDLIB_BANNER" => &config.banner,
"REDLIB_ROBOTS_DISABLE_INDEXING" => &config.robots_disable_indexing,
"REDLIB_PUSHSHIFT_FRONTEND" => &config.pushshift,
"REDLIB_ENABLE_RSS" => &config.enable_rss,
"REDLIB_FULL_URL" => &config.full_url,
"REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => &config.default_remove_default_feeds,
_ => &None,
}
.as_ref()
.map(String::as_str)
}

/// Retrieves setting from environment variable or config file.
pub fn get_setting(name: &str) -> Option<String> {
pub fn get_setting(name: &str) -> Option<&'static str> {
get_setting_from_config(name, &CONFIG)
}

Expand Down
6 changes: 3 additions & 3 deletions src/duplicates.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ use crate::server::RequestExt;
use crate::subreddit::{can_access_quarantine, quarantine};
use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, template, Post, Preferences};

use askama::Template;
use hyper::{Body, Request, Response};
use rinja::Template;
use serde_json::Value;
use std::borrow::ToOwned;
use std::collections::HashSet;
Expand Down Expand Up @@ -43,7 +43,7 @@ struct DuplicatesTemplate {

/// num_posts_filtered counts how many posts were filtered from the
/// duplicates list.
num_posts_filtered: u64,
num_posts_filtered: usize,

/// all_posts_filtered is true if every duplicate was filtered. This is an
/// edge case but can still happen.
Expand Down Expand Up @@ -221,7 +221,7 @@ pub async fn item(req: Request<Body>) -> Result<Response<Body>, String> {
}

// DUPLICATES
async fn parse_duplicates(json: &Value, filters: &HashSet<String>) -> (Vec<Post>, u64, bool) {
async fn parse_duplicates(json: &Value, filters: &HashSet<String>) -> (Vec<Post>, usize, bool) {
let post_duplicates: &Vec<Value> = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned);
let mut duplicates: Vec<Post> = Vec::new();

Expand Down
2 changes: 1 addition & 1 deletion src/instance_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@ use crate::{
server::RequestExt,
utils::{ErrorTemplate, Preferences},
};
use askama::Template;
use build_html::{Container, Html, HtmlContainer, Table};
use hyper::{http::Error, Body, Request, Response};
use once_cell::sync::Lazy;
use rinja::Template;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

Expand Down
Loading