Skip to content

Commit ac352bc

Browse files
committed
1 parent 182230d commit ac352bc

8 files changed

Lines changed: 240 additions & 52 deletions

File tree

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ fastrand = "2.0.1"
4747
log = "0.4.20"
4848
pretty_env_logger = "0.5.0"
4949
dotenvy = "0.15.7"
50-
rss = "2.0.7"
50+
rss = "2.0.12"
5151
arc-swap = "1.7.1"
5252
serde_json_path = "0.7.1"
5353
async-recursion = "1.1.1"

src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ pub mod instance_info;
55
pub mod oauth;
66
pub mod oauth_resources;
77
pub mod post;
8+
pub mod redgifs;
89
pub mod search;
910
pub mod server;
1011
pub mod settings;

src/main.rs

Lines changed: 22 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -83,9 +83,9 @@ async fn resource(body: &str, content_type: &str, cache: bool) -> Result<Respons
8383
.unwrap_or_default();
8484

8585
if cache {
86-
if let Ok(val) = HeaderValue::from_str("public, max-age=1209600, s-maxage=86400") {
87-
res.headers_mut().insert("Cache-Control", val);
88-
}
86+
res
87+
.headers_mut()
88+
.insert("Cache-Control", HeaderValue::from_static("public, max-age=1209600, s-maxage=86400"));
8989
}
9090

9191
Ok(res)
@@ -260,8 +260,8 @@ async fn main() {
260260
.get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
261261
app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed());
262262

263-
app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());
264-
app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed());
263+
app.at("/commits.atom").get(|_| async move { Ok(proxy_commit_info().await.unwrap()) }.boxed()); // TODO: see below
264+
app.at("/instances.json").get(|_| async move { Ok(proxy_instances().await.unwrap()) }.boxed()); // TODO: In the process of migrating error handling. (I recommend the thiserror crate for dynamic errors.) No proper error handling yet, so functionality is unimpacted.
265265

266266
// Proxy media through Redlib
267267
app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
@@ -278,6 +278,9 @@ async fn main() {
278278
app.at("/preview/:loc/:id").get(|r| proxy(r, "https://{loc}view.redd.it/{id}").boxed());
279279
app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed());
280280
app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed());
281+
282+
// RedGifs proxy with lazy loading
283+
app.at("/redgifs/*path").get(|req| redlib::redgifs::handler(req).boxed());
281284

282285
// Browse user profile
283286
app
@@ -421,40 +424,36 @@ async fn main() {
421424
}
422425
}
423426

424-
pub async fn proxy_commit_info() -> Result<Response<Body>, String> {
427+
pub async fn proxy_commit_info() -> Result<Response<Body>, hyper::Error> {
425428
Ok(
426429
Response::builder()
427430
.status(200)
428431
.header("content-type", "application/atom+xml")
429-
.body(Body::from(fetch_commit_info().await))
432+
.body(Body::from(fetch_commit_info().await?))
430433
.unwrap_or_default(),
431434
)
432435
}
433436

434-
#[cached(time = 600)]
435-
async fn fetch_commit_info() -> String {
436-
let uri = Uri::from_str("https://github.com/redlib-org/redlib/commits/main.atom").expect("Invalid URI");
437-
438-
let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body();
439-
440-
hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
437+
#[cached(time = 600, result = true, result_fallback = true)]
438+
async fn fetch_commit_info() -> Result<String, hyper::Error> {
439+
let uri = Uri::from_static("https://github.com/redlib-org/redlib/commits/main.atom");
440+
let resp: Body = CLIENT.get(uri).await?.into_body(); // Could fail if there is no internet
441+
Ok(hyper::body::to_bytes(resp).await?.iter().copied().map(|x| x as char).collect())
441442
}
442443

443-
pub async fn proxy_instances() -> Result<Response<Body>, String> {
444+
pub async fn proxy_instances() -> Result<Response<Body>, hyper::Error> {
444445
Ok(
445446
Response::builder()
446447
.status(200)
447448
.header("content-type", "application/json")
448-
.body(Body::from(fetch_instances().await))
449+
.body(Body::from(fetch_instances().await?)) // Could fail if no internet
449450
.unwrap_or_default(),
450451
)
451452
}
452453

453-
#[cached(time = 600)]
454-
async fn fetch_instances() -> String {
455-
let uri = Uri::from_str("https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json").expect("Invalid URI");
456-
457-
let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body();
458-
459-
hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect()
454+
#[cached(time = 600, result = true, result_fallback = true)]
455+
async fn fetch_instances() -> Result<String, hyper::Error> {
456+
let uri = Uri::from_static("https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json");
457+
let resp: Body = CLIENT.get(uri).await?.into_body(); // Could fail if no internet
458+
Ok(hyper::body::to_bytes(resp).await?.iter().copied().map(|x| x as char).collect())
460459
}

src/redgifs.rs

Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
use hyper::{Body, Request, Response};
2+
use serde_json::Value;
3+
use std::sync::LazyLock;
4+
5+
use crate::client::{proxy, CLIENT};
6+
use crate::server::RequestExt;
7+
8+
// RedGifs token cache: (token, expiry_timestamp)
9+
static REDGIFS_TOKEN: LazyLock<std::sync::Mutex<(String, i64)>> = LazyLock::new(|| std::sync::Mutex::new((String::new(), 0)));
10+
11+
pub fn is_redgifs_domain(domain: &str) -> bool {
12+
domain == "redgifs.com" || domain == "www.redgifs.com" || domain.ends_with(".redgifs.com")
13+
}
14+
15+
/// Handles both video IDs (redirects) and actual video files (proxies)
16+
pub async fn handler(req: Request<Body>) -> Result<Response<Body>, String> {
17+
let path = req.param("path").unwrap_or_default();
18+
19+
if path.ends_with(".mp4") {
20+
return proxy(req, &format!("https://media.redgifs.com/{}", path)).await;
21+
}
22+
23+
match fetch_video_url(&format!("https://www.redgifs.com/watch/{}", path)).await.ok() {
24+
Some(video_url) => {
25+
let filename = video_url.strip_prefix("https://media.redgifs.com/").unwrap_or(&video_url);
26+
Ok(Response::builder()
27+
.status(302)
28+
.header("Location", format!("/redgifs/{}", filename))
29+
.body(Body::empty())
30+
.unwrap_or_default())
31+
}
32+
None => Ok(Response::builder().status(404).body("RedGifs video not found".into()).unwrap_or_default()),
33+
}
34+
}
35+
36+
async fn fetch_video_url(redgifs_url: &str) -> Result<String, String> {
37+
let video_id = redgifs_url
38+
.split('/')
39+
.last()
40+
.and_then(|s| s.split('?').next())
41+
.ok_or("Invalid RedGifs URL")?;
42+
43+
let token = get_token().await?;
44+
let api_url = format!("https://api.redgifs.com/v2/gifs/{}?views=yes", video_id);
45+
46+
let req = create_request(&api_url, Some(&token))?;
47+
let res = CLIENT.request(req).await.map_err(|e| e.to_string())?;
48+
let body_bytes = hyper::body::to_bytes(res.into_body()).await.map_err(|e| e.to_string())?;
49+
let json: Value = serde_json::from_slice(&body_bytes).map_err(|e| e.to_string())?;
50+
51+
// Prefer HD, fallback to SD
52+
let hd_url = json["gif"]["urls"]["hd"].as_str();
53+
let sd_url = json["gif"]["urls"]["sd"].as_str();
54+
55+
hd_url
56+
.or(sd_url)
57+
.map(String::from)
58+
.ok_or_else(|| "No video URL in RedGifs response".to_string())
59+
}
60+
61+
async fn get_token() -> Result<String, String> {
62+
let now = std::time::SystemTime::now()
63+
.duration_since(std::time::UNIX_EPOCH)
64+
.map_err(|_| "Time error")?
65+
.as_secs() as i64;
66+
67+
// Return cached token if still valid (without holding lock across await)
68+
{
69+
let cache = REDGIFS_TOKEN.lock().map_err(|_| "Lock error")?;
70+
if !cache.0.is_empty() && now < cache.1 {
71+
return Ok(cache.0.clone());
72+
}
73+
}
74+
75+
let req = create_request("https://api.redgifs.com/v2/auth/temporary", None)?;
76+
let res = CLIENT.request(req).await.map_err(|e| e.to_string())?;
77+
let body_bytes = hyper::body::to_bytes(res.into_body()).await.map_err(|e| e.to_string())?;
78+
let json: Value = serde_json::from_slice(&body_bytes).map_err(|e| e.to_string())?;
79+
let token = json["token"].as_str().map(String::from).ok_or_else(|| "No token in RedGifs response".to_string())?;
80+
81+
let mut cache = REDGIFS_TOKEN.lock().map_err(|_| "Lock error")?;
82+
cache.0 = token.clone();
83+
cache.1 = now + 86000; // 24h - 400s buffer
84+
Ok(token)
85+
}
86+
87+
fn create_request(url: &str, token: Option<&str>) -> Result<Request<Body>, String> {
88+
let mut builder = hyper::Request::get(url)
89+
.header("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
90+
.header("referer", "https://www.redgifs.com/")
91+
.header("origin", "https://www.redgifs.com")
92+
.header("content-type", "application/json");
93+
94+
if let Some(t) = token {
95+
builder = builder.header("Authorization", format!("Bearer {}", t));
96+
}
97+
98+
builder.body(Body::empty()).map_err(|e| e.to_string())
99+
}

src/subreddit.rs

Lines changed: 87 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33
use crate::{config, utils};
44
// CRATES
55
use crate::utils::{
6-
catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, val, Post, Preferences,
7-
Subreddit,
6+
Post, Preferences, Subreddit, catch_random, error, filter_posts, format_num, format_url, get_filters, info, nsfw_landing, param, redirect, rewrite_urls, setting, template, to_absolute_url, val
87
};
98
use crate::{client::json, server::RequestExt, server::ResponseExt};
109
use askama::Template;
@@ -14,6 +13,7 @@ use hyper::{Body, Request, Response};
1413

1514
use chrono::DateTime;
1615
use regex::Regex;
16+
use rss::{ChannelBuilder, Item, Enclosure};
1717
use std::sync::LazyLock;
1818
use time::{Duration, OffsetDateTime};
1919

@@ -68,7 +68,7 @@ pub async fn community(req: Request<Body>) -> Result<Response<Body>, String> {
6868
let subscribed = setting(&req, "subscriptions");
6969
let front_page = setting(&req, "front_page");
7070
let remove_default_feeds = setting(&req, "remove_default_feeds") == "on";
71-
let post_sort = req.cookie("post_sort").map_or_else(|| "hot".to_string(), |c| c.value().to_string());
71+
let post_sort = setting(&req, "post_sort");
7272
let sort = req.param("sort").unwrap_or_else(|| req.param("id").unwrap_or(post_sort));
7373

7474
let sub_name = req.param("sub").unwrap_or(if front_page == "default" || front_page.is_empty() {
@@ -595,7 +595,6 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
595595
}
596596

597597
use hyper::header::CONTENT_TYPE;
598-
use rss::{ChannelBuilder, Item};
599598

600599
// Get subreddit
601600
let sub = req.param("sub").unwrap_or_default();
@@ -605,6 +604,9 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
605604
// Get path
606605
let path = format!("/r/{sub}/{sort}.json?{}", req.uri().query().unwrap_or_default());
607606

607+
// Get subreddit link
608+
let subreddit_link: String = format!("{}/r/{sub}", config::get_setting("REDLIB_FULL_URL").unwrap_or_default());
609+
608610
// Get subreddit data
609611
let subreddit = subreddit(&sub, false).await?;
610612

@@ -615,21 +617,23 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
615617
let channel = ChannelBuilder::default()
616618
.title(&subreddit.title)
617619
.description(&subreddit.description)
620+
.link(&subreddit_link)
618621
.items(
619622
posts
620623
.into_iter()
621-
.map(|post| Item {
622-
title: Some(post.title.to_string()),
623-
link: Some(format_url(&utils::get_post_url(&post))),
624-
author: Some(post.author.name),
625-
content: Some(rewrite_urls(&decode_html(&post.body).unwrap())),
626-
pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
627-
description: Some(format!(
628-
"<a href='{}{}'>Comments</a>",
629-
config::get_setting("REDLIB_FULL_URL").unwrap_or_default(),
630-
post.permalink
631-
)),
632-
..Default::default()
624+
.map(|post| {
625+
let mut item = Item {
626+
title: Some(post.title.to_string()),
627+
link: Some(format_url(&utils::get_post_url(&post))),
628+
author: Some(post.author.name.to_string()),
629+
content: Some(rewrite_urls(&decode_html(&post.body).unwrap())),
630+
pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()),
631+
description: Some(format!("<a href='{}'>Comments</a>", to_absolute_url(&post.permalink))),
632+
..Default::default()
633+
};
634+
635+
apply_enclosure(&mut item, &post);
636+
item
633637
})
634638
.collect::<Vec<_>>(),
635639
)
@@ -645,6 +649,73 @@ pub async fn rss(req: Request<Body>) -> Result<Response<Body>, String> {
645649
Ok(res)
646650
}
647651

652+
// Set enclosure image for RSS feed item
653+
fn apply_enclosure(item: &mut Item, post: &Post) {
654+
item.set_enclosure(get_rss_image(&post));
655+
656+
// Embed the number of gallery images in description and content since
657+
// only the first image in the gallery is used for the enclosure
658+
if post.post_type == "gallery" && post.gallery.len() > 1 {
659+
item.set_description(
660+
format!("<a href='{}'>Gallery with {} images</a>",
661+
to_absolute_url(&post.permalink),
662+
post.gallery.len()
663+
)
664+
);
665+
666+
if let Some(content) = item.content() {
667+
let new_content = format!(
668+
"{}<br/>{}",
669+
item.description().unwrap_or(""),
670+
content,
671+
);
672+
item.set_content(new_content);
673+
}
674+
}
675+
676+
}
677+
678+
fn get_rss_image(post: &Post) -> Option<Enclosure> {
679+
let image_url = match post.post_type.as_str() {
680+
"image" => Some(post.media.url.clone()),
681+
"gallery" => post.gallery.get(0).and_then(|media| decode_html(&media.url).ok()),
682+
"gif" | "video" => decode_html(&post.media.poster).ok(),
683+
_ => None,
684+
};
685+
686+
image_url.map(|url| {
687+
let mut enclosure = Enclosure::default();
688+
enclosure.set_mime_type(get_mime_type(&url));
689+
enclosure.set_url(to_absolute_url(&url));
690+
enclosure.set_length("0");
691+
enclosure
692+
})
693+
}
694+
695+
/// Determines the MIME type based on file extension in a URL.
696+
/// Handles both absolute and relative URLs with query parameters.
697+
fn get_mime_type(url: &str) -> &'static str {
698+
// Extract the path component, removing query parameters
699+
let path = url.split('?').next().unwrap_or(url);
700+
701+
// Get the file extension (everything after the last dot)
702+
let extension = path
703+
.rsplit('.')
704+
.next()
705+
.unwrap_or("")
706+
.to_lowercase();
707+
708+
// Match common image extensions
709+
match extension.as_str() {
710+
"jpg" | "jpeg" => "image/jpeg",
711+
"png" => "image/png",
712+
"gif" => "image/gif",
713+
"webp" => "image/webp",
714+
"svg" => "image/svg+xml",
715+
_ => "application/octet-stream",
716+
}
717+
}
718+
648719
#[tokio::test(flavor = "multi_thread")]
649720
async fn test_fetching_subreddit() {
650721
let subreddit = subreddit("rust", false).await;

0 commit comments

Comments (0)