Refactor subreddit searching

spikecodes 2021-01-14 11:45:04 -08:00
parent cf4c5e1fe8
commit 18684c934b
1 changed file with 34 additions and 31 deletions


@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{error, fetch_posts, param, prefs, Post, Preferences, request, val};
+use crate::utils::{error, fetch_posts, param, prefs, request, val, Post, Preferences};
 use actix_web::{HttpRequest, HttpResponse};
 use askama::Template;
@@ -34,42 +34,19 @@ struct SearchTemplate {
 // SERVICES
 pub async fn find(req: HttpRequest) -> HttpResponse {
 	let path = format!("{}.json?{}", req.path(), req.query_string());
-	let sub = req.match_info().get("sub").unwrap_or("").to_string();
 	let sort = if param(&path, "sort").is_empty() {
 		"relevance".to_string()
 	} else {
 		param(&path, "sort")
 	};
+	let sub = req.match_info().get("sub").unwrap_or("").to_string();
-	let mut subreddits: Vec<Subreddit> = Vec::new();
-	if param(&path, "restrict_sr") == "" {
-		let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", param(&path, "q"));
-		let res;
-		let subreddit_list;
-		// Send a request to the url
-		match request(&subreddit_search_path).await {
-			// If success, receive JSON in response
-			Ok(response) => {
-				res = response;
-				subreddit_list = res["data"]["children"].as_array();
-			}
-			// If the Reddit API returns an error, exit this function
-			Err(_msg) => {subreddit_list = None;}
-		}
-		// For each subreddit from subreddit list
-		if !subreddit_list.is_none() {
-			for subreddit in subreddit_list.unwrap() {
-				subreddits.push(Subreddit {
-					name: val(subreddit, "display_name_prefixed"),
-					url: val(subreddit, "url"),
-					description: val(subreddit, "public_description"),
-					subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
-				});
-			}
-		}
-	}
+	let subreddits = if param(&path, "restrict_sr").is_empty() {
+		search_subreddits(param(&path, "q")).await
+	} else {
+		Vec::new()
+	};
 
 	match fetch_posts(&path, String::new()).await {
 		Ok((posts, after)) => HttpResponse::Ok().content_type("text/html").body(
@@ -93,3 +70,29 @@ pub async fn find(req: HttpRequest) -> HttpResponse {
 		Err(msg) => error(msg).await,
 	}
 }
+
+async fn search_subreddits(q: String) -> Vec<Subreddit> {
+	let subreddit_search_path = format!("/subreddits/search.json?q={}&limit=3", q.replace(' ', "+"));
+
+	// Send a request to the url
+	match request(&subreddit_search_path).await {
+		// If success, receive JSON in response
+		Ok(response) => {
+			match response["data"]["children"].as_array() {
+				// For each subreddit from subreddit list
+				Some(list) => list
+					.iter()
+					.map(|subreddit| Subreddit {
+						name: val(subreddit, "display_name_prefixed"),
+						url: val(subreddit, "url"),
+						description: val(subreddit, "public_description"),
+						subscribers: subreddit["data"]["subscribers"].as_u64().unwrap_or_default() as i64,
+					})
+					.collect::<Vec<Subreddit>>(),
+				_ => Vec::new(),
+			}
+		}
+		// If the Reddit API returns an error, exit this function
+		_ => Vec::new(),
+	}
+}
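
For readers without the rest of search.rs at hand: the new helper builds values of the Subreddit type declared earlier in the file, outside the hunks shown here. A minimal sketch of that type, with the fields and types inferred from this diff (assuming val returns String; the explicit cast above implies i64 for subscribers), might look like:

// Sketch only: this struct is not part of the diff above; its shape is
// inferred from how search_subreddits fills it in.
struct Subreddit {
	name: String,        // val(subreddit, "display_name_prefixed"), e.g. "r/rust"
	url: String,         // val(subreddit, "url")
	description: String, // val(subreddit, "public_description")
	subscribers: i64,    // subreddit["data"]["subscribers"], 0 when absent
}

The net effect of the refactor is that find no longer mutates a local Vec inside nested match/if blocks; it simply awaits search_subreddits when restrict_sr is unset, and the subreddit-search request logic lives in one reusable async function.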