Reorganize Crate References

spikecodes 2020-11-25 13:53:30 -08:00
parent 9b049be627
commit 3902a36ea3
6 changed files with 19 additions and 28 deletions
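In short: instead of every handler re-declaring utils.rs as its own submodule via #[path], utils is declared once in main.rs and the other files pull items in through a crate-relative use path; fully qualified names such as actix_web::http::StatusCode are likewise moved into use imports. Below is a minimal sketch of the before/after pattern, not the project's exact code (the real item lists are in the diffs that follow):

// Before (repeated in each handler file such as post.rs):
#[path = "utils.rs"]
mod utils;                   // re-declares utils.rs as a child of this module
use utils::{request, Post};  // paths resolve against that local declaration

// After: main.rs declares the module once for the whole crate...
mod utils;

// ...and every other file imports through the crate root:
use crate::utils::{request, Post};
use actix_web::http::StatusCode; // so call sites can write StatusCode::NOT_FOUND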

View File

@@ -7,6 +7,7 @@ mod post;
 mod subreddit;
 mod user;
 mod proxy;
+mod utils;
 
 // Create Services
 #[get("/style.css")]

View File

@@ -1,10 +1,7 @@
 // CRATES
-use actix_web::{get, web, HttpResponse, Result};
+use actix_web::{get, web, HttpResponse, Result, http::StatusCode};
 use askama::Template;
-
-#[path = "utils.rs"]
-mod utils;
-use utils::{fetch_posts, ErrorTemplate, Params, Post};
+use crate::utils::{fetch_posts, ErrorTemplate, Params, Post};
 
 // STRUCTS
 #[derive(Template)]
@@ -37,7 +34,7 @@ async fn render(sub_name: String, sort: Option<String>, ends: (Option<String>, O
 }
 .render()
 .unwrap();
-Ok(HttpResponse::Ok().status(actix_web::http::StatusCode::NOT_FOUND).content_type("text/html").body(s))
+Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
 } else {
 let items = items_result.unwrap();

View File

@@ -1,12 +1,9 @@
 // CRATES
-use actix_web::{get, web, HttpResponse, Result};
+use actix_web::{get, web, HttpResponse, Result, http::StatusCode};
 use askama::Template;
 use chrono::{TimeZone, Utc};
 use pulldown_cmark::{html, Options, Parser};
-
-#[path = "utils.rs"]
-mod utils;
-use utils::{request, val, Comment, ErrorTemplate, Flair, Params, Post};
+use crate::utils::{request, val, Comment, ErrorTemplate, Flair, Params, Post};
 
 // STRUCTS
 #[derive(Template)]
@@ -34,7 +31,7 @@ async fn render(id: String, sort: String) -> Result<HttpResponse> {
 }
 .render()
 .unwrap();
-return Ok(HttpResponse::Ok().status(actix_web::http::StatusCode::NOT_FOUND).content_type("text/html").body(s));
+return Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s));
 }
 
 // Otherwise, grab the JSON output from the request

View File

@@ -1,10 +1,7 @@
 // CRATES
-use actix_web::{get, web, HttpResponse, Result};
+use actix_web::{get, web, HttpResponse, Result, http::StatusCode};
 use askama::Template;
-
-#[path = "utils.rs"]
-mod utils;
-pub use utils::{request, val, fetch_posts, ErrorTemplate, Flair, Params, Post, Subreddit};
+use crate::utils::{request, val, fetch_posts, ErrorTemplate, Params, Post, Subreddit};
 
 // STRUCTS
 #[derive(Template)]
@@ -45,7 +42,7 @@ pub async fn render(sub_name: String, sort: Option<String>, ends: (Option<String
 }
 .render()
 .unwrap();
-Ok(HttpResponse::Ok().status(actix_web::http::StatusCode::NOT_FOUND).content_type("text/html").body(s))
+Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
 } else {
 let mut sub = sub_result.unwrap();
 let items = items_result.unwrap();

View File

@@ -1,10 +1,7 @@
 // CRATES
-use actix_web::{get, web, HttpResponse, Result};
+use actix_web::{get, web, HttpResponse, Result, http::StatusCode};
 use askama::Template;
-
-#[path = "utils.rs"]
-mod utils;
-use utils::{nested_val, request, fetch_posts, ErrorTemplate, Params, Post, User};
+use crate::utils::{nested_val, request, fetch_posts, ErrorTemplate, Params, Post, User};
 
 // STRUCTS
 #[derive(Template)]
@@ -28,7 +25,7 @@ async fn render(username: String, sort: String) -> Result<HttpResponse> {
 }
 .render()
 .unwrap();
-Ok(HttpResponse::Ok().status(actix_web::http::StatusCode::NOT_FOUND).content_type("text/html").body(s))
+Ok(HttpResponse::Ok().status(StatusCode::NOT_FOUND).content_type("text/html").body(s))
 } else {
 let s = UserTemplate {
 user: user.unwrap(),

View File

@@ -2,6 +2,8 @@
 // CRATES
 //
 use chrono::{TimeZone, Utc};
+use surf::{get, client, middleware::Redirect};
+use serde_json::{Value, from_str};
 
 //
 // STRUCTS
@@ -157,8 +159,8 @@ pub async fn request(url: String) -> Result<serde_json::Value, &'static str> {
 // let body = std::str::from_utf8(res.as_ref())?; // .as_ref converts Bytes to [u8]
 // --- surf ---
-let req = surf::get(&url).header("User-Agent", "libreddit");
-let client = surf::client().with(surf::middleware::Redirect::new(5));
+let req = get(&url).header("User-Agent", "libreddit");
+let client = client().with(Redirect::new(5));
 let mut res = client.send(req).await.unwrap();
 let success = res.status().is_success();
 let body = res.body_string().await.unwrap();
@@ -173,12 +175,12 @@ pub async fn request(url: String) -> Result<serde_json::Value, &'static str> {
 // let body = res.text().await.unwrap();
 
 // Parse the response from Reddit as JSON
-let json: serde_json::Value = serde_json::from_str(body.as_str()).unwrap_or(serde_json::Value::Null);
+let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);
 if !success {
 println!("! {} - {}", url, "Page not found");
 Err("Page not found")
-} else if json == serde_json::Value::Null {
+} else if json == Value::Null {
 println!("! {} - {}", url, "Failed to parse page JSON data");
 Err("Failed to parse page JSON data")
 } else {
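For reference, a minimal sketch of how the imported surf and serde_json names fit together in the request() helper after this commit. It paraphrases the hunks above rather than reproducing the file verbatim, and the final Ok(json) arm is an assumption based on the function's return type:

use serde_json::{from_str, Value};
use surf::{client, get, middleware::Redirect};

// Fetch a URL with a libreddit User-Agent, follow up to 5 redirects,
// and parse the body as JSON (falling back to Value::Null on parse failure).
pub async fn request(url: String) -> Result<Value, &'static str> {
	let req = get(&url).header("User-Agent", "libreddit");
	let client = client().with(Redirect::new(5));

	let mut res = client.send(req).await.unwrap();
	let success = res.status().is_success();
	let body = res.body_string().await.unwrap();

	let json: Value = from_str(body.as_str()).unwrap_or(Value::Null);

	if !success {
		println!("! {} - {}", url, "Page not found");
		Err("Page not found")
	} else if json == Value::Null {
		println!("! {} - {}", url, "Failed to parse page JSON data");
		Err("Failed to parse page JSON data")
	} else {
		Ok(json) // assumed: the success path returns the parsed JSON
	}
}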