From 57e17d064813162b1a1017368ce651ea89ea8aad Mon Sep 17 00:00:00 2001
From: BlackDex
Date: Sun, 28 Mar 2021 00:10:01 +0100
Subject: [PATCH] Updated diagnostics page

- Added reverse proxy check
- Better definition of internet proxy
- Added SQL server version detection
---
 src/api/admin.rs                            | 35 ++++++++++-
 src/db/mod.rs                               | 64 +++++++++++++++++---
 src/static/templates/admin/diagnostics.hbs  | 68 +++++++++++++++++-----
 3 files changed, 141 insertions(+), 26 deletions(-)

diff --git a/src/api/admin.rs b/src/api/admin.rs
index 39fbc691..f1ebc113 100644
--- a/src/api/admin.rs
+++ b/src/api/admin.rs
@@ -16,7 +16,7 @@ use crate::{
     api::{ApiResult, EmptyResult, JsonResult, NumberOrString},
     auth::{decode_admin, encode_jwt, generate_admin_claims, ClientIp},
     config::ConfigBuilder,
-    db::{backup_database, models::*, DbConn, DbConnType},
+    db::{backup_database, get_sql_server_version, models::*, DbConn, DbConnType},
     error::{Error, MapResult},
     mail,
     util::{format_naive_datetime_local, get_display_size, is_running_in_docker},
@@ -96,6 +96,27 @@ impl<'a, 'r> FromRequest<'a, 'r> for Referer {
     }
 }
 
+#[derive(Debug)]
+struct IpHeader(Option<String>);
+
+impl<'a, 'r> FromRequest<'a, 'r> for IpHeader {
+    type Error = ();
+
+    fn from_request(req: &'a Request<'r>) -> Outcome<Self, Self::Error> {
+        if req.headers().get_one(&CONFIG.ip_header()).is_some() {
+            Outcome::Success(IpHeader(Some(CONFIG.ip_header())))
+        } else if req.headers().get_one("X-Client-IP").is_some() {
+            Outcome::Success(IpHeader(Some(String::from("X-Client-IP"))))
+        } else if req.headers().get_one("X-Real-IP").is_some() {
+            Outcome::Success(IpHeader(Some(String::from("X-Real-IP"))))
+        } else if req.headers().get_one("X-Forwarded-For").is_some() {
+            Outcome::Success(IpHeader(Some(String::from("X-Forwarded-For"))))
+        } else {
+            Outcome::Success(IpHeader(None))
+        }
+    }
+}
+
 /// Used for `Location` response headers, which must specify an absolute URI
 /// (see https://tools.ietf.org/html/rfc2616#section-14.30).
 fn admin_url(referer: Referer) -> String {
@@ -475,7 +496,7 @@ fn has_http_access() -> bool {
 }
 
 #[get("/diagnostics")]
-fn diagnostics(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
+fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResult<Html<String>> {
     use crate::util::read_file_string;
     use chrono::prelude::*;
     use std::net::ToSocketAddrs;
@@ -529,6 +550,11 @@ fn diagnostics(_token: AdminToken, _conn: DbConn) -> ApiResult<Html<String>> {
         ("-".to_string(), "-".to_string(), "-".to_string())
     };
 
+    let ip_header_name = match &ip_header.0 {
+        Some(h) => h,
+        _ => ""
+    };
+
     let diagnostics_json = json!({
         "dns_resolved": dns_resolved,
         "web_vault_version": web_vault_version.version,
@@ -537,8 +563,13 @@
         "latest_web_build": latest_web_build,
         "running_within_docker": running_within_docker,
         "has_http_access": has_http_access,
+        "ip_header_exists": &ip_header.0.is_some(),
+        "ip_header_match": ip_header_name == &CONFIG.ip_header(),
+        "ip_header_name": ip_header_name,
+        "ip_header_config": &CONFIG.ip_header(),
         "uses_proxy": uses_proxy,
         "db_type": *DB_TYPE,
+        "db_version": get_sql_server_version(&conn),
         "admin_url": format!("{}/diagnostics", admin_url(Referer(None))),
         "server_time": Utc::now().format("%Y-%m-%d %H:%M:%S UTC").to_string(), // Run the date/time check as the last item to minimize the difference
     });

diff --git a/src/db/mod.rs b/src/db/mod.rs
index 35d9bf78..19191582 100644
--- a/src/db/mod.rs
+++ b/src/db/mod.rs
@@ -125,16 +125,34 @@ macro_rules! db_run {
db_run { $($( #[cfg($db)] crate::db::DbConn::$db(ref $conn) => { - paste::paste! { + paste::paste! { #[allow(unused)] use crate::db::[<__ $db _schema>]::{self as schema, *}; #[allow(unused)] use [<__ $db _model>]::*; - #[allow(unused)] use crate::db::FromDb; + #[allow(unused)] use crate::db::FromDb; } $body }, )+)+ } }; + + // Same for all dbs + ( @raw $conn:ident: $body:block ) => { + db_run! { @raw $conn: sqlite, mysql, postgresql $body } + }; + + // Different code for each db + ( @raw $conn:ident: $( $($db:ident),+ $body:block )+ ) => { + #[allow(unused)] use diesel::prelude::*; + match $conn { + $($( + #[cfg($db)] + crate::db::DbConn::$db(ref $conn) => { + $body + }, + )+)+ + } + }; } @@ -144,7 +162,7 @@ pub trait FromDb { fn from_db(self) -> Self::Output; } -// For each struct eg. Cipher, we create a CipherDb inside a module named __$db_model (where $db is sqlite, mysql or postgresql), +// For each struct eg. Cipher, we create a CipherDb inside a module named __$db_model (where $db is sqlite, mysql or postgresql), // to implement the Diesel traits. We also provide methods to convert between them and the basic structs. Later, that module will be auto imported when using db_run! #[macro_export] macro_rules! db_object { @@ -154,10 +172,10 @@ macro_rules! db_object { $( $( #[$field_attr:meta] )* $vis:vis $field:ident : $typ:ty ),+ $(,)? } - )+ ) => { + )+ ) => { // Create the normal struct, without attributes $( pub struct $name { $( /*$( #[$field_attr] )**/ $vis $field : $typ, )+ } )+ - + #[cfg(sqlite)] pub mod __sqlite_model { $( db_object! { @db sqlite | $( #[$attr] )* | $name | $( $( #[$field_attr] )* $field : $typ ),+ } )+ } #[cfg(mysql)] @@ -178,7 +196,7 @@ macro_rules! db_object { )+ } impl [<$name Db>] { - #[allow(clippy::wrong_self_convention)] + #[allow(clippy::wrong_self_convention)] #[inline(always)] pub fn to_db(x: &super::$name) -> Self { Self { $( $field: x.$field.clone(), )+ } } } @@ -222,6 +240,36 @@ pub fn backup_database() -> Result<(), Error> { Ok(()) } + +use diesel::sql_types::Text; +#[derive(QueryableByName,Debug)] +struct SqlVersion { + #[sql_type = "Text"] + version: String, +} + +/// Get the SQL Server version +pub fn get_sql_server_version(conn: &DbConn) -> String { + db_run! {@raw conn: + postgresql, mysql { + match diesel::sql_query("SELECT version() AS version;").get_result::(conn).ok() { + Some(v) => { + v.version + }, + _ => "Unknown".to_string() + } + } + sqlite { + match diesel::sql_query("SELECT sqlite_version() AS version;").get_result::(conn).ok() { + Some(v) => { + v.version + }, + _ => "Unknown".to_string() + } + } + } +} + /// Attempts to retrieve a single connection from the managed database pool. If /// no pool is currently managed, fails with an `InternalServerError` status. If /// no connections are available, fails with a `ServiceUnavailable` status. @@ -263,7 +311,7 @@ mod sqlite_migrations { let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration - + // Scoped to a connection. 
diesel::sql_query("PRAGMA foreign_keys = OFF") .execute(&connection) @@ -314,7 +362,7 @@ mod postgresql_migrations { let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration - + // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, // "SET CONSTRAINTS sets the behavior of constraint checking within the // current transaction", so this setting probably won't take effect for diff --git a/src/static/templates/admin/diagnostics.hbs b/src/static/templates/admin/diagnostics.hbs index fbb6a183..8d7901db 100644 --- a/src/static/templates/admin/diagnostics.hbs +++ b/src/static/templates/admin/diagnostics.hbs @@ -2,7 +2,7 @@
 Diagnostics
 
-Version
+Versions
@@ -35,6 +35,10 @@
 {{diagnostics.latest_web_build}}
 {{/unless}}
+Database
+{{diagnostics.db_type}}: {{diagnostics.db_version}}
@@ -46,35 +50,65 @@
 Running within Docker
 {{#if diagnostics.running_within_docker}}
-Yes
+Yes
 {{/if}}
 {{#unless diagnostics.running_within_docker}}
-No
+No
 {{/unless}}
 
-Uses a proxy
+Uses a reverse proxy
 
-{{#if diagnostics.uses_proxy}}
-Yes
+{{#if diagnostics.ip_header_exists}}
+Yes
 {{/if}}
-{{#unless diagnostics.uses_proxy}}
-No
+{{#unless diagnostics.ip_header_exists}}
+No
 {{/unless}}
 
+{{!-- Only show this if the IP Header Exists --}}
+{{#if diagnostics.ip_header_exists}}
+IP header
+{{#if diagnostics.ip_header_match}}
+Match
+{{/if}}
+{{#unless diagnostics.ip_header_match}}
+No Match
+{{/unless}}
+
+{{#if diagnostics.ip_header_match}}
+Config/Server: {{ diagnostics.ip_header_name }}
+{{/if}}
+{{#unless diagnostics.ip_header_match}}
+Config: {{ diagnostics.ip_header_config }}
+Server: {{ diagnostics.ip_header_name }}
+{{/unless}}
+{{/if}}
+{{!-- End if IP Header Exists --}}
 
 Internet access
 {{#if diagnostics.has_http_access}}
-Ok
+Ok
 {{/if}}
 {{#unless diagnostics.has_http_access}}
-Error
+Error
 {{/unless}}
 {{#if diagnostics.has_http_access}}
-Yes
+Yes
 {{/if}}
 {{#unless diagnostics.has_http_access}}
-No
+No
 {{/unless}}
+
+Internet access via a proxy
+{{#if diagnostics.uses_proxy}}
+Yes
+{{/if}}
+{{#unless diagnostics.uses_proxy}}
+No
+{{/unless}}
 DNS (github.com)
@@ -263,16 +297,18 @@
             supportString += "* Bitwarden_rs version: v{{ version }}\n";
             supportString += "* Web-vault version: v{{ diagnostics.web_vault_version }}\n";
             supportString += "* Running within Docker: {{ diagnostics.running_within_docker }}\n";
+            supportString += "* Uses a reverse proxy: {{ diagnostics.ip_header_exists }}\n";
+            {{#if diagnostics.ip_header_exists}}
+            supportString += "* IP Header check: {{ diagnostics.ip_header_match }} ({{ diagnostics.ip_header_name }})\n";
+            {{/if}}
             supportString += "* Internet access: {{ diagnostics.has_http_access }}\n";
-            supportString += "* Uses a proxy: {{ diagnostics.uses_proxy }}\n";
+            supportString += "* Internet access via a proxy: {{ diagnostics.uses_proxy }}\n";
             supportString += "* DNS Check: " + dnsCheck + "\n";
             supportString += "* Time Check: " + timeCheck + "\n";
             supportString += "* Domain Configuration Check: " + domainCheck + "\n";
             supportString += "* HTTPS Check: " + httpsCheck + "\n";
             supportString += "* Database type: {{ diagnostics.db_type }}\n";
-            {{#case diagnostics.db_type "MySQL" "PostgreSQL"}}
-            supportString += "* Database version: [PLEASE PROVIDE DATABASE VERSION]\n";
-            {{/case}}
+            supportString += "* Database version: {{ diagnostics.db_version }}\n";
             supportString += "* Clients used: \n";
             supportString += "* Reverse proxy and version: \n";
             supportString += "* Other relevant information: \n";
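As a closing illustration, and not part of the patch, here is a minimal self-contained sketch of how the SQLite branch of the new version query could be exercised on its own. It opens an in-memory database directly through Diesel instead of going through the DbConn pool and the db_run! macro, and the test module and test names are made up for illustration:

#[cfg(test)]
mod sql_version_probe {
    use diesel::prelude::*;
    use diesel::sql_types::Text;

    // Mirrors the row type added to src/db/mod.rs by this patch.
    #[derive(QueryableByName)]
    struct SqlVersion {
        #[sql_type = "Text"]
        version: String,
    }

    // Runs the exact statement used by the sqlite arm of `get_sql_server_version`,
    // bypassing the r2d2 connection pool.
    #[test]
    fn sqlite_reports_a_version() {
        let conn = diesel::sqlite::SqliteConnection::establish(":memory:")
            .expect("in-memory SQLite connections should always open");
        let row = diesel::sql_query("SELECT sqlite_version() AS version;")
            .get_result::<SqlVersion>(&conn)
            .expect("sqlite_version() should return one row");
        assert!(!row.version.is_empty());
    }
}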